author    Lars Wirzenius <liw@liw.fi>  2019-02-27 12:16:15 +0200
committer Lars Wirzenius <liw@liw.fi>  2019-02-27 12:16:15 +0200
commit    f9ba0c8565763cc185d2fbe94bb0296d845f210e (patch)
tree      688b3054b973dcc3aaf76fc70deb0ea564da404b
parent    b89ea860741543e93101f851591606ae88743093 (diff)
download  muck-poc-f9ba0c8565763cc185d2fbe94bb0296d845f210e.tar.gz
Change: benchmark-log to combine log snippets in Muck
-rwxr-xr-x  benchmark-log    1
-rwxr-xr-x  dummy-logger   106
2 files changed, 85 insertions(+), 22 deletions(-)
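In brief: dummy-logger used to store every log line as its own 'snippet' resource and rebuild the log by fetching each one individually. With this change it combines snippets as it goes: after every MAX_SNIPPETS (1000) stored snippets, combine_snippets folds the pending snippets into a single 'combined-snippet' resource and deletes the originals. A standalone sketch of the batching schedule, mirroring time_to_combine from the diff below (plain Python, no Muck server needed):

MAX_SNIPPETS = 1000

def time_to_combine(seq):
    # seq numbers start at 0, so snippet seq closes a batch exactly
    # when seq + 1 is a multiple of MAX_SNIPPETS.
    return (seq + 1) % MAX_SNIPPETS == 0

# Combine passes run after seq 999, 1999, 2999, ...
# i.e. N // MAX_SNIPPETS times over N snippets.
assert [i for i in range(3000) if time_to_combine(i)] == [999, 1999, 2999]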
diff --git a/benchmark-log b/benchmark-log
index 5443b35..a9a5872 100755
--- a/benchmark-log
+++ b/benchmark-log
@@ -47,4 +47,3 @@ trap stop EXIT
sleep 2
./dummy-logger http://127.0.0.1:12765 "$token" "$N"
-
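The benchmark-log hunk above only drops a trailing blank line; the substance is in dummy-logger below. The reconstruction in get_full_log relies on an ordering invariant: each 'combined-snippet' covers one full batch of earlier seq numbers, only the unfinished tail remains as plain snippets, so sorting each group by seq and concatenating combined-before-plain restores the original log. A pure-Python model of that invariant (simplified in that it gives each combined object its batch's first seq; the real code uses the seq of the first search result, which still sorts correctly because batch seq ranges do not overlap):

def assemble(objs):
    # mirrors assemble_from_objects: sort by seq, join the texts
    return ''.join(o['text'] for o in sorted(objs, key=lambda o: o['seq']))

lines = ['log line {}\n'.format(i) for i in range(2500)]
combined = [
    {'seq': 0, 'text': ''.join(lines[:1000])},
    {'seq': 1000, 'text': ''.join(lines[1000:2000])},
]
plain = [{'seq': i, 'text': lines[i]} for i in range(2000, 2500)]
assert assemble(combined) + assemble(plain) == ''.join(lines)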
diff --git a/dummy-logger b/dummy-logger
index ae53d35..32d6693 100755
--- a/dummy-logger
+++ b/dummy-logger
@@ -24,20 +24,20 @@ import time
import requests
-def generate_snippets(n):
+MAX_SNIPPETS = 1000
+
+
+def generate_snippet_texts(n):
for i in range(n):
yield 'log line {}\n'.format(i)
+
def full_log(n):
- return ''.join(generate_snippets(n))
+ return ''.join(generate_snippet_texts(n))
+
-def store_snippet(url, token, i, snippet):
+def store_object(url, token, obj):
url = '{}/res'.format(url)
- obj = {
- '_type': 'snippet',
- 'seq': i,
- 'text': snippet,
- }
headers = {
'Authorization': 'Bearer {}'.format(token),
'Content-Type': 'application/json',
@@ -45,11 +45,8 @@ def store_snippet(url, token, i, snippet):
r = requests.post(url, headers=headers, data=json.dumps(obj))
assert r.ok
-def create_snippets(url, token, n):
- for i, snippet in enumerate(generate_snippets(n)):
- store_snippet(url, token, i, snippet)
-def get_snippet(url, token, rid):
+def get_object(url, token, rid):
url = '{}/res'.format(url)
headers = {
'Authorization': 'Bearer {}'.format(token),
@@ -61,15 +58,15 @@ def get_snippet(url, token, rid):
return r.json()
-def get_snippet_ids(url, token):
+def search_objects_of_type(url, token, type_name):
search = '{}/search'.format(url)
body = {
'cond': [
{
"where": "data",
+ "op": "==",
"field": "_type",
- "pattern": "snippet",
- "op": "=="
+ "pattern": type_name,
},
],
}
@@ -83,10 +80,67 @@ def get_snippet_ids(url, token):
obj = r.json()
return obj['resources']
-def get_full_log(url, token, ids):
- snippets = [get_snippet(url, token, rid) for rid in ids]
- snippets.sort(key=lambda o: o['seq'])
- return ''.join(o['text'] for o in snippets)
+
+def delete_object(url, token, rid):
+ url = '{}/res'.format(url)
+ headers = {
+ 'Authorization': 'Bearer {}'.format(token),
+ 'Muck-Id': rid,
+ }
+ r = requests.delete(url, headers=headers)
+ assert r.ok
+
+
+def time_to_combine(seq):
+ # seq numbers start at 0
+ num = seq + 1
+ return (num % MAX_SNIPPETS) == 0
+
+
+def combine_snippets(url, token):
+ snippets = []
+ first_seq = None
+ rids = search_objects_of_type(url, token, 'snippet')
+
+ if not rids:
+ return
+
+ first = get_object(url, token, rids[0])
+ combined = {
+ '_type': 'combined-snippet',
+ 'seq': first['seq'],
+ 'text': assemble_from_objects(url, token, rids),
+ }
+
+ store_object(url, token, combined)
+
+ for rid in rids:
+ delete_object(url, token, rid)
+
+
+def create_snippets(url, token, n):
+ for i, snippet in enumerate(generate_snippet_texts(n)):
+ obj = {
+ '_type': 'snippet',
+ 'seq': i,
+ 'text': snippet,
+ }
+ store_object(url, token, obj)
+ if time_to_combine(i):
+ combine_snippets(url, token)
+
+
+def assemble_from_objects(url, token, ids):
+ objs = [get_object(url, token, rid) for rid in ids]
+ objs.sort(key=lambda o: o['seq'])
+ return ''.join(o['text'] for o in objs)
+
+
+def get_full_log(url, token, combined_ids, snippet_ids):
+ combined = assemble_from_objects(url, token, combined_ids)
+ snippets = assemble_from_objects(url, token, snippet_ids)
+ return combined + snippets
+
def measure(func):
started = time.time()
@@ -94,6 +148,7 @@ def measure(func):
now = time.time()
return now - started, ret
+
url = sys.argv[1]
token = sys.argv[2]
N = int(sys.argv[3])
@@ -101,11 +156,19 @@ N = int(sys.argv[3])
print('creating snippets')
creation_secs, _ = measure(lambda: create_snippets(url, token, N))
+print('getting list of combined snippets')
+get_combined_secs, combined_ids = measure(
+ lambda: search_objects_of_type(url, token, 'combined-snippet'))
+print('combined ids', len(combined_ids))
+
print('getting list of snippets')
-get_snippets_secs, ids = measure(lambda: get_snippet_ids(url, token))
+get_snippets_secs, snippet_ids = measure(
+ lambda: search_objects_of_type(url, token, 'snippet'))
+print('snippet ids', len(snippet_ids))
print('reconstructing full log')
-get_log_secs, log = measure(lambda: get_full_log(url, token, ids))
+get_log_secs, log = measure(
+ lambda: get_full_log(url, token, combined_ids, snippet_ids))
expected = full_log(N)
if expected != log:
@@ -116,5 +179,6 @@ if expected != log:
print('OK')
print('%.0f' % creation_secs, 'creation time')
+print('%.0f' % get_combined_secs, 'list combined snippets')
print('%.0f' % get_snippets_secs, 'list snippets')
print('%.0f' % get_log_secs, 'assemble log')
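For context, benchmark-log waits two seconds for a local Muck instance and then runs ./dummy-logger http://127.0.0.1:12765 "$token" "$N"; the figures it prints at the end are seconds rounded with '%.0f'. The middle of measure falls between the two hunks above and is not shown; presumably it just calls func, along these lines:

import time

def measure(func):
    started = time.time()
    ret = func()  # assumed: the elided line between the two hunks
    now = time.time()
    return now - started, ret

secs, value = measure(lambda: sum(range(10**6)))
print('%.0f' % secs, 'demo timing')  # same '%.0f' format the script uses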