From b65d36d2ae183a3e2005d5c890a6550f8e283d17 Mon Sep 17 00:00:00 2001 From: Rafal Mucha <6rafel6@gmail.com> Date: Mon, 15 Jun 2020 20:52:30 +0200 Subject: [PATCH 01/13] Fix types error --- sebs.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sebs.py b/sebs.py index 718d1098..b2bf322a 100755 --- a/sebs.py +++ b/sebs.py @@ -230,7 +230,8 @@ "deployment": deployment_client.config.serialize(), } with open("experiments.json", "w") as out_f: - json.dump(benchmark_summary, out_f, indent=2) + print(benchmark_summary) + json.dump(str(benchmark_summary), out_f, indent=2) elif args.action == "experiment": # Prepare benchmark input input_config = prepare_input( From 7ea80e172bbdc1d12a4c207a0b05b3e2171e401c Mon Sep 17 00:00:00 2001 From: Rafal Mucha <6rafel6@gmail.com> Date: Mon, 15 Jun 2020 20:52:30 +0200 Subject: [PATCH 02/13] Fix types error --- sebs.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sebs.py b/sebs.py index 86326476..0965fa52 100755 --- a/sebs.py +++ b/sebs.py @@ -231,7 +231,8 @@ "deployment": deployment_client.config.serialize(), } with open("experiments.json", "w") as out_f: - json.dump(benchmark_summary, out_f, indent=2) + print(benchmark_summary) + json.dump(str(benchmark_summary), out_f, indent=2) elif args.action == "experiment": # Prepare benchmark input input_config = prepare_input( From 121813779a4d50ee0019b3199578567b877e5781 Mon Sep 17 00:00:00 2001 From: Rafal Mucha Date: Wed, 17 Jun 2020 21:31:18 +0200 Subject: [PATCH 03/13] Add first functions --- benchmarks/meta/composer.py | 36 +++++++++++++++++++++ benchmarks/meta/memory/python/function.py | 20 ++++++++++++ benchmarks/meta/workload/python/function.py | 35 ++++++++++++++++++++ 3 files changed, 91 insertions(+) create mode 100644 benchmarks/meta/composer.py create mode 100644 benchmarks/meta/memory/python/function.py create mode 100644 benchmarks/meta/workload/python/function.py diff --git a/benchmarks/meta/composer.py b/benchmarks/meta/composer.py new file mode 100644 index 00000000..2cab62b6 --- /dev/null +++ b/benchmarks/meta/composer.py @@ -0,0 +1,36 @@ +import os + +def load_benchmark_code(benchmark_name, language="python"): + current_dir = os.getcwd() + path_to_code = os.path.join(current_dir, benchmark_name, language, "function.py" if language == "python" else "sth.js") + with open() + + +config = [ + ("workload", { + "iterations": 10000, + "operator": "-", + "type": "float32", + "array_size": 1000 + } + ), + ("memory", { + "size_in_bytes": 1024 * 1024 * 1024 + }), + ("workload", { + "iterations": 10000, + "operator": "-", + "type": "float32", + "array_size": 1000 + } + ) +] + +code = "" + +benchmarks_list = {benchmark for (benchmark, benchmark_config) in config} + +# load code of benchmarks +code_map = { + benchmark_name: load_benchmark_code(benchmark_name) for benchmark_name in benchmarks_list +} diff --git a/benchmarks/meta/memory/python/function.py b/benchmarks/meta/memory/python/function.py new file mode 100644 index 00000000..2a3131b3 --- /dev/null +++ b/benchmarks/meta/memory/python/function.py @@ -0,0 +1,20 @@ +#test +config = { + "size_in_bytes": 1024 * 1024 * 1024 +} +#import +import numpy as np +import time +#function +def allocate(size_in_bytes): + t0 = time.clock() + arr = np.ones(int(size_in_bytes/4), dtype=np.dtype("int32")) + t1 = time.clock() + return { + "time": t1 - t0, + "size_in_bytes": size_in_bytes + } +#run +size_of_allocated_memory = config.get("size_in_bytes", 1024 * 1024) # Default 1 MB +result = allocate(size_of_allocated_memory) 
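The microbenchmark files introduced here all follow a fixed layout: sections delimited by the literal markers `#test`, `#import`, `#function` and `#run`, which `composer.py` splits on so that several benchmarks can be recombined into one deployable function. A minimal standalone sketch of that splitting step (hand-written here, not code from the series; it assumes each marker occurs exactly once per file):

```python
def split_benchmark_source(source_code: str) -> dict:
    # The "#test" section holds a local config stub used only when the file
    # is run standalone; composer.py discards it, and so does this sketch.
    _, after_test = source_code.split("#test", 1)
    _, after_import = after_test.split("#import", 1)
    import_part, after_function = after_import.split("#function", 1)
    function_part, run_part = after_function.split("#run", 1)
    return {"import": import_part, "function": function_part, "run": run_part}
```

Feeding the memory benchmark's source through this yields its numpy import block, the `allocate` definition, and the `#run` tail that the composer later splices under a shared handler.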
+print(result) diff --git a/benchmarks/meta/workload/python/function.py b/benchmarks/meta/workload/python/function.py new file mode 100644 index 00000000..286c9719 --- /dev/null +++ b/benchmarks/meta/workload/python/function.py @@ -0,0 +1,35 @@ +#test +config = { + "iterations": 1000000, + "operator": "-", + "type": "float32", + "array_size": 10000 +} +#import +import numpy as np +import time +import operator as op +#function +def workload(number_of_iterations, dtype, array_size, operator): + a = np.ones(array_size, dtype=dtype) * 2 + b = np.ones(array_size, dtype=dtype) * 3 + t0 = time.clock() + for i in range(number_of_iterations): + c = operator(a, b) + t1 = time.clock() + return {"number_of_operations": number_of_iterations * array_size, + "dtype": dtype, + "time": t1 - t0} +#invoke +string_to_operator = { + "+": op.add, + "-": op.sub, + "*": op.mul, + "/": op.truediv, +} +element_type = np.dtype(config.get("type", np.float)) +number_of_iterations = config.get("iterations", 10000) +array_size = config.get("array_size", 100) +operator = string_to_operator[config.get("operator", "+")] +result = workload(number_of_iterations, element_type, array_size, operator)) +print(result) From 03b1ac21dfcd8f36f211d674efe3316e1625ed37 Mon Sep 17 00:00:00 2001 From: Rafal Mucha Date: Wed, 17 Jun 2020 22:52:41 +0200 Subject: [PATCH 04/13] Add working composer --- benchmarks/meta/composer.py | 30 +++++++++++++++++++-- benchmarks/meta/workload/python/function.py | 4 +-- 2 files changed, 30 insertions(+), 4 deletions(-) diff --git a/benchmarks/meta/composer.py b/benchmarks/meta/composer.py index 2cab62b6..35cba2c9 100644 --- a/benchmarks/meta/composer.py +++ b/benchmarks/meta/composer.py @@ -3,7 +3,18 @@ def load_benchmark_code(benchmark_name, language="python"): current_dir = os.getcwd() path_to_code = os.path.join(current_dir, benchmark_name, language, "function.py" if language == "python" else "sth.js") - with open() + with open(path_to_code, "r") as source_file: + source_code = source_file.read() + [_, after_test] = source_code.split("#test") + [_, after_import] = after_test.split("#import") + [import_part, after_function] = after_import.split("#function") + [function_part, run_part] = after_function.split("#run") + return { + "import": import_part, + "function": function_part, + "run": run_part + } + config = [ @@ -31,6 +42,21 @@ def load_benchmark_code(benchmark_name, language="python"): benchmarks_list = {benchmark for (benchmark, benchmark_config) in config} # load code of benchmarks -code_map = { +code_maps = { benchmark_name: load_benchmark_code(benchmark_name) for benchmark_name in benchmarks_list } + +# add imports +for code_map in code_maps.values(): + code += code_map["import"] + "\n" + +#add functions +for code_map in code_maps.values(): + code += code_map["function"] + "\n" + +#add invoke of benchmarks +for (benchmark_name, benchmark_config) in config: + code += "config = " + str(benchmark_config) + code += code_maps[benchmark_name]["run"] + +print(code) \ No newline at end of file diff --git a/benchmarks/meta/workload/python/function.py b/benchmarks/meta/workload/python/function.py index 286c9719..b3992449 100644 --- a/benchmarks/meta/workload/python/function.py +++ b/benchmarks/meta/workload/python/function.py @@ -20,7 +20,7 @@ def workload(number_of_iterations, dtype, array_size, operator): return {"number_of_operations": number_of_iterations * array_size, "dtype": dtype, "time": t1 - t0} -#invoke +#run string_to_operator = { "+": op.add, "-": op.sub, @@ -31,5 +31,5 @@ def 
workload(number_of_iterations, dtype, array_size, operator): number_of_iterations = config.get("iterations", 10000) array_size = config.get("array_size", 100) operator = string_to_operator[config.get("operator", "+")] -result = workload(number_of_iterations, element_type, array_size, operator)) +result = workload(number_of_iterations, element_type, array_size, operator) print(result) From 7db6598006d8121ad47c24c5d48aac405877312a Mon Sep 17 00:00:00 2001 From: Rafal Mucha Date: Thu, 2 Jul 2020 22:21:58 +0200 Subject: [PATCH 05/13] Add benchmarks generator --- benchmark_generator/code_composer.py | 61 +++++++++++++++++++ benchmark_generator/disc/python/function.py | 31 ++++++++++ .../function_input/python/function.py | 16 +++++ benchmark_generator/generator.py | 34 +++++++++++ benchmark_generator/input_composer.py | 27 ++++++++ benchmark_generator/memory/python/function.py | 22 +++++++ .../memory/python/requirements.txt | 1 + .../network/python/function.py | 13 ++++ .../network/python/requirements.txt | 1 + benchmark_generator/requirements_composer.py | 22 +++++++ .../workload/python/function.py | 37 +++++++++++ .../workload/python/requirements.txt | 1 + config/generator_config.json | 43 +++++++++++++ generate.sh | 2 + 14 files changed, 311 insertions(+) create mode 100644 benchmark_generator/code_composer.py create mode 100644 benchmark_generator/disc/python/function.py create mode 100644 benchmark_generator/function_input/python/function.py create mode 100644 benchmark_generator/generator.py create mode 100644 benchmark_generator/input_composer.py create mode 100644 benchmark_generator/memory/python/function.py create mode 100644 benchmark_generator/memory/python/requirements.txt create mode 100644 benchmark_generator/network/python/function.py create mode 100644 benchmark_generator/network/python/requirements.txt create mode 100644 benchmark_generator/requirements_composer.py create mode 100644 benchmark_generator/workload/python/function.py create mode 100644 benchmark_generator/workload/python/requirements.txt create mode 100644 config/generator_config.json create mode 100755 generate.sh diff --git a/benchmark_generator/code_composer.py b/benchmark_generator/code_composer.py new file mode 100644 index 00000000..4913ee06 --- /dev/null +++ b/benchmark_generator/code_composer.py @@ -0,0 +1,61 @@ +import os + +def load_benchmark_code(benchmark_name, language="python"): + current_dir = os.getcwd() + path_to_code = os.path.join(current_dir, benchmark_name, language, "function.py" if language == "python" else "sth.js") + if os.path.exists(path_to_code): + with open(path_to_code, "r") as source_file: + source_code = source_file.read() + [_, after_test] = source_code.split("#test") + [_, after_import] = after_test.split("#import") + [import_part, after_function] = after_import.split("#function") + [function_part, run_part] = after_function.split("#run") + return { + "import": import_part, + "function": function_part, + "run": run_part + } + return { + "import": "", + "function": "", + "run": "" + } + +def intend(body): + new_body = "" + for line in body.splitlines(): + new_body += "\n\t" + line + return new_body + +def compose(config): + + code = "" + + benchmarks_list = {benchmark for (benchmark, benchmark_config) in config} + + # load code of benchmarks + code_maps = { + benchmark_name: load_benchmark_code(benchmark_name) for benchmark_name in benchmarks_list + } + + # add imports + for code_map in code_maps.values(): + code += code_map["import"] + "\n" + + #add functions + for code_map in 
code_maps.values(): + code += code_map["function"] + "\n" + + code += "\ndef handler(event):\n" + #add invoke of benchmarks + handler_function = "result = {}\n" + for (number, (benchmark_name, benchmark_config)) in enumerate(config): + handler_function += "\nnumber = " + str(number) + "\n" + handler_function += "config = " + str(benchmark_config) + "\n" + handler_function += code_maps[benchmark_name]["run"] + + handler_function += """\nresult['result'] = 100\nreturn {'result': result }""" # dummy result, different doesn't work + + code += intend(handler_function) + + return code diff --git a/benchmark_generator/disc/python/function.py b/benchmark_generator/disc/python/function.py new file mode 100644 index 00000000..2be7af48 --- /dev/null +++ b/benchmark_generator/disc/python/function.py @@ -0,0 +1,31 @@ +#test +config = { + "block_size": 1024*1024*128 +} +result = {} +number = 0 +#import +import numpy as np +import time +import uuid +import os +#function +def test_disc(block_size, file_name): + a = np.ones(int(block_size / 4), dtype=np.dtype("int32")) * 2 + t0 = time.clock() + np.save(file_name, a) + t1 = time.clock() + t2 = time.clock() + np.load(file_name) + t3 = time.clock() + + write_time = t1 - t0 + read_time = t3 - t2 + return {"block_size": block_size, + "write_time": write_time, + "read_time": read_time} +#run +block_size = config.get("block_size", 100) +file_name = "/tmp/sebs.npy" +result[str(number)] = test_disc(block_size, file_name) +print(result) diff --git a/benchmark_generator/function_input/python/function.py b/benchmark_generator/function_input/python/function.py new file mode 100644 index 00000000..e81af07f --- /dev/null +++ b/benchmark_generator/function_input/python/function.py @@ -0,0 +1,16 @@ +#test +config = { + "output_size": 100 +} +result = {} +#import +import uuid +#function +def fill_dict(dict_to_fill, number_of_entries): + for i in range(number_of_entries): + dict_to_fill[str(uuid.uuid1())] = str(uuid.uuid1()) + +#run +number_of_entries = config.get("output_size") +fill_dict(result, number_of_entries) +print(result) \ No newline at end of file diff --git a/benchmark_generator/generator.py b/benchmark_generator/generator.py new file mode 100644 index 00000000..2e572197 --- /dev/null +++ b/benchmark_generator/generator.py @@ -0,0 +1,34 @@ +import sys +import json +import code_composer +import requirements_composer +import input_composer +import os + +if len(sys.argv) < 2: + print("Missing argument, path to config") + +with open(sys.argv[1]) as config_file: + config = json.load(config_file) + +# Generate directory for benchmark +path_to_benchmark = "./../benchmarks/600.generated/610.generated/python" +if not os.path.exists(path_to_benchmark): + os.makedirs(path_to_benchmark) + +# Push code to benchmarks/600.generated/610.generated/python/function.py + +with open(path_to_benchmark + "/function.py", "w+") as code_file: + code = code_composer.compose(config) + code_file.write(code) + +# Push requirements to benchmarks/600.generated/610.generated/python/requirements.txt +with open(path_to_benchmark + "/requirements.txt", "w+") as requirements_file: + requirements = requirements_composer.compose(config) + print("Req: " + requirements) + requirements_file.write(requirements) + +# Create input.py file +with open(path_to_benchmark + "/../input.py", "w+") as input_file: + code = input_composer.compose(config) + input_file.write(code) \ No newline at end of file diff --git a/benchmark_generator/input_composer.py b/benchmark_generator/input_composer.py new file mode 100644 
index 00000000..0cf2922e --- /dev/null +++ b/benchmark_generator/input_composer.py @@ -0,0 +1,27 @@ +import uuid + +def compose(config): + input_dict = {} + print(config) + for (benchmark, benchmark_config) in config: + if benchmark == "function_input" and "input_size" in benchmark_config.keys(): + # input size is measured by number of elements + for i in range(int(benchmark_config["input_size"])): + input_dict[str(uuid.uuid1())] = 100 + + # add needed values + + # generate code + code = "" + code += "input_dict = " + str(input_dict) + "\n" + + + code += """def buckets_count(): + return (0, 0)\n""" + + + code += """def generate_input(data_dir, size, input_buckets, output_buckets, upload_func): + return input_dict """ + return code + + \ No newline at end of file diff --git a/benchmark_generator/memory/python/function.py b/benchmark_generator/memory/python/function.py new file mode 100644 index 00000000..77809ccf --- /dev/null +++ b/benchmark_generator/memory/python/function.py @@ -0,0 +1,22 @@ +#test +config = { + "size_in_bytes": 1024 * 1024 +} +result = {} +number = 0 +#import +import numpy as np +import time +#function +def allocate(size_in_bytes): + t0 = time.clock() + arr = np.ones(int(size_in_bytes/4), dtype=np.dtype("int32")) + t1 = time.clock() + return { + "time": t1 - t0, + "size_in_bytes": size_in_bytes + } +#run +size_of_allocated_memory = config.get("size_in_bytes", 1024 * 1024) # Default 1 MB +result[str(number)] = (allocate(size_of_allocated_memory)) +print(result) diff --git a/benchmark_generator/memory/python/requirements.txt b/benchmark_generator/memory/python/requirements.txt new file mode 100644 index 00000000..683f00cc --- /dev/null +++ b/benchmark_generator/memory/python/requirements.txt @@ -0,0 +1 @@ +numpy==1.18.5 \ No newline at end of file diff --git a/benchmark_generator/network/python/function.py b/benchmark_generator/network/python/function.py new file mode 100644 index 00000000..8517e6d3 --- /dev/null +++ b/benchmark_generator/network/python/function.py @@ -0,0 +1,13 @@ +#test +result = {} +number = 0 +#import +import speedtest +#function +def test_network(): + s = speedtest.Speedtest() + return {"upload": s.upload(), + "download": s.download()} +#run +result[str(number)] = test_network() +print(result) diff --git a/benchmark_generator/network/python/requirements.txt b/benchmark_generator/network/python/requirements.txt new file mode 100644 index 00000000..74f935b6 --- /dev/null +++ b/benchmark_generator/network/python/requirements.txt @@ -0,0 +1 @@ +speedtest-cli==2.1.2 \ No newline at end of file diff --git a/benchmark_generator/requirements_composer.py b/benchmark_generator/requirements_composer.py new file mode 100644 index 00000000..8bc91c06 --- /dev/null +++ b/benchmark_generator/requirements_composer.py @@ -0,0 +1,22 @@ +import os + +def load_benchmark_requirements(benchmark_name, language="python"): + current_dir = os.getcwd() + path_to_requirements = os.path.join(current_dir, benchmark_name, language, "requirements.txt" if language == "python" else "sth.js") + if os.path.exists(path_to_requirements) and os.path.isfile(path_to_requirements): + with open(path_to_requirements, "r") as source_file: + requirements = source_file.read() + return requirements + else: + print("Path to: " + path_to_requirements + " doenst exist") + return "" + +def compose(config): + + benchmarks_list = {benchmark for (benchmark, benchmark_config) in config} + + requirements_for_all_benchmarks = "" + for benchmark_name in benchmarks_list: + requirements_for_all_benchmarks += 
"\n" + load_benchmark_requirements(benchmark_name) + return requirements_for_all_benchmarks + diff --git a/benchmark_generator/workload/python/function.py b/benchmark_generator/workload/python/function.py new file mode 100644 index 00000000..1552d52c --- /dev/null +++ b/benchmark_generator/workload/python/function.py @@ -0,0 +1,37 @@ +#test +config = { + "iterations": 1000000, + "operator": "-", + "type": "float32", + "array_size": 10000 +} +result = {} +number = 0 +#import +import numpy as np +import time +import operator as op +#function +def workload(number_of_iterations, dtype, array_size, operator): + a = np.ones(array_size, dtype=dtype) * 2 + b = np.ones(array_size, dtype=dtype) * 3 + t0 = time.clock() + for i in range(number_of_iterations): + c = operator(a, b) + t1 = time.clock() + return {"number_of_operations": number_of_iterations * array_size, + "dtype": str(dtype), + "time": t1 - t0} +#run +string_to_operator = { + "+": op.add, + "-": op.sub, + "*": op.mul, + "/": op.truediv, +} +element_type = np.dtype(config.get("type", np.float)) +number_of_iterations = config.get("iterations", 10000) +array_size = config.get("array_size", 100) +operator = string_to_operator[config.get("operator", "+")] +result[str(number)] = (workload(number_of_iterations, element_type, array_size, operator)) +print(result) diff --git a/benchmark_generator/workload/python/requirements.txt b/benchmark_generator/workload/python/requirements.txt new file mode 100644 index 00000000..683f00cc --- /dev/null +++ b/benchmark_generator/workload/python/requirements.txt @@ -0,0 +1 @@ +numpy==1.18.5 \ No newline at end of file diff --git a/config/generator_config.json b/config/generator_config.json new file mode 100644 index 00000000..99df8796 --- /dev/null +++ b/config/generator_config.json @@ -0,0 +1,43 @@ +[ + [ + "workload", + { + "iterations": 10000, + "operator": "-", + "type": "float32", + "array_size": 1000 + } + ], + [ + "memory", + { + "size_in_bytes": 1048576 + } + ], + [ + "workload", + { + "iterations": 10000, + "operator": "-", + "type": "float32", + "array_size": 1000 + } + ], + [ + "function_input", + { + "input_size":10, + "output_size":0 + } + ], + [ + "network", + {} + ], + [ + "disc", + { + "block_size": 100000000 + } + ] +] \ No newline at end of file diff --git a/generate.sh b/generate.sh new file mode 100755 index 00000000..91a9bb83 --- /dev/null +++ b/generate.sh @@ -0,0 +1,2 @@ +cd benchmark_generator +python3 generator.py ./../config/generator_config.json \ No newline at end of file From 3b81b1ec7842e74e7cd312459b261a2e9b5c0f7b Mon Sep 17 00:00:00 2001 From: Rafal Mucha Date: Fri, 3 Jul 2020 14:06:03 +0200 Subject: [PATCH 06/13] Add new benchmark --- .gitignore | 10 +++ benchmark_generator/generator.py | 6 +- benchmark_generator/input_composer.py | 18 +++-- .../storage/python/function.py | 67 +++++++++++++++++++ benchmarks/meta/composer.py | 62 ----------------- benchmarks/meta/memory/python/function.py | 20 ------ benchmarks/meta/workload/python/function.py | 35 ---------- config/generator_config.json | 6 ++ 8 files changed, 100 insertions(+), 124 deletions(-) create mode 100644 benchmark_generator/storage/python/function.py delete mode 100644 benchmarks/meta/composer.py delete mode 100644 benchmarks/meta/memory/python/function.py delete mode 100644 benchmarks/meta/workload/python/function.py diff --git a/.gitignore b/.gitignore index e707ac85..0da12be1 100644 --- a/.gitignore +++ b/.gitignore @@ -170,3 +170,13 @@ dmypy.json sebs-* # cache cache + +# generated benchmarks 
+benchmarks/600.generated + +# csv +*.csv + +# generated code by sebs +scripts/code +scripts/experiments.json \ No newline at end of file diff --git a/benchmark_generator/generator.py b/benchmark_generator/generator.py index 2e572197..98814a4c 100644 --- a/benchmark_generator/generator.py +++ b/benchmark_generator/generator.py @@ -12,17 +12,17 @@ config = json.load(config_file) # Generate directory for benchmark -path_to_benchmark = "./../benchmarks/600.generated/610.generated/python" +path_to_benchmark = "./../benchmarks/600.generated/620.generated/python" if not os.path.exists(path_to_benchmark): os.makedirs(path_to_benchmark) -# Push code to benchmarks/600.generated/610.generated/python/function.py +# Push code to benchmarks/600.generated/620.generated/python/function.py with open(path_to_benchmark + "/function.py", "w+") as code_file: code = code_composer.compose(config) code_file.write(code) -# Push requirements to benchmarks/600.generated/610.generated/python/requirements.txt +# Push requirements to benchmarks/600.generated/620.generated/python/requirements.txt with open(path_to_benchmark + "/requirements.txt", "w+") as requirements_file: requirements = requirements_composer.compose(config) print("Req: " + requirements) diff --git a/benchmark_generator/input_composer.py b/benchmark_generator/input_composer.py index 0cf2922e..261e35d4 100644 --- a/benchmark_generator/input_composer.py +++ b/benchmark_generator/input_composer.py @@ -1,6 +1,8 @@ import uuid def compose(config): + benchmarks_list = {benchmark for (benchmark, benchmark_config) in config} + input_dict = {} print(config) for (benchmark, benchmark_config) in config: @@ -15,12 +17,20 @@ def compose(config): code = "" code += "input_dict = " + str(input_dict) + "\n" - - code += """def buckets_count(): + if "storage" in benchmarks_list: + code += """def buckets_count(): + return (0, 1)\n""" + else: + code += """def buckets_count(): return (0, 0)\n""" - - code += """def generate_input(data_dir, size, input_buckets, output_buckets, upload_func): + if "storage" in benchmarks_list: + code += """def generate_input(data_dir, size, input_buckets, output_buckets, upload_func): + input_dict = {'bucket': {}} + input_dict['bucket']['output'] = output_buckets[0] + return input_dict """ + else: + code += """def generate_input(data_dir, size, input_buckets, output_buckets, upload_func): return input_dict """ return code diff --git a/benchmark_generator/storage/python/function.py b/benchmark_generator/storage/python/function.py new file mode 100644 index 00000000..31dd367e --- /dev/null +++ b/benchmark_generator/storage/python/function.py @@ -0,0 +1,67 @@ +#test +result = {} +config = { + "entries_number": 1000 +} +number = 0 +event = {} +#import +from . import storage +import uuid +import time +import traceback +import io +#function +def generate_data(entries_number): + dict_to_fill = {} + for i in range(entries_number): + dict_to_fill[str(uuid.uuid1())] = str(uuid.uuid1()) + return dict_to_fill + +def upload_to_bucket(config, bytes_buffer): + (client, output_bucket) = config + try: + key_name = client.upload_stream(output_bucket, "sebs_test.sth", bytes_buffer) # WARN- name has to have extension! 
Look at storage implementation + except Exception as inst: + key_name = str(inst) + "\n" + traceback.format_exc() + return key_name + +def download_from_bucket(config, file_key): + (client, output_bucket) = config + buffer = client.download_stream(output_bucket, file_key) + downloaded_size = len(buffer.tobytes()) + return downloaded_size + +def test_bucket_like(config, dict_to_upload): + string_to_upload = str(dict_to_upload) + bytes_to_upload = str.encode(string_to_upload) + buffer_to_upload = io.BytesIO(bytes_to_upload) + t0 = time.clock() + key = upload_to_bucket(config, buffer_to_upload) + t1 = time.clock() + downloaded_bytes = download_from_bucket(config, key) + t2 = time.clock() + return { + "uploaded_to_bucket_bytes": len(bytes_to_upload), + "upload_time": t1 - t0, + "downloaded_from_bucket_bytes": downloaded_bytes, + "download_time": t2 - t1, + "key": key + } + +def test_storage(dict_to_upload, config, storage_type="bucket"): + if storage_type == "bucket": + return test_bucket_like(config, dict_to_upload) + elif True: + # not implemented + pass + return {} + +#run +output_bucket = event.get('bucket').get('output') +entries_number = config.get("entries_number", 10) +client = storage.storage.get_instance() +dict_to_upload = generate_data(entries_number) +bucket_config = (client, output_bucket) +result[str(number)] = test_storage(dict_to_upload, bucket_config) +print(result) diff --git a/benchmarks/meta/composer.py b/benchmarks/meta/composer.py deleted file mode 100644 index 35cba2c9..00000000 --- a/benchmarks/meta/composer.py +++ /dev/null @@ -1,62 +0,0 @@ -import os - -def load_benchmark_code(benchmark_name, language="python"): - current_dir = os.getcwd() - path_to_code = os.path.join(current_dir, benchmark_name, language, "function.py" if language == "python" else "sth.js") - with open(path_to_code, "r") as source_file: - source_code = source_file.read() - [_, after_test] = source_code.split("#test") - [_, after_import] = after_test.split("#import") - [import_part, after_function] = after_import.split("#function") - [function_part, run_part] = after_function.split("#run") - return { - "import": import_part, - "function": function_part, - "run": run_part - } - - - -config = [ - ("workload", { - "iterations": 10000, - "operator": "-", - "type": "float32", - "array_size": 1000 - } - ), - ("memory", { - "size_in_bytes": 1024 * 1024 * 1024 - }), - ("workload", { - "iterations": 10000, - "operator": "-", - "type": "float32", - "array_size": 1000 - } - ) -] - -code = "" - -benchmarks_list = {benchmark for (benchmark, benchmark_config) in config} - -# load code of benchmarks -code_maps = { - benchmark_name: load_benchmark_code(benchmark_name) for benchmark_name in benchmarks_list -} - -# add imports -for code_map in code_maps.values(): - code += code_map["import"] + "\n" - -#add functions -for code_map in code_maps.values(): - code += code_map["function"] + "\n" - -#add invoke of benchmarks -for (benchmark_name, benchmark_config) in config: - code += "config = " + str(benchmark_config) - code += code_maps[benchmark_name]["run"] - -print(code) \ No newline at end of file diff --git a/benchmarks/meta/memory/python/function.py b/benchmarks/meta/memory/python/function.py deleted file mode 100644 index 2a3131b3..00000000 --- a/benchmarks/meta/memory/python/function.py +++ /dev/null @@ -1,20 +0,0 @@ -#test -config = { - "size_in_bytes": 1024 * 1024 * 1024 -} -#import -import numpy as np -import time -#function -def allocate(size_in_bytes): - t0 = time.clock() - arr = 
np.ones(int(size_in_bytes/4), dtype=np.dtype("int32")) - t1 = time.clock() - return { - "time": t1 - t0, - "size_in_bytes": size_in_bytes - } -#run -size_of_allocated_memory = config.get("size_in_bytes", 1024 * 1024) # Default 1 MB -result = allocate(size_of_allocated_memory) -print(result) diff --git a/benchmarks/meta/workload/python/function.py b/benchmarks/meta/workload/python/function.py deleted file mode 100644 index b3992449..00000000 --- a/benchmarks/meta/workload/python/function.py +++ /dev/null @@ -1,35 +0,0 @@ -#test -config = { - "iterations": 1000000, - "operator": "-", - "type": "float32", - "array_size": 10000 -} -#import -import numpy as np -import time -import operator as op -#function -def workload(number_of_iterations, dtype, array_size, operator): - a = np.ones(array_size, dtype=dtype) * 2 - b = np.ones(array_size, dtype=dtype) * 3 - t0 = time.clock() - for i in range(number_of_iterations): - c = operator(a, b) - t1 = time.clock() - return {"number_of_operations": number_of_iterations * array_size, - "dtype": dtype, - "time": t1 - t0} -#run -string_to_operator = { - "+": op.add, - "-": op.sub, - "*": op.mul, - "/": op.truediv, -} -element_type = np.dtype(config.get("type", np.float)) -number_of_iterations = config.get("iterations", 10000) -array_size = config.get("array_size", 100) -operator = string_to_operator[config.get("operator", "+")] -result = workload(number_of_iterations, element_type, array_size, operator) -print(result) diff --git a/config/generator_config.json b/config/generator_config.json index 99df8796..8dbd4c4d 100644 --- a/config/generator_config.json +++ b/config/generator_config.json @@ -39,5 +39,11 @@ { "block_size": 100000000 } + ], + [ + "storage", + { + "entries_number": 1000000 + } ] ] \ No newline at end of file From e2280e6baf3db266f856aa84ad77b7f02b19402d Mon Sep 17 00:00:00 2001 From: Rafal Mucha Date: Fri, 3 Jul 2020 14:09:22 +0200 Subject: [PATCH 07/13] Fix sebs.py --- sebs.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/sebs.py b/sebs.py index 0965fa52..ad138ad1 100755 --- a/sebs.py +++ b/sebs.py @@ -200,6 +200,7 @@ benchmark = sebs_client.get_benchmark( args.benchmark, output_dir, deployment_client, experiment_config ) + print(benchmark) storage = deployment_client.get_storage(experiment_config) input_config = benchmark.prepare_input(storage=storage, size=args.size) func = deployment_client.get_function(benchmark) @@ -221,7 +222,7 @@ { "begin": float(begin.strftime("%s.%f")), "end": float(end.strftime("%s.%f")), - "result": ret, + "result": ret.output, } ] if bucket: @@ -232,7 +233,7 @@ } with open("experiments.json", "w") as out_f: print(benchmark_summary) - json.dump(str(benchmark_summary), out_f, indent=2) + json.dump(benchmark_summary, out_f, indent=2) elif args.action == "experiment": # Prepare benchmark input input_config = prepare_input( From 5d7407af968704c14d4d1a4e181da9158d2622ef Mon Sep 17 00:00:00 2001 From: Rafal Mucha Date: Fri, 3 Jul 2020 14:45:35 +0200 Subject: [PATCH 08/13] Add sleep --- benchmark_generator/sleep/python/function.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 benchmark_generator/sleep/python/function.py diff --git a/benchmark_generator/sleep/python/function.py b/benchmark_generator/sleep/python/function.py new file mode 100644 index 00000000..86bd3834 --- /dev/null +++ b/benchmark_generator/sleep/python/function.py @@ -0,0 +1,14 @@ +#test +config = { + "duration": 100 +} +number = 0 +result = {} +#import +from time import sleep +#function + +#run 
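The `#run` marker opens the section that the composer splices into the generated handler; every such section reads its parameters from a `config` dict and stores its output under `result[str(number)]`, and `code_composer.compose()` rebinds both names before splicing each benchmark in. The resulting handler has roughly this shape (a hand-written illustration matching the cleanup in PATCH 09, not actual generator output, and space-indented where the generator uses tabs):

```python
def handler(event):
    result = {}

    number = 0
    config = {'duration': 1}
    # ...spliced "#run" section of the sleep benchmark...

    number = 1
    config = {'size_in_bytes': 1048576}
    # ...spliced "#run" section of the memory benchmark...

    return {'result': result}
```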
+sleep_time = config.get('duration') +sleep(sleep_time) +result[str(number)] = { 'sleep_time': sleep_time } \ No newline at end of file From 7b0db31ef493a411c373ac72afddc6b174dfc308 Mon Sep 17 00:00:00 2001 From: Rafal Mucha Date: Fri, 3 Jul 2020 14:46:29 +0200 Subject: [PATCH 09/13] Clean up composer --- benchmark_generator/code_composer.py | 2 +- config/generator_config.json | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/benchmark_generator/code_composer.py b/benchmark_generator/code_composer.py index 4913ee06..483e59eb 100644 --- a/benchmark_generator/code_composer.py +++ b/benchmark_generator/code_composer.py @@ -54,7 +54,7 @@ def compose(config): handler_function += "config = " + str(benchmark_config) + "\n" handler_function += code_maps[benchmark_name]["run"] - handler_function += """\nresult['result'] = 100\nreturn {'result': result }""" # dummy result, different doesn't work + handler_function += """\nreturn {'result': result }""" # dummy result, different doesn't work code += intend(handler_function) diff --git a/config/generator_config.json b/config/generator_config.json index 8dbd4c4d..d716708e 100644 --- a/config/generator_config.json +++ b/config/generator_config.json @@ -45,5 +45,11 @@ { "entries_number": 1000000 } + ], + [ + "sleep", + { + "duration": 1 + } ] ] \ No newline at end of file From a496b1358db92b922a939ba5b636718b1c2e4c41 Mon Sep 17 00:00:00 2001 From: Rafal Mucha Date: Fri, 3 Jul 2020 15:47:52 +0200 Subject: [PATCH 10/13] Add adding artificial code --- benchmark_generator/code_composer.py | 10 ++++++++++ config/generator_config.json | 6 ++++++ 2 files changed, 16 insertions(+) diff --git a/benchmark_generator/code_composer.py b/benchmark_generator/code_composer.py index 483e59eb..62484070 100644 --- a/benchmark_generator/code_composer.py +++ b/benchmark_generator/code_composer.py @@ -1,4 +1,5 @@ import os +import uuid def load_benchmark_code(benchmark_name, language="python"): current_dir = os.getcwd() @@ -27,6 +28,11 @@ def intend(body): new_body += "\n\t" + line return new_body +def generate_huge_dict(number_of_elements): + return { + str(uuid.uuid1()) + "-" + str(i): str(uuid.uuid1()) for i in range(number_of_elements) # uuid has more predictible size than plain numbers + } + def compose(config): code = "" @@ -54,6 +60,10 @@ def compose(config): handler_function += "config = " + str(benchmark_config) + "\n" handler_function += code_maps[benchmark_name]["run"] + if benchmark_name == "artificial_code": + number_of_elements = benchmark_config.get("number_of_elements", 0) + handler_function += "artificial_dict" + str(number) + " = " + str(generate_huge_dict(number_of_elements)) + handler_function += """\nreturn {'result': result }""" # dummy result, different doesn't work code += intend(handler_function) diff --git a/config/generator_config.json b/config/generator_config.json index d716708e..b98c45ef 100644 --- a/config/generator_config.json +++ b/config/generator_config.json @@ -51,5 +51,11 @@ { "duration": 1 } + ], + [ + "artificial_code", + { + "number_of_elements": 1 + } ] ] \ No newline at end of file From aec554d3e2fa7fa278ab499cc8ce7ce741e5d055 Mon Sep 17 00:00:00 2001 From: Rafal Mucha Date: Fri, 10 Jul 2020 16:51:34 +0200 Subject: [PATCH 11/13] Add async nodejs --- .gitignore | 5 +- benchmark_generator/code_composer.py | 76 ++++++++++++++----- .../disc/async_nodejs/function.js | 43 +++++++++++ .../disc/async_nodejs/package.json | 13 ++++ .../function_input/async_nodejs/function.js | 22 ++++++ 
.../function_input/async_nodejs/package.json | 13 ++++ benchmark_generator/generator.py | 51 +++++++++---- .../memory/async_nodejs/function.js | 24 ++++++ .../memory/async_nodejs/package.json | 12 +++ .../network/async_nodejs/function.js | 25 ++++++ .../network/async_nodejs/package.json | 13 ++++ benchmark_generator/requirements_composer.py | 48 +++++++++++- .../sleep/async_nodejs/function.js | 17 +++++ .../sleep/async_nodejs/package.json | 11 +++ .../storage/async_nodejs/function.js | 76 +++++++++++++++++++ .../storage/async_nodejs/package.json | 12 +++ .../storage/python/function.py | 3 +- .../workload/async_nodejs/function.js | 35 +++++++++ .../workload/async_nodejs/package.json | 12 +++ 19 files changed, 470 insertions(+), 41 deletions(-) create mode 100644 benchmark_generator/disc/async_nodejs/function.js create mode 100644 benchmark_generator/disc/async_nodejs/package.json create mode 100644 benchmark_generator/function_input/async_nodejs/function.js create mode 100644 benchmark_generator/function_input/async_nodejs/package.json create mode 100644 benchmark_generator/memory/async_nodejs/function.js create mode 100644 benchmark_generator/memory/async_nodejs/package.json create mode 100644 benchmark_generator/network/async_nodejs/function.js create mode 100644 benchmark_generator/network/async_nodejs/package.json create mode 100644 benchmark_generator/sleep/async_nodejs/function.js create mode 100644 benchmark_generator/sleep/async_nodejs/package.json create mode 100644 benchmark_generator/storage/async_nodejs/function.js create mode 100644 benchmark_generator/storage/async_nodejs/package.json create mode 100644 benchmark_generator/workload/async_nodejs/function.js create mode 100644 benchmark_generator/workload/async_nodejs/package.json diff --git a/.gitignore b/.gitignore index 0da12be1..25e78028 100644 --- a/.gitignore +++ b/.gitignore @@ -179,4 +179,7 @@ benchmarks/600.generated # generated code by sebs scripts/code -scripts/experiments.json \ No newline at end of file +scripts/experiments.json + +# +node_modules/ \ No newline at end of file diff --git a/benchmark_generator/code_composer.py b/benchmark_generator/code_composer.py index 62484070..c5a2c90c 100644 --- a/benchmark_generator/code_composer.py +++ b/benchmark_generator/code_composer.py @@ -3,7 +3,7 @@ def load_benchmark_code(benchmark_name, language="python"): current_dir = os.getcwd() - path_to_code = os.path.join(current_dir, benchmark_name, language, "function.py" if language == "python" else "sth.js") + path_to_code = os.path.join(current_dir, benchmark_name, language, "function.py" if language == "python" else "function.js") if os.path.exists(path_to_code): with open(path_to_code, "r") as source_file: source_code = source_file.read() @@ -16,6 +16,8 @@ def load_benchmark_code(benchmark_name, language="python"): "function": function_part, "run": run_part } + else: + print("Path: " + path_to_code + " not exist") return { "import": "", "function": "", @@ -33,7 +35,50 @@ def generate_huge_dict(number_of_elements): str(uuid.uuid1()) + "-" + str(i): str(uuid.uuid1()) for i in range(number_of_elements) # uuid has more predictible size than plain numbers } -def compose(config): +def generate_python_handler(config, code_maps): + code = "\ndef handler(event):\n" + #add invoke of benchmarks + handler_function = "result = {}\n" + for (number, (benchmark_name, benchmark_config)) in enumerate(config): + handler_function += "\nnumber = " + str(number) + "\n" + handler_function += "config = " + str(benchmark_config) + "\n" + 
handler_function += code_maps[benchmark_name]["run"] + + if benchmark_name == "artificial_code": + number_of_elements = benchmark_config.get("number_of_elements", 0) + handler_function += "artificial_dict" + str(number) + " = " + str(generate_huge_dict(number_of_elements)) + + handler_function += """\nreturn {'result': result }""" # dummy result, different doesn't work + + code += intend(handler_function) + + return code + + +def generate_async_nodejs_handler(config, code_maps): + code = "\nexports.handler = async function(event) {\n" + #add invoke of benchmarks + handler_function = """var result = {};\nawait (async () => { return [result, 0] })()""" + for (number, (benchmark_name, benchmark_config)) in enumerate(config): + handler_function += ".then(async ([result, number]) => {\n" + inner_function = "var config = " + str(benchmark_config) + ";\n" + inner_function += code_maps[benchmark_name]["run"] + "\n" + inner_function += "return [result, number + 1]\n" + handler_function += intend(inner_function) + handler_function += "\n})\n" + + + if benchmark_name == "artificial_code": + number_of_elements = benchmark_config.get("number_of_elements", 0) + handler_function += "var artificial_dict" + str(number) + " = " + str(generate_huge_dict(number_of_elements)) + ";" + + handler_function += """\nreturn {'result': result }\n}""" + + code += intend(handler_function) + + return code + +def compose(config, language): code = "" @@ -41,31 +86,22 @@ def compose(config): # load code of benchmarks code_maps = { - benchmark_name: load_benchmark_code(benchmark_name) for benchmark_name in benchmarks_list + benchmark_name: load_benchmark_code(benchmark_name, language) for benchmark_name in benchmarks_list } # add imports for code_map in code_maps.values(): - code += code_map["import"] + "\n" + code += code_map["import"] + "\n" # todo not so easy for nodejs- twice imports are not possible #add functions for code_map in code_maps.values(): code += code_map["function"] + "\n" - code += "\ndef handler(event):\n" - #add invoke of benchmarks - handler_function = "result = {}\n" - for (number, (benchmark_name, benchmark_config)) in enumerate(config): - handler_function += "\nnumber = " + str(number) + "\n" - handler_function += "config = " + str(benchmark_config) + "\n" - handler_function += code_maps[benchmark_name]["run"] - - if benchmark_name == "artificial_code": - number_of_elements = benchmark_config.get("number_of_elements", 0) - handler_function += "artificial_dict" + str(number) + " = " + str(generate_huge_dict(number_of_elements)) + if language == "python": + return code + generate_python_handler(config, code_maps) + elif language == "async_nodejs": + return code + generate_async_nodejs_handler(config, code_maps) + else: + return "" - handler_function += """\nreturn {'result': result }""" # dummy result, different doesn't work - - code += intend(handler_function) - - return code + diff --git a/benchmark_generator/disc/async_nodejs/function.js b/benchmark_generator/disc/async_nodejs/function.js new file mode 100644 index 00000000..8d85d4bb --- /dev/null +++ b/benchmark_generator/disc/async_nodejs/function.js @@ -0,0 +1,43 @@ +//#test +var result = {}; +var config = { + "entries_number": 1000 +}; +var number = 0; +//#import +var fs = require('fs'); +var uuid = require('uuid'); +var uuidv1 = uuid.v1; +//#function +function generate_data(entries_number) { + var dictToFill = {}; + for(var i = 0;i < entries_number;i++) { + dictToFill[uuidv1()] = uuidv1() + } + return dictToFill +} +async function 
testDisc(entries_number) { + try { + var data = generate_data(entries_number); + var path = "/tmp/serverless-benchmark-test-file.json"; + var dataAsString = JSON.stringify(data); + var t0 = new Date(); + fs.writeFile(path, dataAsString) + var t1 = new Date(); + fs.readFile(path) + var t2 = new Date() + return { + "write_time" : t1 - t0, + "read_time": t2 - t1, + "bytes": dataAsString.length + } + } catch (error) { + return { "error": error.toString() } + } +}; +//#run +var entries_number = config.entries_number; +await testDisc(entries_number).then(returnJson => { + result[number] = returnJson; + } +); \ No newline at end of file diff --git a/benchmark_generator/disc/async_nodejs/package.json b/benchmark_generator/disc/async_nodejs/package.json new file mode 100644 index 00000000..2c4ed0f7 --- /dev/null +++ b/benchmark_generator/disc/async_nodejs/package.json @@ -0,0 +1,13 @@ +{ + "name": "disc", + "version": "1.0.0", + "description": "", + "author": "", + "license": "", + "dependencies": { + "uuid": "8.2.0" + }, + "devDependencies": { + } + } + \ No newline at end of file diff --git a/benchmark_generator/function_input/async_nodejs/function.js b/benchmark_generator/function_input/async_nodejs/function.js new file mode 100644 index 00000000..734aefd4 --- /dev/null +++ b/benchmark_generator/function_input/async_nodejs/function.js @@ -0,0 +1,22 @@ +//#test +var config = { + "output_size": 100 +}; +var result = {}; +//#import +var uuid = require('uuid'); +var uuidv1 = uuid.v1; +//#function +function fillDict(dictToFill, entries_number) { + try { + for(var i = 0;i < entries_number;i++) { + dictToFill[uuidv1().toString()] = uuidv1().toString() + } + return dictToFill + } catch (error) { + return {"Error": error.toString} + } +} +//#run +var number_of_entries = config["output_size"]; +fillDict(result, number_of_entries); \ No newline at end of file diff --git a/benchmark_generator/function_input/async_nodejs/package.json b/benchmark_generator/function_input/async_nodejs/package.json new file mode 100644 index 00000000..4e596af9 --- /dev/null +++ b/benchmark_generator/function_input/async_nodejs/package.json @@ -0,0 +1,13 @@ +{ + "name": "sleep", + "version": "1.0.0", + "description": "", + "author": "", + "license": "", + "dependencies": { + "uuid": "8.2.0" + }, + "devDependencies": { + } + } + \ No newline at end of file diff --git a/benchmark_generator/generator.py b/benchmark_generator/generator.py index 98814a4c..c7380523 100644 --- a/benchmark_generator/generator.py +++ b/benchmark_generator/generator.py @@ -9,24 +9,47 @@ print("Missing argument, path to config") with open(sys.argv[1]) as config_file: - config = json.load(config_file) + total_config = json.load(config_file) -# Generate directory for benchmark -path_to_benchmark = "./../benchmarks/600.generated/620.generated/python" -if not os.path.exists(path_to_benchmark): - os.makedirs(path_to_benchmark) +if total_config["language"] == "python": + config = total_config["config"] -# Push code to benchmarks/600.generated/620.generated/python/function.py + # Generate directory for benchmark + path_to_benchmark = "./../benchmarks/600.generated/620.generated/python" + if not os.path.exists(path_to_benchmark): + os.makedirs(path_to_benchmark) -with open(path_to_benchmark + "/function.py", "w+") as code_file: - code = code_composer.compose(config) - code_file.write(code) + # Push code to benchmarks/600.generated/620.generated/python/function.py -# Push requirements to benchmarks/600.generated/620.generated/python/requirements.txt -with 
open(path_to_benchmark + "/requirements.txt", "w+") as requirements_file: - requirements = requirements_composer.compose(config) - print("Req: " + requirements) - requirements_file.write(requirements) + with open(path_to_benchmark + "/function.py", "w+") as code_file: + code = code_composer.compose(config, "python") + code_file.write(code) + + # Push requirements to benchmarks/600.generated/620.generated/python/requirements.txt + with open(path_to_benchmark + "/requirements.txt", "w+") as requirements_file: + requirements = requirements_composer.compose(config) + print("Req: " + requirements) + requirements_file.write(requirements) + +elif total_config["language"] == "async_nodejs": + config = total_config["config"] + + # Generate directory for benchmark + path_to_benchmark = "./../benchmarks/600.generated/620.generated/nodejs" + if not os.path.exists(path_to_benchmark): + os.makedirs(path_to_benchmark) + + # Push code to benchmarks/600.generated/620.generated/nodejs/function.js + + with open(path_to_benchmark + "/function.js", "w+") as code_file: + code = code_composer.compose(config, "async_nodejs") + code_file.write(code) + + # Push requirements to benchmarks/600.generated/620.generated/nodejs/package.json + with open(path_to_benchmark + "/package.json", "w+") as requirements_file: + requirements = requirements_composer.compose(config, "async_nodejs") + print("Req: " + requirements) + requirements_file.write(requirements) # Create input.py file with open(path_to_benchmark + "/../input.py", "w+") as input_file: diff --git a/benchmark_generator/memory/async_nodejs/function.js b/benchmark_generator/memory/async_nodejs/function.js new file mode 100644 index 00000000..cc5c14cb --- /dev/null +++ b/benchmark_generator/memory/async_nodejs/function.js @@ -0,0 +1,24 @@ +//#test +var config = { + "size_in_bytes": 10485760 +}; +var result = {}; +var number = 0; +//#import +var math = require('mathjs'); +//#function +const testMemory = async (size) => { + var t0 = new Date(); + var a = math.ones([size / 8]); + var t1 = new Date(); + return { + "time": t1 - t0, + "size_in_bytes": size + } +}; +//#run +var array_size_in_bytes = config["size_in_bytes"]; +await testMemory(array_size_in_bytes).then(returnJson => { + result[number] = returnJson; + } +); diff --git a/benchmark_generator/memory/async_nodejs/package.json b/benchmark_generator/memory/async_nodejs/package.json new file mode 100644 index 00000000..b46a7b4a --- /dev/null +++ b/benchmark_generator/memory/async_nodejs/package.json @@ -0,0 +1,12 @@ +{ + "name": "sleep", + "version": "1.0.0", + "description": "", + "author": "", + "license": "", + "dependencies": { + "mathjs": "7.0.2" + }, + "devDependencies": { + } +} diff --git a/benchmark_generator/network/async_nodejs/function.js b/benchmark_generator/network/async_nodejs/function.js new file mode 100644 index 00000000..aa71e4c9 --- /dev/null +++ b/benchmark_generator/network/async_nodejs/function.js @@ -0,0 +1,25 @@ +//#test +var result = {}; +var number = 0; +//#import +var rewire = require('rewire'); +var speedTest = rewire('speedtest-net'); +speedTest.__set__("__dirname", "/tmp") // must for AWS, since only /tmp is not read-only and lib is trying to save data in __dirname +//#function +const testNetwork = async () => { + var resultJson = {} + try { + await speedTest(options = {acceptLicense : true, acceptGdpr: true}).then(res => { + // resultJson["download"] = res.download.bandwidth; + // resultJson["upload"] = res.upload.bandwidth + }) + } catch (exception) { + resultJson["error"] = 
exception.toString() + } + return resultJson +}; +//#run +await testNetwork().then(returnJson => { + result[number] = returnJson; + } +) diff --git a/benchmark_generator/network/async_nodejs/package.json b/benchmark_generator/network/async_nodejs/package.json new file mode 100644 index 00000000..cadd329d --- /dev/null +++ b/benchmark_generator/network/async_nodejs/package.json @@ -0,0 +1,13 @@ +{ + "name": "sleep", + "version": "1.0.0", + "description": "", + "author": "", + "license": "", + "dependencies": { + "speedtest-net": "2.1.1", + "rewire": "5.0.0" + }, + "devDependencies": { + } +} diff --git a/benchmark_generator/requirements_composer.py b/benchmark_generator/requirements_composer.py index 8bc91c06..9ecc941b 100644 --- a/benchmark_generator/requirements_composer.py +++ b/benchmark_generator/requirements_composer.py @@ -1,8 +1,9 @@ import os +import json -def load_benchmark_requirements(benchmark_name, language="python"): +def load_benchmark_requirements(benchmark_name): current_dir = os.getcwd() - path_to_requirements = os.path.join(current_dir, benchmark_name, language, "requirements.txt" if language == "python" else "sth.js") + path_to_requirements = os.path.join(current_dir, benchmark_name, "python", "requirements.txt") if os.path.exists(path_to_requirements) and os.path.isfile(path_to_requirements): with open(path_to_requirements, "r") as source_file: requirements = source_file.read() @@ -11,8 +12,7 @@ def load_benchmark_requirements(benchmark_name, language="python"): print("Path to: " + path_to_requirements + " doenst exist") return "" -def compose(config): - +def prepare_python_file(config): benchmarks_list = {benchmark for (benchmark, benchmark_config) in config} requirements_for_all_benchmarks = "" @@ -20,3 +20,43 @@ def compose(config): requirements_for_all_benchmarks += "\n" + load_benchmark_requirements(benchmark_name) return requirements_for_all_benchmarks +def load_benchmark_dependencies(benchmark_name, language): + current_dir = os.getcwd() + path_to_dependencies = os.path.join(current_dir, benchmark_name, language, "package.json") + if os.path.exists(path_to_dependencies) and os.path.isfile(path_to_dependencies): + with open(path_to_dependencies, "r") as json_file: + package_json = json.load(json_file) + return (package_json["dependencies"], package_json["devDependencies"]) + else: + print("Path to: " + path_to_dependencies + " doenst exist") + return ({}, {}) + +def prepare_nodejs_file(config, language): + benchmarks_list = {benchmark for (benchmark, benchmark_config) in config} + + dependencies_list = [load_benchmark_dependencies(benchmark_name, language) for benchmark_name in benchmarks_list] + + dependencies = {} + dev_dependencies = {} + for dependency, dev_dependency in dependencies_list: + dependencies.update(dependency) + dev_dependencies.update(dev_dependency) + + return json.dumps({ + "name": "generated_benchmark", + "version": "1.0.0", + "description": "", + "author": "", + "license": "", + "dependencies": dependencies, + "devDependencies": dev_dependencies + }) + + +def compose(config, language): + if language == "python": + return prepare_python_file(config) + else: + return prepare_nodejs_file(config, language) + + diff --git a/benchmark_generator/sleep/async_nodejs/function.js b/benchmark_generator/sleep/async_nodejs/function.js new file mode 100644 index 00000000..809d211c --- /dev/null +++ b/benchmark_generator/sleep/async_nodejs/function.js @@ -0,0 +1,17 @@ +//#test +var config = { + "duration": 100 +}; +var result = {}; +var number = 0; 
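`prepare_nodejs_file()` above merges the per-benchmark `package.json` dependency maps with `dict.update()`, so if two benchmarks ever pin different versions of the same package, whichever benchmark the set iteration happens to yield last wins silently. A hypothetical stricter merge (not part of the series) that surfaces the conflict instead:

```python
def merge_node_dependencies(dependency_dicts):
    # Union the "dependencies" maps, refusing to overwrite a differing pin.
    merged = {}
    for dependencies in dependency_dicts:
        for name, version in dependencies.items():
            if merged.get(name, version) != version:
                raise ValueError("conflicting pins for %s: %s vs %s"
                                 % (name, merged[name], version))
            merged[name] = version
    return merged
```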
+//#import +//#function +const sleep = async time => { + setTimeout(() => {}, time) + return { + "sleep_time": time + } +}; +//#run +var sleep_time = config["duration"]; +await sleep(sleep_time).then(resJson => {result[number] = resJson}); diff --git a/benchmark_generator/sleep/async_nodejs/package.json b/benchmark_generator/sleep/async_nodejs/package.json new file mode 100644 index 00000000..af40778b --- /dev/null +++ b/benchmark_generator/sleep/async_nodejs/package.json @@ -0,0 +1,11 @@ +{ + "name": "sleep", + "version": "1.0.0", + "description": "", + "author": "", + "license": "", + "dependencies": { + }, + "devDependencies": { + } +} diff --git a/benchmark_generator/storage/async_nodejs/function.js b/benchmark_generator/storage/async_nodejs/function.js new file mode 100644 index 00000000..9d73c417 --- /dev/null +++ b/benchmark_generator/storage/async_nodejs/function.js @@ -0,0 +1,76 @@ +//#test +var result = {}; +var config = { + "entries_number": 1000 +}; +var number = 0; +var event = {}; +//#import +var storage = require('./storage'); +var uuid = require('uuid'); +var uuidv1 = uuid.v1; +var { Readable } = require("stream") +//#function +function generate_data(entries_number) { + var dictToFill = {}; + for(var i = 0;i < entries_number;i++) { + dictToFill[uuidv1()] = uuidv1() + } + return dictToFill +} +function streamToPromise(stream) { + return new Promise(function(resolve, reject) { + stream.on("close", () => { + resolve(); + }); + stream.on("error", reject); + }) + } +async function testBucketStorage(dataAsDict, bucket_config) { + var [client, bucket] = bucket_config; + var dataAsString = JSON.stringify(dataAsDict); + var inputStream = Readable.from(dataAsString); + var result = {}; + var t0 = new Date() + var [writeStream, uploadPromise, storageKey] = client.uploadStream(bucket, "serverless-benchmark-data.json") + inputStream.pipe(writeStream) + await uploadPromise.then(async () => { + var t1 = new Date() + read_promise = client.downloadStream(bucket, storageKey) + await read_promise.then(async (stream) => { + await (streamToPromise(stream).then((any) => { + var t2 = new Date(); + result = { + "uploaded_to_bucket_bytes": dataAsString.length, + "upload_time": t1 - t0, + "downloaded_from_bucket_bytes": dataAsString.length, + "download_time": t2 - t1, + "key": storageKey + } + })) + }) + }) + return result; +} +async function testStorage(entries_number, bucket_config, storage_type) { + try { + var data = generate_data(entries_number); + if(storage_type == "bucket") { + var res = {} + await testBucketStorage(data, bucket_config).then((resJson) => res = resJson) + return res; + } + return { "error": "unknown storage"} + } catch (error) { + return { "error": error.toString() } + } +}; +//#run +var output_bucket = event.bucket.output; +var entries_number = config.entries_number; +let client = new storage.storage(); +var bucket_config = [client, output_bucket]; +await testStorage(entries_number, bucket_config, "bucket").then(returnJson => { + result[number] = returnJson; + } +); \ No newline at end of file diff --git a/benchmark_generator/storage/async_nodejs/package.json b/benchmark_generator/storage/async_nodejs/package.json new file mode 100644 index 00000000..171182df --- /dev/null +++ b/benchmark_generator/storage/async_nodejs/package.json @@ -0,0 +1,12 @@ +{ + "name": "sleep", + "version": "1.0.0", + "description": "", + "author": "", + "license": "", + "dependencies": { + "uuid": "8.2.0" + }, + "devDependencies": { + } +} diff --git a/benchmark_generator/storage/python/function.py 
b/benchmark_generator/storage/python/function.py index 31dd367e..5e700eef 100644 --- a/benchmark_generator/storage/python/function.py +++ b/benchmark_generator/storage/python/function.py @@ -55,8 +55,7 @@ def test_storage(dict_to_upload, config, storage_type="bucket"): elif True: # not implemented pass - return {} - + return {} #run output_bucket = event.get('bucket').get('output') entries_number = config.get("entries_number", 10) diff --git a/benchmark_generator/workload/async_nodejs/function.js b/benchmark_generator/workload/async_nodejs/function.js new file mode 100644 index 00000000..e2058423 --- /dev/null +++ b/benchmark_generator/workload/async_nodejs/function.js @@ -0,0 +1,35 @@ +//#test +var config = { + "iterations": 10000, + "operator": "-", + "array_size": 10000 +}; +var result = {}; +var number = 0; +//#import +var math = require('mathjs'); +//#function +const performCalculations = async (iterations, operator, array_size) => { + let scope = { + a : math.ones([array_size]), + b : math.ones([array_size]) + }; + var t0 = new Date(); + for (var i = 0; i < iterations;i++) { + var c = math.evaluate("a " + operator + " b", scope); + } + var t1 = new Date(); + return { + "number_of_operations": iterations * array_size, + "dtype": "float64", + "time": t1 - t0 + } +}; +//#run +var iterations = config["iterations"]; +var operator = config["operator"]; +var array_size = config["array_size"]; +await performCalculations(iterations, operator, array_size).then(returnJson => { + result[number] = returnJson; + } +); \ No newline at end of file diff --git a/benchmark_generator/workload/async_nodejs/package.json b/benchmark_generator/workload/async_nodejs/package.json new file mode 100644 index 00000000..b46a7b4a --- /dev/null +++ b/benchmark_generator/workload/async_nodejs/package.json @@ -0,0 +1,12 @@ +{ + "name": "sleep", + "version": "1.0.0", + "description": "", + "author": "", + "license": "", + "dependencies": { + "mathjs": "7.0.2" + }, + "devDependencies": { + } +} From 38dbb8f38f5d243aeb520c74db061104391e76b6 Mon Sep 17 00:00:00 2001 From: Rafal Mucha Date: Sat, 11 Jul 2020 14:17:42 +0200 Subject: [PATCH 12/13] unify config in disc benchmark --- .../disc/async_nodejs/function.js | 27 ++++++++----------- 1 file changed, 11 insertions(+), 16 deletions(-) diff --git a/benchmark_generator/disc/async_nodejs/function.js b/benchmark_generator/disc/async_nodejs/function.js index 8d85d4bb..fbf323fa 100644 --- a/benchmark_generator/disc/async_nodejs/function.js +++ b/benchmark_generator/disc/async_nodejs/function.js @@ -1,7 +1,7 @@ //#test var result = {}; var config = { - "entries_number": 1000 + "block_size": 1024*1024*128 }; var number = 0; //#import @@ -9,35 +9,30 @@ var fs = require('fs'); var uuid = require('uuid'); var uuidv1 = uuid.v1; //#function -function generate_data(entries_number) { - var dictToFill = {}; - for(var i = 0;i < entries_number;i++) { - dictToFill[uuidv1()] = uuidv1() - } - return dictToFill +function generate_data_disc(block_size) { + return Array(block_size + 1).join('x') } -async function testDisc(entries_number) { +async function testDisc(block_size) { try { - var data = generate_data(entries_number); + var data = generate_data_disc(entries_number); var path = "/tmp/serverless-benchmark-test-file.json"; - var dataAsString = JSON.stringify(data); var t0 = new Date(); - fs.writeFile(path, dataAsString) + fs.writeFileSync(path, data); var t1 = new Date(); - fs.readFile(path) - var t2 = new Date() + await fs.readFile(path); + var t2 = new Date(); return { "write_time" : t1 
- t0, "read_time": t2 - t1, - "bytes": dataAsString.length + "bytes": block_size } } catch (error) { return { "error": error.toString() } } }; //#run -var entries_number = config.entries_number; -await testDisc(entries_number).then(returnJson => { +var block_size = config.block_size; +await testDisc(block_size).then(returnJson => { result[number] = returnJson; } ); \ No newline at end of file From b3a9f2d3c8da7bf3c59145580800c5956354e38d Mon Sep 17 00:00:00 2001 From: Rafal Mucha Date: Sat, 11 Jul 2020 14:25:33 +0200 Subject: [PATCH 13/13] unify sleep config --- benchmark_generator/sleep/async_nodejs/function.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benchmark_generator/sleep/async_nodejs/function.js b/benchmark_generator/sleep/async_nodejs/function.js index 809d211c..82535803 100644 --- a/benchmark_generator/sleep/async_nodejs/function.js +++ b/benchmark_generator/sleep/async_nodejs/function.js @@ -7,7 +7,7 @@ var number = 0; //#import //#function const sleep = async time => { - setTimeout(() => {}, time) + setTimeout(() => {}, time * 1000) // s to ms return { "sleep_time": time }
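Taken together, the series is driven through `generate.sh`, which runs `generator.py` on `config/generator_config.json` and writes the composed benchmark under `benchmarks/600.generated/620.generated/`. Note that PATCH 11 changes `generator.py` to expect an object with `language` and `config` keys, while the committed `generator_config.json` still holds the bare list from PATCH 05, so the config presumably needs wrapping; a hedged sketch of that intended usage (the wrapper shape is inferred from `generator.py`, not a file in the series):

```python
import json
import subprocess

# Config in the post-PATCH-11 shape: a language selector plus the
# (benchmark_name, benchmark_config) pairs consumed by the composers.
generator_config = {
    "language": "async_nodejs",  # or "python"
    "config": [
        ["sleep", {"duration": 1}],
        ["workload", {"iterations": 10000, "operator": "-",
                      "type": "float32", "array_size": 1000}],
    ],
}

with open("config/generator_config.json", "w") as config_file:
    json.dump(generator_config, config_file, indent=2)

# generate.sh cds into benchmark_generator/ and invokes generator.py, which
# emits the function code plus requirements.txt/package.json and input.py.
subprocess.run(["./generate.sh"], check=True)
```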