diff --git a/.gitignore b/.gitignore
index e707ac85..25e78028 100644
--- a/.gitignore
+++ b/.gitignore
@@ -170,3 +170,16 @@ dmypy.json
 sebs-*
 # cache
 cache
+
+# generated benchmarks
+benchmarks/600.generated
+
+# csv
+*.csv
+
+# generated code by sebs
+scripts/code
+scripts/experiments.json
+
+# node modules
+node_modules/
\ No newline at end of file
diff --git a/benchmark_generator/code_composer.py b/benchmark_generator/code_composer.py
new file mode 100644
index 00000000..c5a2c90c
--- /dev/null
+++ b/benchmark_generator/code_composer.py
@@ -0,0 +1,107 @@
+import os
+import uuid
+
+def load_benchmark_code(benchmark_name, language="python"):
+    current_dir = os.getcwd()
+    path_to_code = os.path.join(current_dir, benchmark_name, language, "function.py" if language == "python" else "function.js")
+    if os.path.exists(path_to_code):
+        with open(path_to_code, "r") as source_file:
+            source_code = source_file.read()
+        [_, after_test] = source_code.split("#test")
+        [_, after_import] = after_test.split("#import")
+        [import_part, after_function] = after_import.split("#function")
+        [function_part, run_part] = after_function.split("#run")
+        return {
+            "import": import_part,
+            "function": function_part,
+            "run": run_part
+        }
+    else:
+        print("Path: " + path_to_code + " does not exist")
+        return {
+            "import": "",
+            "function": "",
+            "run": ""
+        }
+
+def indent(body):
+    new_body = ""
+    for line in body.splitlines():
+        new_body += "\n\t" + line
+    return new_body
+
+def generate_huge_dict(number_of_elements):
+    return {
+        str(uuid.uuid1()) + "-" + str(i): str(uuid.uuid1()) for i in range(number_of_elements)  # uuids have a more predictable size than plain numbers
+    }
+
+def generate_python_handler(config, code_maps):
+    code = "\ndef handler(event):\n"
+    # add invocations of the benchmarks
+    handler_function = "result = {}\n"
+    for (number, (benchmark_name, benchmark_config)) in enumerate(config):
+        handler_function += "\nnumber = " + str(number) + "\n"
+        handler_function += "config = " + str(benchmark_config) + "\n"
+        handler_function += code_maps[benchmark_name]["run"]
+
+        if benchmark_name == "artificial_code":
+            number_of_elements = benchmark_config.get("number_of_elements", 0)
+            handler_function += "artificial_dict" + str(number) + " = " + str(generate_huge_dict(number_of_elements))
+
+    handler_function += """\nreturn {'result': result }"""  # dummy result; returning anything else does not work
+
+    code += indent(handler_function)
+
+    return code
+
+
+def generate_async_nodejs_handler(config, code_maps):
+    code = "\nexports.handler = async function(event) {\n"
+    # add invocations of the benchmarks
+    handler_function = """var result = {};\nawait (async () => { return [result, 0] })()"""
+    for (number, (benchmark_name, benchmark_config)) in enumerate(config):
+        handler_function += ".then(async ([result, number]) => {\n"
+        inner_function = "var config = " + str(benchmark_config) + ";\n"
+        inner_function += code_maps[benchmark_name]["run"] + "\n"
+        inner_function += "return [result, number + 1]\n"
+        handler_function += indent(inner_function)
+        handler_function += "\n})\n"
+
+
+        if benchmark_name == "artificial_code":
+            number_of_elements = benchmark_config.get("number_of_elements", 0)
+            handler_function += "var artificial_dict" + str(number) + " = " + str(generate_huge_dict(number_of_elements)) + ";"
+
+    handler_function += """\nreturn {'result': result }\n}"""
+
+    code += indent(handler_function)
+
+    return code
+
+def compose(config, language):
+
+    code = ""
+
+    benchmarks_list = {benchmark for (benchmark, benchmark_config) in config}
+
+    # load the code of each benchmark
+    code_maps = {
+        benchmark_name: load_benchmark_code(benchmark_name, language) for benchmark_name in benchmarks_list
+    }
+
+    # add imports
+    for code_map in code_maps.values():
+        code += code_map["import"] + "\n"  # TODO: not so easy for nodejs - duplicate imports are not possible
+
+    # add functions
+    for code_map in code_maps.values():
+        code += code_map["function"] + "\n"
+
+    if language == "python":
+        return code + generate_python_handler(config, code_maps)
+    elif language == "async_nodejs":
+        return code + generate_async_nodejs_handler(config, code_maps)
+    else:
+        return ""
+
+
diff --git a/benchmark_generator/disc/async_nodejs/function.js b/benchmark_generator/disc/async_nodejs/function.js
new file mode 100644
index 00000000..fbf323fa
--- /dev/null
+++ b/benchmark_generator/disc/async_nodejs/function.js
@@ -0,0 +1,38 @@
+//#test
+var result = {};
+var config = {
+    "block_size": 1024*1024*128
+};
+var number = 0;
+//#import
+var fs = require('fs');
+var uuid = require('uuid');
+var uuidv1 = uuid.v1;
+//#function
+function generate_data_disc(block_size) {
+    return Array(block_size + 1).join('x')
+}
+async function testDisc(block_size) {
+    try {
+        var data = generate_data_disc(block_size);
+        var path = "/tmp/serverless-benchmark-test-file.json";
+        var t0 = new Date();
+        fs.writeFileSync(path, data);
+        var t1 = new Date();
+        fs.readFileSync(path);
+        var t2 = new Date();
+        return {
+            "write_time": t1 - t0,
+            "read_time": t2 - t1,
+            "bytes": block_size
+        }
+    } catch (error) {
+        return { "error": error.toString() }
+    }
+};
+//#run
+var block_size = config.block_size;
+await testDisc(block_size).then(returnJson => {
+        result[number] = returnJson;
+    }
+);
\ No newline at end of file
diff --git a/benchmark_generator/disc/async_nodejs/package.json b/benchmark_generator/disc/async_nodejs/package.json
new file mode 100644
index 00000000..2c4ed0f7
--- /dev/null
+++ b/benchmark_generator/disc/async_nodejs/package.json
@@ -0,0 +1,13 @@
+{
+    "name": "disc",
+    "version": "1.0.0",
+    "description": "",
+    "author": "",
+    "license": "",
+    "dependencies": {
+        "uuid": "8.2.0"
+    },
+    "devDependencies": {
+    }
+}
+
\ No newline at end of file
diff --git a/benchmark_generator/disc/python/function.py b/benchmark_generator/disc/python/function.py
new file mode 100644
index 00000000..2be7af48
--- /dev/null
+++ b/benchmark_generator/disc/python/function.py
@@ -0,0 +1,31 @@
+#test
+config = {
+    "block_size": 1024*1024*128
+}
+result = {}
+number = 0
+#import
+import numpy as np
+import time
+import uuid
+import os
+#function
+def test_disc(block_size, file_name):
+    a = np.ones(int(block_size / 4), dtype=np.dtype("int32")) * 2
+    t0 = time.clock()
+    np.save(file_name, a)
+    t1 = time.clock()
+    t2 = time.clock()
+    np.load(file_name)
+    t3 = time.clock()
+
+    write_time = t1 - t0
+    read_time = t3 - t2
+    return {"block_size": block_size,
+            "write_time": write_time,
+            "read_time": read_time}
+#run
+block_size = config.get("block_size", 100)
+file_name = "/tmp/sebs.npy"
+result[str(number)] = test_disc(block_size, file_name)
+print(result)
diff --git a/benchmark_generator/function_input/async_nodejs/function.js b/benchmark_generator/function_input/async_nodejs/function.js
new file mode 100644
index 00000000..734aefd4
--- /dev/null
+++ b/benchmark_generator/function_input/async_nodejs/function.js
@@ -0,0 +1,22 @@
+//#test
+var config = {
+    "output_size": 100
+};
+var result = {};
+//#import
+var uuid = require('uuid');
+var uuidv1 = uuid.v1;
+//#function
+function fillDict(dictToFill, entries_number) {
+    try {
+        for (var i = 0; i < entries_number; i++) {
+            dictToFill[uuidv1().toString()] = uuidv1().toString()
+        }
+        return dictToFill
+    } catch (error) {
+        return {"Error": error.toString()}
+    }
+}
+//#run
+var number_of_entries = config["output_size"];
+fillDict(result, number_of_entries);
\ No newline at end of file
diff --git a/benchmark_generator/function_input/async_nodejs/package.json b/benchmark_generator/function_input/async_nodejs/package.json
new file mode 100644
index 00000000..4e596af9
--- /dev/null
+++ b/benchmark_generator/function_input/async_nodejs/package.json
@@ -0,0 +1,13 @@
+{
+    "name": "function_input",
+    "version": "1.0.0",
+    "description": "",
+    "author": "",
+    "license": "",
+    "dependencies": {
+        "uuid": "8.2.0"
+    },
+    "devDependencies": {
+    }
+}
+
\ No newline at end of file
diff --git a/benchmark_generator/function_input/python/function.py b/benchmark_generator/function_input/python/function.py
new file mode 100644
index 00000000..e81af07f
--- /dev/null
+++ b/benchmark_generator/function_input/python/function.py
@@ -0,0 +1,16 @@
+#test
+config = {
+    "output_size": 100
+}
+result = {}
+#import
+import uuid
+#function
+def fill_dict(dict_to_fill, number_of_entries):
+    for i in range(number_of_entries):
+        dict_to_fill[str(uuid.uuid1())] = str(uuid.uuid1())
+
+#run
+number_of_entries = config.get("output_size", 0)
+fill_dict(result, number_of_entries)
+print(result)
\ No newline at end of file
diff --git a/benchmark_generator/generator.py b/benchmark_generator/generator.py
new file mode 100644
index 00000000..c7380523
--- /dev/null
+++ b/benchmark_generator/generator.py
@@ -0,0 +1,57 @@
+import sys
+import json
+import code_composer
+import requirements_composer
+import input_composer
+import os
+
+if len(sys.argv) < 2:
+    sys.exit("Missing argument: path to the generator config")
+
+with open(sys.argv[1]) as config_file:
+    total_config = json.load(config_file)
+
+if total_config["language"] == "python":
+    config = total_config["config"]
+
+    # Generate directory for benchmark
+    path_to_benchmark = "./../benchmarks/600.generated/620.generated/python"
+    if not os.path.exists(path_to_benchmark):
+        os.makedirs(path_to_benchmark)
+
+    # Push code to benchmarks/600.generated/620.generated/python/function.py
+
+    with open(path_to_benchmark + "/function.py", "w+") as code_file:
+        code = code_composer.compose(config, "python")
+        code_file.write(code)
+
+    # Push requirements to benchmarks/600.generated/620.generated/python/requirements.txt
+    with open(path_to_benchmark + "/requirements.txt", "w+") as requirements_file:
+        requirements = requirements_composer.compose(config, "python")
+        print("Req: " + requirements)
+        requirements_file.write(requirements)
+
+elif total_config["language"] == "async_nodejs":
+    config = total_config["config"]
+
+    # Generate directory for benchmark
+    path_to_benchmark = "./../benchmarks/600.generated/620.generated/nodejs"
+    if not os.path.exists(path_to_benchmark):
+        os.makedirs(path_to_benchmark)
+
+    # Push code to benchmarks/600.generated/620.generated/nodejs/function.js
+
+    with open(path_to_benchmark + "/function.js", "w+") as code_file:
+        code = code_composer.compose(config, "async_nodejs")
+        code_file.write(code)
+
+    # Push requirements to benchmarks/600.generated/620.generated/nodejs/package.json
+    with open(path_to_benchmark + "/package.json", "w+") as requirements_file:
+        requirements = requirements_composer.compose(config, "async_nodejs")
+        print("Req: " + requirements)
+        requirements_file.write(requirements)
+
+# Create input.py file
+with open(path_to_benchmark + "/../input.py", "w+") as input_file:
+    code = input_composer.compose(config)
+    input_file.write(code)
\ No newline at end of file
diff --git a/benchmark_generator/input_composer.py b/benchmark_generator/input_composer.py
new file mode 100644
index 00000000..261e35d4
--- /dev/null
+++ b/benchmark_generator/input_composer.py
@@ -0,0 +1,37 @@
+import uuid
+
+def compose(config):
+    benchmarks_list = {benchmark for (benchmark, benchmark_config) in config}
+
+    input_dict = {}
+    print(config)
+    for (benchmark, benchmark_config) in config:
+        if benchmark == "function_input" and "input_size" in benchmark_config.keys():
+            # input size is measured by the number of elements
+            for i in range(int(benchmark_config["input_size"])):
+                input_dict[str(uuid.uuid1())] = 100
+
+    # add needed values
+
+    # generate code
+    code = ""
+    code += "input_dict = " + str(input_dict) + "\n"
+
+    if "storage" in benchmarks_list:
+        code += """def buckets_count():
+    return (0, 1)\n"""
+    else:
+        code += """def buckets_count():
+    return (0, 0)\n"""
+
+    if "storage" in benchmarks_list:
+        code += """def generate_input(data_dir, size, input_buckets, output_buckets, upload_func):
+    input_dict = {'bucket': {}}
+    input_dict['bucket']['output'] = output_buckets[0]
+    return input_dict """
+    else:
+        code += """def generate_input(data_dir, size, input_buckets, output_buckets, upload_func):
+    return input_dict """
+    return code
+
+
\ No newline at end of file
diff --git a/benchmark_generator/memory/async_nodejs/function.js b/benchmark_generator/memory/async_nodejs/function.js
new file mode 100644
index 00000000..cc5c14cb
--- /dev/null
+++ b/benchmark_generator/memory/async_nodejs/function.js
@@ -0,0 +1,24 @@
+//#test
+var config = {
+    "size_in_bytes": 10485760
+};
+var result = {};
+var number = 0;
+//#import
+var math = require('mathjs');
+//#function
+const testMemory = async (size) => {
+    var t0 = new Date();
+    var a = math.ones([size / 8]);
+    var t1 = new Date();
+    return {
+        "time": t1 - t0,
+        "size_in_bytes": size
+    }
+};
+//#run
+var array_size_in_bytes = config["size_in_bytes"];
+await testMemory(array_size_in_bytes).then(returnJson => {
+        result[number] = returnJson;
+    }
+);
diff --git a/benchmark_generator/memory/async_nodejs/package.json b/benchmark_generator/memory/async_nodejs/package.json
new file mode 100644
index 00000000..b46a7b4a
--- /dev/null
+++ b/benchmark_generator/memory/async_nodejs/package.json
@@ -0,0 +1,12 @@
+{
+    "name": "memory",
+    "version": "1.0.0",
+    "description": "",
+    "author": "",
+    "license": "",
+    "dependencies": {
+        "mathjs": "7.0.2"
+    },
+    "devDependencies": {
+    }
+}
diff --git a/benchmark_generator/memory/python/function.py b/benchmark_generator/memory/python/function.py
new file mode 100644
index 00000000..77809ccf
--- /dev/null
+++ b/benchmark_generator/memory/python/function.py
@@ -0,0 +1,22 @@
+#test
+config = {
+    "size_in_bytes": 1024 * 1024
+}
+result = {}
+number = 0
+#import
+import numpy as np
+import time
+#function
+def allocate(size_in_bytes):
+    t0 = time.clock()
+    arr = np.ones(int(size_in_bytes/4), dtype=np.dtype("int32"))
+    t1 = time.clock()
+    return {
+        "time": t1 - t0,
+        "size_in_bytes": size_in_bytes
+    }
+#run
+size_of_allocated_memory = config.get("size_in_bytes", 1024 * 1024)  # default 1 MB
+result[str(number)] = (allocate(size_of_allocated_memory))
+print(result)
diff --git a/benchmark_generator/memory/python/requirements.txt b/benchmark_generator/memory/python/requirements.txt
new file mode 100644
index 00000000..683f00cc
--- /dev/null
+++ b/benchmark_generator/memory/python/requirements.txt
@@ -0,0 +1 @@
+numpy==1.18.5
\ No newline at end of file
diff --git a/benchmark_generator/network/async_nodejs/function.js b/benchmark_generator/network/async_nodejs/function.js
new file mode 100644
index 00000000..aa71e4c9
--- /dev/null
+++ b/benchmark_generator/network/async_nodejs/function.js
@@ -0,0 +1,25 @@
+//#test
+var result = {};
+var number = 0;
+//#import
+var rewire = require('rewire');
+var speedTest = rewire('speedtest-net');
+speedTest.__set__("__dirname", "/tmp") // required on AWS: only /tmp is writable and the library tries to save its data in __dirname
+//#function
+const testNetwork = async () => {
+    var resultJson = {}
+    try {
+        await speedTest({acceptLicense: true, acceptGdpr: true}).then(res => {
+            // resultJson["download"] = res.download.bandwidth;
+            // resultJson["upload"] = res.upload.bandwidth
+        })
+    } catch (exception) {
+        resultJson["error"] = exception.toString()
+    }
+    return resultJson
+};
+//#run
+await testNetwork().then(returnJson => {
+        result[number] = returnJson;
+    }
+)
diff --git a/benchmark_generator/network/async_nodejs/package.json b/benchmark_generator/network/async_nodejs/package.json
new file mode 100644
index 00000000..cadd329d
--- /dev/null
+++ b/benchmark_generator/network/async_nodejs/package.json
@@ -0,0 +1,13 @@
+{
+    "name": "network",
+    "version": "1.0.0",
+    "description": "",
+    "author": "",
+    "license": "",
+    "dependencies": {
+        "speedtest-net": "2.1.1",
+        "rewire": "5.0.0"
+    },
+    "devDependencies": {
+    }
+}
diff --git a/benchmark_generator/network/python/function.py b/benchmark_generator/network/python/function.py
new file mode 100644
index 00000000..8517e6d3
--- /dev/null
+++ b/benchmark_generator/network/python/function.py
@@ -0,0 +1,13 @@
+#test
+result = {}
+number = 0
+#import
+import speedtest
+#function
+def test_network():
+    s = speedtest.Speedtest()
+    return {"upload": s.upload(),
+            "download": s.download()}
+#run
+result[str(number)] = test_network()
+print(result)
diff --git a/benchmark_generator/network/python/requirements.txt b/benchmark_generator/network/python/requirements.txt
new file mode 100644
index 00000000..74f935b6
--- /dev/null
+++ b/benchmark_generator/network/python/requirements.txt
@@ -0,0 +1 @@
+speedtest-cli==2.1.2
\ No newline at end of file
diff --git a/benchmark_generator/requirements_composer.py b/benchmark_generator/requirements_composer.py
new file mode 100644
index 00000000..9ecc941b
--- /dev/null
+++ b/benchmark_generator/requirements_composer.py
@@ -0,0 +1,62 @@
+import os
+import json
+
+def load_benchmark_requirements(benchmark_name):
+    current_dir = os.getcwd()
+    path_to_requirements = os.path.join(current_dir, benchmark_name, "python", "requirements.txt")
+    if os.path.exists(path_to_requirements) and os.path.isfile(path_to_requirements):
+        with open(path_to_requirements, "r") as source_file:
+            requirements = source_file.read()
+        return requirements
+    else:
+        print("Path: " + path_to_requirements + " does not exist")
+        return ""
+
+def prepare_python_file(config):
+    benchmarks_list = {benchmark for (benchmark, benchmark_config) in config}
+
+    requirements_for_all_benchmarks = ""
+    for benchmark_name in benchmarks_list:
+        requirements_for_all_benchmarks += "\n" + load_benchmark_requirements(benchmark_name)
+    return requirements_for_all_benchmarks
+
+def load_benchmark_dependencies(benchmark_name, language):
+    current_dir = os.getcwd()
+    path_to_dependencies = os.path.join(current_dir, benchmark_name, language, "package.json")
+    if os.path.exists(path_to_dependencies) and os.path.isfile(path_to_dependencies):
+        with open(path_to_dependencies, "r") as json_file:
+            package_json = json.load(json_file)
+        return (package_json["dependencies"], package_json["devDependencies"])
+    else:
+        print("Path: " + path_to_dependencies + " does not exist")
+        return ({}, {})
+
+def prepare_nodejs_file(config, language):
+    benchmarks_list = {benchmark for (benchmark, benchmark_config) in config}
+
+    dependencies_list = [load_benchmark_dependencies(benchmark_name, language) for benchmark_name in benchmarks_list]
+
+    dependencies = {}
+    dev_dependencies = {}
+    for dependency, dev_dependency in dependencies_list:
+        dependencies.update(dependency)
+        dev_dependencies.update(dev_dependency)
+
+    return json.dumps({
+        "name": "generated_benchmark",
+        "version": "1.0.0",
+        "description": "",
+        "author": "",
+        "license": "",
+        "dependencies": dependencies,
+        "devDependencies": dev_dependencies
+    })
+
+
+def compose(config, language):
+    if language == "python":
+        return prepare_python_file(config)
+    else:
+        return prepare_nodejs_file(config, language)
+
+
diff --git a/benchmark_generator/sleep/async_nodejs/function.js b/benchmark_generator/sleep/async_nodejs/function.js
new file mode 100644
index 00000000..82535803
--- /dev/null
+++ b/benchmark_generator/sleep/async_nodejs/function.js
@@ -0,0 +1,17 @@
+//#test
+var config = {
+    "duration": 100
+};
+var result = {};
+var number = 0;
+//#import
+//#function
+const sleep = async time => {
+    await new Promise(resolve => setTimeout(resolve, time * 1000)) // s to ms
+    return {
+        "sleep_time": time
+    }
+};
+//#run
+var sleep_time = config["duration"];
+await sleep(sleep_time).then(resJson => {result[number] = resJson});
diff --git a/benchmark_generator/sleep/async_nodejs/package.json b/benchmark_generator/sleep/async_nodejs/package.json
new file mode 100644
index 00000000..af40778b
--- /dev/null
+++ b/benchmark_generator/sleep/async_nodejs/package.json
@@ -0,0 +1,11 @@
+{
+    "name": "sleep",
+    "version": "1.0.0",
+    "description": "",
+    "author": "",
+    "license": "",
+    "dependencies": {
+    },
+    "devDependencies": {
+    }
+}
diff --git a/benchmark_generator/sleep/python/function.py b/benchmark_generator/sleep/python/function.py
new file mode 100644
index 00000000..86bd3834
--- /dev/null
+++ b/benchmark_generator/sleep/python/function.py
@@ -0,0 +1,14 @@
+#test
+config = {
+    "duration": 100
+}
+number = 0
+result = {}
+#import
+from time import sleep
+#function
+
+#run
+sleep_time = config.get('duration')
+sleep(sleep_time)
+result[str(number)] = { 'sleep_time': sleep_time }
\ No newline at end of file
diff --git a/benchmark_generator/storage/async_nodejs/function.js b/benchmark_generator/storage/async_nodejs/function.js
new file mode 100644
index 00000000..9d73c417
--- /dev/null
+++ b/benchmark_generator/storage/async_nodejs/function.js
@@ -0,0 +1,76 @@
+//#test
+var result = {};
+var config = {
+    "entries_number": 1000
+};
+var number = 0;
+var event = {};
+//#import
+var storage = require('./storage');
+var uuid = require('uuid');
+var uuidv1 = uuid.v1;
+var { Readable } = require("stream")
+//#function
+function generate_data(entries_number) {
+    var dictToFill = {};
+    for (var i = 0; i < entries_number; i++) {
+        dictToFill[uuidv1()] = uuidv1()
+    }
+    return dictToFill
+}
+function streamToPromise(stream) {
+    return new Promise(function(resolve, reject) {
+        stream.on("close", () => {
+            resolve();
+        });
+        stream.on("error", reject);
+    })
+}
+async function testBucketStorage(dataAsDict, bucket_config) {
+    var [client, bucket] = bucket_config;
+    var dataAsString = JSON.stringify(dataAsDict);
+    var inputStream = Readable.from(dataAsString);
+    var result = {};
+    var t0 = new Date()
+    var [writeStream, uploadPromise, storageKey] = client.uploadStream(bucket, "serverless-benchmark-data.json")
+    inputStream.pipe(writeStream)
+    await uploadPromise.then(async () => {
+        var t1 = new Date()
+        var read_promise = client.downloadStream(bucket, storageKey)
+        await read_promise.then(async (stream) => {
+            await (streamToPromise(stream).then((any) => {
+                var t2 = new Date();
+                result = {
+                    "uploaded_to_bucket_bytes": dataAsString.length,
+                    "upload_time": t1 - t0,
+                    "downloaded_from_bucket_bytes": dataAsString.length,
+                    "download_time": t2 - t1,
+                    "key": storageKey
+                }
+            }))
+        })
+    })
+    return result;
+}
+async function testStorage(entries_number, bucket_config, storage_type) {
+    try {
+        var data = generate_data(entries_number);
+        if (storage_type == "bucket") {
+            var res = {}
+            await testBucketStorage(data, bucket_config).then((resJson) => res = resJson)
+            return res;
+        }
+        return { "error": "unknown storage" }
+    } catch (error) {
+        return { "error": error.toString() }
+    }
+};
+//#run
+var output_bucket = event.bucket.output;
+var entries_number = config.entries_number;
+let client = new storage.storage();
+var bucket_config = [client, output_bucket];
+await testStorage(entries_number, bucket_config, "bucket").then(returnJson => {
+        result[number] = returnJson;
+    }
+);
\ No newline at end of file
diff --git a/benchmark_generator/storage/async_nodejs/package.json b/benchmark_generator/storage/async_nodejs/package.json
new file mode 100644
index 00000000..171182df
--- /dev/null
+++ b/benchmark_generator/storage/async_nodejs/package.json
@@ -0,0 +1,12 @@
+{
+    "name": "storage",
+    "version": "1.0.0",
+    "description": "",
+    "author": "",
+    "license": "",
+    "dependencies": {
+        "uuid": "8.2.0"
+    },
+    "devDependencies": {
+    }
+}
diff --git a/benchmark_generator/storage/python/function.py b/benchmark_generator/storage/python/function.py
new file mode 100644
index 00000000..5e700eef
--- /dev/null
+++ b/benchmark_generator/storage/python/function.py
@@ -0,0 +1,66 @@
+#test
+result = {}
+config = {
+    "entries_number": 1000
+}
+number = 0
+event = {}
+#import
+from . import storage
+import uuid
+import time
+import traceback
+import io
+#function
+def generate_data(entries_number):
+    dict_to_fill = {}
+    for i in range(entries_number):
+        dict_to_fill[str(uuid.uuid1())] = str(uuid.uuid1())
+    return dict_to_fill
+
+def upload_to_bucket(config, bytes_buffer):
+    (client, output_bucket) = config
+    try:
+        key_name = client.upload_stream(output_bucket, "sebs_test.sth", bytes_buffer)  # WARN: the key name must have an extension! See the storage implementation.
+    except Exception as inst:
+        key_name = str(inst) + "\n" + traceback.format_exc()
+    return key_name
+
+def download_from_bucket(config, file_key):
+    (client, output_bucket) = config
+    buffer = client.download_stream(output_bucket, file_key)
+    downloaded_size = len(buffer.tobytes())
+    return downloaded_size
+
+def test_bucket_like(config, dict_to_upload):
+    string_to_upload = str(dict_to_upload)
+    bytes_to_upload = str.encode(string_to_upload)
+    buffer_to_upload = io.BytesIO(bytes_to_upload)
+    t0 = time.clock()
+    key = upload_to_bucket(config, buffer_to_upload)
+    t1 = time.clock()
+    downloaded_bytes = download_from_bucket(config, key)
+    t2 = time.clock()
+    return {
+        "uploaded_to_bucket_bytes": len(bytes_to_upload),
+        "upload_time": t1 - t0,
+        "downloaded_from_bucket_bytes": downloaded_bytes,
+        "download_time": t2 - t1,
+        "key": key
+    }
+
+def test_storage(dict_to_upload, config, storage_type="bucket"):
+    if storage_type == "bucket":
+        return test_bucket_like(config, dict_to_upload)
+    else:
+        # other storage types are not implemented
+        pass
+    return {}
+#run
+output_bucket = event.get('bucket').get('output')
+entries_number = config.get("entries_number", 10)
+client = storage.storage.get_instance()
+dict_to_upload = generate_data(entries_number)
+bucket_config = (client, output_bucket)
+result[str(number)] = test_storage(dict_to_upload, bucket_config)
+print(result)
diff --git a/benchmark_generator/workload/async_nodejs/function.js b/benchmark_generator/workload/async_nodejs/function.js
new file mode 100644
index 00000000..e2058423
--- /dev/null
+++ b/benchmark_generator/workload/async_nodejs/function.js
@@ -0,0 +1,35 @@
+//#test
+var config = {
+    "iterations": 10000,
+    "operator": "-",
+    "array_size": 10000
+};
+var result = {};
+var number = 0;
+//#import
+var math = require('mathjs');
+//#function
+const performCalculations = async (iterations, operator, array_size) => {
+    let scope = {
+        a: math.ones([array_size]),
+        b: math.ones([array_size])
+    };
+    var t0 = new Date();
+    for (var i = 0; i < iterations; i++) {
+        var c = math.evaluate("a " + operator + " b", scope);
+    }
+    var t1 = new Date();
+    return {
+        "number_of_operations": iterations * array_size,
+        "dtype": "float64",
+        "time": t1 - t0
+    }
+};
+//#run
+var iterations = config["iterations"];
+var operator = config["operator"];
+var array_size = config["array_size"];
+await performCalculations(iterations, operator, array_size).then(returnJson => {
+        result[number] = returnJson;
+    }
+);
\ No newline at end of file
diff --git a/benchmark_generator/workload/async_nodejs/package.json b/benchmark_generator/workload/async_nodejs/package.json
new file mode 100644
index 00000000..b46a7b4a
--- /dev/null
+++ b/benchmark_generator/workload/async_nodejs/package.json
@@ -0,0 +1,12 @@
+{
+    "name": "workload",
+    "version": "1.0.0",
+    "description": "",
+    "author": "",
+    "license": "",
+    "dependencies": {
+        "mathjs": "7.0.2"
+    },
+    "devDependencies": {
+    }
+}
diff --git a/benchmark_generator/workload/python/function.py b/benchmark_generator/workload/python/function.py
new file mode 100644
index 00000000..1552d52c
--- /dev/null
+++ b/benchmark_generator/workload/python/function.py
@@ -0,0 +1,37 @@
+#test
+config = {
+    "iterations": 1000000,
+    "operator": "-",
+    "type": "float32",
+    "array_size": 10000
+}
+result = {}
+number = 0
+#import
+import numpy as np
+import time
+import operator as op
+#function
+def workload(number_of_iterations, dtype, array_size, operator):
+    a = np.ones(array_size, dtype=dtype) * 2
+    b = np.ones(array_size, dtype=dtype) * 3
+    t0 = time.clock()
+    for i in range(number_of_iterations):
+        c = operator(a, b)
+    t1 = time.clock()
+    return {"number_of_operations": number_of_iterations * array_size,
+            "dtype": str(dtype),
+            "time": t1 - t0}
+#run
+string_to_operator = {
+    "+": op.add,
+    "-": op.sub,
+    "*": op.mul,
+    "/": op.truediv,
+}
+element_type = np.dtype(config.get("type", "float64"))
+number_of_iterations = config.get("iterations", 10000)
+array_size = config.get("array_size", 100)
+operator = string_to_operator[config.get("operator", "+")]
+result[str(number)] = (workload(number_of_iterations, element_type, array_size, operator))
+print(result)
diff --git a/benchmark_generator/workload/python/requirements.txt b/benchmark_generator/workload/python/requirements.txt
new file mode 100644
index 00000000..683f00cc
--- /dev/null
+++ b/benchmark_generator/workload/python/requirements.txt
@@ -0,0 +1 @@
+numpy==1.18.5
\ No newline at end of file
diff --git a/config/generator_config.json b/config/generator_config.json
new file mode 100644
index 00000000..b98c45ef
--- /dev/null
+++ b/config/generator_config.json
@@ -0,0 +1,61 @@
+[
+    [
+        "workload",
+        {
+            "iterations": 10000,
+            "operator": "-",
+            "type": "float32",
+            "array_size": 1000
+        }
+    ],
+    [
+        "memory",
+        {
+            "size_in_bytes": 1048576
+        }
+    ],
+    [
+        "workload",
+        {
+            "iterations": 10000,
+            "operator": "-",
+            "type": "float32",
+            "array_size": 1000
+        }
+    ],
+    [
+        "function_input",
+        {
+            "input_size": 10,
+            "output_size": 0
+        }
+    ],
+    [
+        "network",
+        {}
+    ],
+    [
+        "disc",
+        {
+            "block_size": 100000000
+        }
+    ],
+    [
+        "storage",
+        {
+            "entries_number": 1000000
+        }
+    ],
+    [
+        "sleep",
+        {
+            "duration": 1
+        }
+    ],
+    [
+        "artificial_code",
+        {
+            "number_of_elements": 1
+        }
+    ]
+]
\ No newline at end of file
diff --git a/generate.sh b/generate.sh
new file mode 100755
index 00000000..91a9bb83
--- /dev/null
+++ b/generate.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+cd benchmark_generator
+python3 generator.py ./../config/generator_config.json
\ No newline at end of file