diff --git a/benchmarks/000.microbenchmarks/010.sleep/python/function.py b/benchmarks/000.microbenchmarks/010.sleep/python/function.py
index 7dda59a5..67a59c0b 100644
--- a/benchmarks/000.microbenchmarks/010.sleep/python/function.py
+++ b/benchmarks/000.microbenchmarks/010.sleep/python/function.py
@@ -1,9 +1,13 @@
-
 from time import sleep
 
 def handler(event):
 
     # start timing
-    sleep_time = event.get('sleep')
+    sleep_time = event.get('sleep', None)
+    if sleep_time is None:
+        return { "status": "failure", "result": "Error: Key 'sleep' not found in input data." }
+    elif not isinstance(sleep_time, (int, float)):
+        return { "status": "failure", "result": "Error: Unexpected type for 'sleep' (expected int or float)" }
+
     sleep(sleep_time)
-    return { 'result': sleep_time }
+    return { "status": "success", "result": "Returned with no error", "measurement": sleep_time }
diff --git a/benchmarks/000.microbenchmarks/020.network-benchmark/python/function.py b/benchmarks/000.microbenchmarks/020.network-benchmark/python/function.py
index 340d4f40..00c7eb94 100644
--- a/benchmarks/000.microbenchmarks/020.network-benchmark/python/function.py
+++ b/benchmarks/000.microbenchmarks/020.network-benchmark/python/function.py
@@ -1,20 +1,36 @@
 import csv
-import json
 import socket
 from datetime import datetime
-from time import sleep
+from jsonschema import validate
 
 from . import storage
 
 def handler(event):
 
+    schema = {
+        "type": "object",
+        "required": ["request-id", "server-address", "server-port", "repetitions", "output-bucket"],
+        "properties": {
+            "request-id": {"type": "string"},
+            "server-address": {"type": "string"},
+            "server-port": {"type": "integer"},
+            "repetitions": {"type": "integer"},
+            "output-bucket": {"type": "string"}
+        }
+    }
+    try:
+        validate(event, schema=schema)
+    except:
+        return { 'status': 'failure', 'result': 'Some value(s) is/are not found in JSON data or of incorrect type' }
+
     request_id = event['request-id']
     address = event['server-address']
     port = event['server-port']
     repetitions = event['repetitions']
-    output_bucket = event.get('output-bucket')
-    times = []
+    output_bucket = event['output-bucket']
+    i = 0
+    times = []
 
     socket.setdefaulttimeout(3)
     server_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
     server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
@@ -22,7 +38,7 @@ def handler(event):
     message = request_id.encode('utf-8')
     adr = (address, port)
     consecutive_failures = 0
-    while i < repetitions + 1:
+    while i <= repetitions:
         try:
             send_begin = datetime.now().timestamp()
             server_socket.sendto(message, adr)
@@ -32,8 +48,8 @@ def handler(event):
             i += 1
             consecutive_failures += 1
             if consecutive_failures == 5:
-                print("Can't setup the connection")
-                break
+                server_socket.close()
+                return { 'status': 'failure', 'result': 'Unable to setup connection' }
             continue
         if i > 0:
             times.append([i, send_begin, recv_end])
@@ -42,14 +58,12 @@ def handler(event):
     server_socket.settimeout(2)
     server_socket.close()
 
-    if consecutive_failures != 5:
-        with open('/tmp/data.csv', 'w', newline='') as csvfile:
-            writer = csv.writer(csvfile, delimiter=',')
-            writer.writerow(["id", "client_send", "client_rcv"])
-            for row in times:
-                writer.writerow(row)
-
-        client = storage.storage.get_instance()
-        key = client.upload(output_bucket, 'results-{}.csv'.format(request_id), '/tmp/data.csv')
-
-    return { 'result': key }
+    with open('/tmp/data.csv', 'w', newline='') as csvfile:
+        writer = csv.writer(csvfile, delimiter=',')
+        writer.writerow(["id", "client_send", "client_rcv"])
+        for row in times:
+            writer.writerow(row)
+
+    client = storage.storage.get_instance()
+    key = client.upload(output_bucket, f'results-{request_id}.csv', '/tmp/data.csv')
+    return { 'status': 'success', 'result': 'Returned with no error', 'measurement': key }
diff --git a/benchmarks/000.microbenchmarks/020.network-benchmark/python/requirements.txt b/benchmarks/000.microbenchmarks/020.network-benchmark/python/requirements.txt
new file mode 100644
index 00000000..e14f911d
--- /dev/null
+++ b/benchmarks/000.microbenchmarks/020.network-benchmark/python/requirements.txt
@@ -0,0 +1 @@
+jsonschema>=4.17.0
\ No newline at end of file
diff --git a/benchmarks/000.microbenchmarks/030.clock-synchronization/python/function.py b/benchmarks/000.microbenchmarks/030.clock-synchronization/python/function.py
index 26477875..85e9627a 100644
--- a/benchmarks/000.microbenchmarks/030.clock-synchronization/python/function.py
+++ b/benchmarks/000.microbenchmarks/030.clock-synchronization/python/function.py
@@ -3,19 +3,39 @@
 import socket
 from datetime import datetime
 from time import sleep
+from jsonschema import validate
 
 from . import storage
 
 def handler(event):
 
+    schema = {
+        "type": "object",
+        "required": ["request-id", "server-address", "server-port", "repetitions", "output-bucket", "income-timestamp"],
+        "properties": {
+            "request-id": {"type": "string"},
+            "server-address": {"type": "string"},
+            "server-port": {"type": "integer"},
+            "repetitions": {"type": "integer"},
+            "output-bucket": {"type": "string"},
+            "income-timestamp": {"type": "number"}
+        }
+    }
+    try:
+        validate(event, schema=schema)
+    except:
+        # !? Should this also return 'measurement': {'bucket-key': None, 'timestamp': event['income-timestamp']}?
+        return { 'status': 'failure', 'result': 'Some value(s) is/are not found in JSON data or of incorrect type' }
+
     request_id = event['request-id']
     address = event['server-address']
     port = event['server-port']
     repetitions = event['repetitions']
-    output_bucket = event.get('output-bucket')
-    times = []
-    print("Starting communication with {}:{}".format(address, port))
+    output_bucket = event['output-bucket']
+    i = 0
+    times = []
+    print(f"Starting communication with {address}:{port}")
 
     socket.setdefaulttimeout(4)
     server_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
     server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
@@ -35,14 +55,15 @@ def handler(event):
             i += 1
             consecutive_failures += 1
             if consecutive_failures == 7:
-                print("Can't setup the connection")
-                break
+                server_socket.close()
+                # !? Should this also return 'measurement': {'bucket-key': None, 'timestamp': event['income-timestamp']}?
+                return { 'status': 'failure', 'result': 'Unable to setup connection' }
             continue
         if i > 0:
             times.append([i, send_begin, recv_end])
             cur_time = recv_end - send_begin
-            print("Time {} Min Time {} NotSmaller {}".format(cur_time, cur_min, measurements_not_smaller))
-            if cur_time > cur_min and cur_min > 0:
+            print(f"Time {cur_time} Min Time {cur_min} NotSmaller {measurements_not_smaller}")
+            if cur_time > cur_min > 0:
                 measurements_not_smaller += 1
                 if measurements_not_smaller == repetitions:
                     message = "stop".encode('utf-8')
@@ -56,16 +77,13 @@ def handler(event):
     server_socket.settimeout(4)
     server_socket.close()
 
-    if consecutive_failures != 5:
-        with open('/tmp/data.csv', 'w', newline='') as csvfile:
-            writer = csv.writer(csvfile, delimiter=',')
-            writer.writerow(["id", "client_send", "client_rcv"])
-            for row in times:
-                writer.writerow(row)
-
-        client = storage.storage.get_instance()
-        key = client.upload(output_bucket, 'results-{}.csv'.format(request_id), '/tmp/data.csv')
-    else:
-        key = None
+    with open('/tmp/data.csv', 'w', newline='') as csvfile:
+        writer = csv.writer(csvfile, delimiter=',')
+        writer.writerow(["id", "client_send", "client_rcv"])
+        for row in times:
+            writer.writerow(row)
+
+    client = storage.storage.get_instance()
+    key = client.upload(output_bucket, f'results-{request_id}.csv', '/tmp/data.csv')
 
-    return { 'result': {'bucket-key': key, 'timestamp': event['income-timestamp']} }
+    return { 'status': 'success', 'result': 'Returned with no error', 'measurement': {'bucket-key': key, 'timestamp': event['income-timestamp']} }
diff --git a/benchmarks/000.microbenchmarks/030.clock-synchronization/python/requirements.txt b/benchmarks/000.microbenchmarks/030.clock-synchronization/python/requirements.txt
new file mode 100644
index 00000000..e14f911d
--- /dev/null
+++ b/benchmarks/000.microbenchmarks/030.clock-synchronization/python/requirements.txt
@@ -0,0 +1 @@
+jsonschema>=4.17.0
\ No newline at end of file
diff --git a/benchmarks/000.microbenchmarks/040.server-reply/python/function.py b/benchmarks/000.microbenchmarks/040.server-reply/python/function.py
index fb5b57aa..3167b307 100644
--- a/benchmarks/000.microbenchmarks/040.server-reply/python/function.py
+++ b/benchmarks/000.microbenchmarks/040.server-reply/python/function.py
@@ -1,13 +1,27 @@
-
 import socket
 from time import sleep
+from jsonschema import validate
 
 def handler(event):
 
+    schema = {
+        "type": "object",
+        "required": ["ip-address", "port"],
+        "properties": {
+            "ip-address": {"type": "string"},
+            "port": {"type": "integer"}
+        }
+    }
+    try:
+        validate(event, schema=schema)
+    except:
+        return { 'status': 'failure', 'result': 'Some value(s) is/are not found in JSON data or of incorrect type' }
+
     # start timing
-    addr = (event.get('ip-address'), event.get('port'))
+    addr = (event['ip-address'], event['port'])
+    socket.setdefaulttimeout(20)
     s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
     s.connect(addr)
     msg = s.recv(1024).decode()
-    return {"result": msg}
+    return { 'status': 'success', 'result': 'Returned with no error', "measurement": msg }
diff --git a/benchmarks/000.microbenchmarks/040.server-reply/python/requirements.txt b/benchmarks/000.microbenchmarks/040.server-reply/python/requirements.txt
new file mode 100644
index 00000000..e14f911d
--- /dev/null
+++ b/benchmarks/000.microbenchmarks/040.server-reply/python/requirements.txt
@@ -0,0 +1 @@
+jsonschema>=4.17.0
\ No newline at end of file
diff --git a/benchmarks/100.webapps/110.dynamic-html/python/function.py b/benchmarks/100.webapps/110.dynamic-html/python/function.py
index 7c990f4e..5ebbc23e 100644
--- a/benchmarks/100.webapps/110.dynamic-html/python/function.py
+++ b/benchmarks/100.webapps/110.dynamic-html/python/function.py
@@ -1,7 +1,9 @@
 from datetime import datetime
 from random import sample
 from os import path
-from time import time
+from time import time_ns
+from jsonschema import validate
+
 import os
 
 from jinja2 import Template
@@ -10,13 +12,26 @@ def handler(event):
 
+    schema = {
+        "type": "object",
+        "required": ["username", "random_len"],
+        "properties": {
+            "username": {"type": "string"},
+            "random_len": {"type": "integer"}
+        }
+    }
+    try:
+        validate(event, schema=schema)
+    except:
+        return { 'status': 'failure', 'result': 'Some value(s) is/are not found in JSON data or of incorrect type' }
     # start timing
-    name = event.get('username')
-    size = event.get('random_len')
+    name = event['username']
+    size = event['random_len']
+
     cur_time = datetime.now()
     random_numbers = sample(range(0, 1000000), size)
-    template = Template( open(path.join(SCRIPT_DIR, 'templates', 'template.html'), 'r').read())
+    template = Template(open(path.join(SCRIPT_DIR, 'templates', 'template.html'), 'r').read())
     html = template.render(username = name, cur_time = cur_time, random_numbers = random_numbers)
     # end timing
     # dump stats
-    return {'result': html}
+    return { 'status': 'success', 'result': 'Returned with no error', 'measurement': html }
diff --git a/benchmarks/100.webapps/110.dynamic-html/python/requirements.txt b/benchmarks/100.webapps/110.dynamic-html/python/requirements.txt
index 5ca56944..0891d54e 100644
--- a/benchmarks/100.webapps/110.dynamic-html/python/requirements.txt
+++ b/benchmarks/100.webapps/110.dynamic-html/python/requirements.txt
@@ -1 +1,2 @@
 jinja2>=2.10.3
+jsonschema>=4.17.0
\ No newline at end of file
diff --git a/benchmarks/100.webapps/120.uploader/python/function.py b/benchmarks/100.webapps/120.uploader/python/function.py
index 828f8993..b1846e5d 100755
--- a/benchmarks/100.webapps/120.uploader/python/function.py
+++ b/benchmarks/100.webapps/120.uploader/python/function.py
@@ -1,20 +1,38 @@
-
 import datetime
 import os
 import uuid
-
 import urllib.request
+from jsonschema import validate
 
 from . import storage
 client = storage.storage.get_instance()
-
 def handler(event):
-    output_bucket = event.get('bucket').get('output')
-    url = event.get('object').get('url')
+    schema = {
+        "type": "object",
+        "required": ["bucket", "object"],
+        "properties": {
+            "bucket": {
+                "type": "object",
+                "required": ["output"]
+            },
+            "object": {
+                "type": "object",
+                "required": ["url"]
+            }
+        }
+    }
+
+    try:
+        validate(event, schema=schema)
+    except:
+        return { 'status': 'failure', 'result': 'Some value(s) is/are not found in JSON data or of incorrect type' }
+
+    output_bucket = event['bucket']['output']
+    url = event['object']['url']
     name = os.path.basename(url)
-    download_path = '/tmp/{}'.format(name)
+    download_path = f'/tmp/{name}'
 
     process_begin = datetime.datetime.now()
     urllib.request.urlretrieve(url, filename=download_path)
@@ -28,12 +46,12 @@ def handler(event):
     process_time = (process_end - process_begin) / datetime.timedelta(microseconds=1)
     upload_time = (upload_end - upload_begin) / datetime.timedelta(microseconds=1)
     return {
-        'result': {
+        'status': 'success',
+        'result': 'Returned with no error',
+        'measurement': {
             'bucket': output_bucket,
             'url': url,
-            'key': key_name
-        },
-        'measurement': {
+            'key': key_name,
             'download_time': 0,
             'download_size': 0,
             'upload_time': upload_time,
diff --git a/benchmarks/100.webapps/120.uploader/python/requirements.txt b/benchmarks/100.webapps/120.uploader/python/requirements.txt
new file mode 100644
index 00000000..e14f911d
--- /dev/null
+++ b/benchmarks/100.webapps/120.uploader/python/requirements.txt
@@ -0,0 +1 @@
+jsonschema>=4.17.0
\ No newline at end of file
diff --git a/benchmarks/200.multimedia/210.thumbnailer/python/function.py b/benchmarks/200.multimedia/210.thumbnailer/python/function.py
index d8286056..775970ac 100755
--- a/benchmarks/200.multimedia/210.thumbnailer/python/function.py
+++ b/benchmarks/200.multimedia/210.thumbnailer/python/function.py
@@ -5,6 +5,7 @@
 import uuid
 from urllib.parse import unquote_plus
 from PIL import Image
+from jsonschema import validate
 
 from . import storage
 client = storage.storage.get_instance()
@@ -27,11 +28,31 @@ def resize_image(image_bytes, w, h):
 
 
 def handler(event):
-    input_bucket = event.get('bucket').get('input')
-    output_bucket = event.get('bucket').get('output')
-    key = unquote_plus(event.get('object').get('key'))
-    width = event.get('object').get('width')
-    height = event.get('object').get('height')
+    schema = {
+        "type": "object",
+        "required": ["bucket", "object"],
+        "properties": {
+            "bucket": {
+                "type": "object",
+                "required": ["output", "input"]
+            },
+            "object": {
+                "type": "object",
+                "required": ["key", "width", "height"]
+            }
+        }
+    }
+
+    try:
+        validate(event, schema=schema)
+    except:
+        return { 'status': 'failure', 'result': 'Some value(s) is/are not found in JSON data or of incorrect type' }
+
+    input_bucket = event['bucket']['input']
+    output_bucket = event['bucket']['output']
+    key = unquote_plus(event['object']['key'])
+    width = event['object']['width']
+    height = event['object']['height']
     # UUID to handle multiple calls
     #download_path = '/tmp/{}-{}'.format(uuid.uuid4(), key)
     #upload_path = '/tmp/resized-{}'.format(key)
@@ -55,11 +76,11 @@ def handler(event):
     upload_time = (upload_end - upload_begin) / datetime.timedelta(microseconds=1)
     process_time = (process_end - process_begin) / datetime.timedelta(microseconds=1)
     return {
-        'result': {
-            'bucket': output_bucket,
-            'key': key_name
-        },
+        'status': 'success',
+        'result': 'Returned with no error',
         'measurement': {
+            'bucket': output_bucket,
+            'key': key_name,
             'download_time': download_time,
             'download_size': len(img),
             'upload_time': upload_time,
diff --git a/benchmarks/200.multimedia/210.thumbnailer/python/requirements.txt b/benchmarks/200.multimedia/210.thumbnailer/python/requirements.txt
index e69de29b..e14f911d 100644
--- a/benchmarks/200.multimedia/210.thumbnailer/python/requirements.txt
+++ b/benchmarks/200.multimedia/210.thumbnailer/python/requirements.txt
@@ -0,0 +1 @@
+jsonschema>=4.17.0
\ No newline at end of file
diff --git a/benchmarks/200.multimedia/220.video-processing/python/function.py b/benchmarks/200.multimedia/220.video-processing/python/function.py
index 3907e75b..2426dc0e 100755
--- a/benchmarks/200.multimedia/220.video-processing/python/function.py
+++ b/benchmarks/200.multimedia/220.video-processing/python/function.py
@@ -4,6 +4,7 @@
 import os
 import stat
 import subprocess
+from jsonschema import validate
 
 from . import storage
@@ -52,11 +53,32 @@ def transcode_mp3(video, duration, event):
 operations = { 'transcode' : transcode_mp3, 'extract-gif' : to_gif, 'watermark' : watermark }
 
 def handler(event):
-    input_bucket = event.get('bucket').get('input')
-    output_bucket = event.get('bucket').get('output')
-    key = event.get('object').get('key')
-    duration = event.get('object').get('duration')
-    op = event.get('object').get('op')
+
+    schema = {
+        "type": "object",
+        "required": ["bucket", "object"],
+        "properties": {
+            "bucket": {
+                "type": "object",
+                "required": ["output", "input"]
+            },
+            "object": {
+                "type": "object",
+                "required": ["key", "duration", "op"]
+            }
+        }
+    }
+
+    try:
+        validate(event, schema=schema)
+    except:
+        return { 'status': 'failure', 'result': 'Some value(s) is/are not found in JSON data or of incorrect type' }
+
+    input_bucket = event['bucket']['input']
+    output_bucket = event['bucket']['output']
+    key = event['object']['key']
+    duration = event['object']['duration']
+    op = event['object']['op']
     download_path = '/tmp/{}'.format(key)
 
     # Restore executable permission
@@ -87,11 +109,11 @@ def handler(event):
     upload_time = (upload_stop - upload_begin) / datetime.timedelta(microseconds=1)
     process_time = (process_end - process_begin) / datetime.timedelta(microseconds=1)
     return {
-        'result': {
-            'bucket': output_bucket,
-            'key': filename
-        },
+        'status': 'success',
+        'result': 'Returned with no error',
         'measurement': {
+            'bucket': output_bucket,
+            'key': filename,
             'download_time': download_time,
             'download_size': download_size,
             'upload_time': upload_time,
@@ -99,4 +121,3 @@ def handler(event):
             'compute_time': process_time
         }
     }
-
diff --git a/benchmarks/200.multimedia/220.video-processing/python/requirements.txt b/benchmarks/200.multimedia/220.video-processing/python/requirements.txt
index e69de29b..e14f911d 100644
--- a/benchmarks/200.multimedia/220.video-processing/python/requirements.txt
+++ b/benchmarks/200.multimedia/220.video-processing/python/requirements.txt
@@ -0,0 +1 @@
+jsonschema>=4.17.0
\ No newline at end of file
diff --git a/benchmarks/300.utilities/311.compression/python/function.py b/benchmarks/300.utilities/311.compression/python/function.py
index 29c8e507..e13928bd 100755
--- a/benchmarks/300.utilities/311.compression/python/function.py
+++ b/benchmarks/300.utilities/311.compression/python/function.py
@@ -4,6 +4,7 @@
 import shutil
 import uuid
 import zlib
+from jsonschema import validate
 
 from . import storage
 client = storage.storage.get_instance()
@@ -17,11 +18,31 @@ def parse_directory(directory):
     return size
 
 def handler(event):
-
-    input_bucket = event.get('bucket').get('input')
-    output_bucket = event.get('bucket').get('output')
-    key = event.get('object').get('key')
-    download_path = '/tmp/{}-{}'.format(key, uuid.uuid4())
+
+    schema = {
+        "type": "object",
+        "required": ["bucket", "object"],
+        "properties": {
+            "bucket": {
+                "type": "object",
+                "required": ["output", "input"]
+            },
+            "object": {
+                "type": "object",
+                "required": ["key"]
+            }
+        }
+    }
+
+    try:
+        validate(event, schema=schema)
+    except:
+        return { 'status': 'failure', 'result': 'Some value(s) is/are not found in JSON data or of incorrect type' }
+
+    input_bucket = event['bucket']['input']
+    output_bucket = event['bucket']['output']
+    key = event['object']['key']
+    download_path = f'/tmp/{key}-{uuid.uuid4()}'
     os.makedirs(download_path)
 
     s3_download_begin = datetime.datetime.now()
@@ -34,7 +55,7 @@ def handler(event):
     compress_end = datetime.datetime.now()
 
     s3_upload_begin = datetime.datetime.now()
-    archive_name = '{}.zip'.format(key)
+    archive_name = f'{key}.zip'
     archive_size = os.path.getsize(os.path.join(download_path, archive_name))
     key_name = client.upload(output_bucket, archive_name, os.path.join(download_path, archive_name))
     s3_upload_stop = datetime.datetime.now()
@@ -43,11 +64,11 @@ def handler(event):
     upload_time = (s3_upload_stop - s3_upload_begin) / datetime.timedelta(microseconds=1)
     process_time = (compress_end - compress_begin) / datetime.timedelta(microseconds=1)
     return {
-        'result': {
-            'bucket': output_bucket,
-            'key': key_name
-        },
+        'status': 'success',
+        'result': 'Returned with no error',
         'measurement': {
+            'bucket': output_bucket,
+            'key': key_name,
             'download_time': download_time,
             'download_size': size,
             'upload_time': upload_time,
@@ -55,4 +76,3 @@ def handler(event):
             'compute_time': process_time
         }
     }
-
diff --git a/benchmarks/300.utilities/311.compression/python/requirements.txt b/benchmarks/300.utilities/311.compression/python/requirements.txt
new file mode 100644
index 00000000..e14f911d
--- /dev/null
+++ b/benchmarks/300.utilities/311.compression/python/requirements.txt
@@ -0,0 +1 @@
+jsonschema>=4.17.0
\ No newline at end of file
diff --git a/benchmarks/400.inference/411.image-recognition/python/function.py b/benchmarks/400.inference/411.image-recognition/python/function.py
index 0bdd18d8..14859963 100644
--- a/benchmarks/400.inference/411.image-recognition/python/function.py
+++ b/benchmarks/400.inference/411.image-recognition/python/function.py
@@ -1,4 +1,3 @@
-
 import datetime, json, os, uuid
 
 # Extract zipped torch model - used in Python 3.8 and 3.9
@@ -14,6 +13,7 @@
 import torch
 from torchvision import transforms
 from torchvision.models import resnet50
+from jsonschema import validate
 
 from . import storage
 client = storage.storage.get_instance()
@@ -25,11 +25,31 @@ def handler(event):
-    model_bucket = event.get('bucket').get('model')
-    input_bucket = event.get('bucket').get('input')
-    key = event.get('object').get('input')
-    model_key = event.get('object').get('model')
-    download_path = '/tmp/{}-{}'.format(key, uuid.uuid4())
+    schema = {
+        "type": "object",
+        "required": ["bucket", "object"],
+        "properties": {
+            "bucket": {
+                "type": "object",
+                "required": ["model", "input"]
+            },
+            "object": {
+                "type": "object",
+                "required": ["input", "model"]
+            }
+        }
+    }
+
+    try:
+        validate(event, schema=schema)
+    except:
+        return { 'status': 'failure', 'result': 'Some value(s) is/are not found in JSON data or of incorrect type' }
+
+    model_bucket = event['bucket']['model']
+    input_bucket = event['bucket']['input']
+    key = event['object']['input'] # !? should this be 'input' or 'key'?
+    model_key = event['object']['model']
+    download_path = f'/tmp/{key}-{uuid.uuid4()}'
 
     image_download_begin = datetime.datetime.now()
     image_path = download_path
@@ -71,13 +91,16 @@ def handler(event):
     ret = idx2label[index]
     process_end = datetime.datetime.now()
 
-    download_time = (image_download_end- image_download_begin) / datetime.timedelta(microseconds=1)
+    download_time = (image_download_end - image_download_begin) / datetime.timedelta(microseconds=1)
     model_download_time = (model_download_end - model_download_begin) / datetime.timedelta(microseconds=1)
     model_process_time = (model_process_end - model_process_begin) / datetime.timedelta(microseconds=1)
     process_time = (process_end - process_begin) / datetime.timedelta(microseconds=1)
     return {
-        'result': {'idx': index.item(), 'class': ret},
+        'status': 'success',
+        'result': 'Returned with no error',
         'measurement': {
+            'idx': index.item(),
+            'class': ret,
             'download_time': download_time + model_download_time,
             'compute_time': process_time + model_process_time,
             'model_time': model_process_time,
diff --git a/benchmarks/400.inference/411.image-recognition/python/requirements.txt b/benchmarks/400.inference/411.image-recognition/python/requirements.txt
index d191dc6d..33e97d58 100644
--- a/benchmarks/400.inference/411.image-recognition/python/requirements.txt
+++ b/benchmarks/400.inference/411.image-recognition/python/requirements.txt
@@ -2,3 +2,4 @@
 #torchvision==0.4.0+cpu
 #https://download.pytorch.org/whl/cpu/torch-1.0.1.post2-cp37-cp37m-linux_x86_64.whl
 #torch==1.0.1.post2+cpu
+jsonschema>=4.17.0
\ No newline at end of file
diff --git a/benchmarks/500.scientific/501.graph-pagerank/python/function.py b/benchmarks/500.scientific/501.graph-pagerank/python/function.py
index d6af95ba..fa1cc58e 100755
--- a/benchmarks/500.scientific/501.graph-pagerank/python/function.py
+++ b/benchmarks/500.scientific/501.graph-pagerank/python/function.py
@@ -4,6 +4,8 @@
 def handler(event):
     size = event.get('size')
+    if size is None or not isinstance(size, (int, float)):
+        return { "status": "failure", 'result': 'Some value(s) is/are not found in JSON data or of incorrect type' }
 
     graph_generating_begin = datetime.datetime.now()
     graph = igraph.Graph.Barabasi(size, 10)
@@ -17,7 +19,8 @@ def handler(event):
     process_time = (process_end - process_begin) / datetime.timedelta(microseconds=1)
 
     return {
-        'result': result[0],
+        'status': 'success',
+        'result': 'Returned with no error',
         'measurement': {
             'graph_generating_time': graph_generating_time,
             'compute_time': process_time
diff --git a/benchmarks/500.scientific/502.graph-mst/python/function.py b/benchmarks/500.scientific/502.graph-mst/python/function.py
index e499c8df..84f08298 100755
--- a/benchmarks/500.scientific/502.graph-mst/python/function.py
+++ b/benchmarks/500.scientific/502.graph-mst/python/function.py
@@ -4,6 +4,8 @@
 def handler(event):
     size = event.get('size')
+    if size is None or not isinstance(size, (int, float)):
+        return { "status": "failure", 'result': 'Some value(s) is/are not found in JSON data or of incorrect type' }
 
     graph_generating_begin = datetime.datetime.now()
     graph = igraph.Graph.Barabasi(size, 10)
@@ -17,9 +19,11 @@ def handler(event):
     process_time = (process_end - process_begin) / datetime.timedelta(microseconds=1)
 
     return {
-        'result': result[0],
+        'status': 'success',
+        'result': "Returned with no error",
         'measurement': {
             'graph_generating_time': graph_generating_time,
-            'compute_time': process_time
+            'compute_time': process_time,
+            'result': result[0]
         }
     }
diff --git a/benchmarks/500.scientific/503.graph-bfs/python/function.py b/benchmarks/500.scientific/503.graph-bfs/python/function.py
index dcb2ff0b..762539d5 100755
--- a/benchmarks/500.scientific/503.graph-bfs/python/function.py
+++ b/benchmarks/500.scientific/503.graph-bfs/python/function.py
@@ -4,6 +4,8 @@
 def handler(event):
     size = event.get('size')
+    if size is None or not isinstance(size, (int, float)):
+        return { "status": "failure", 'result': 'Some value(s) is/are not found in JSON data or of incorrect type' }
 
     graph_generating_begin = datetime.datetime.now()
     graph = igraph.Graph.Barabasi(size, 10)
@@ -17,9 +19,11 @@ def handler(event):
     process_time = (process_end - process_begin) / datetime.timedelta(microseconds=1)
 
     return {
-        'result': result,
+        'status': 'success',
+        'result': "Returned with no error",
         'measurement': {
             'graph_generating_time': graph_generating_time,
-            'compute_time': process_time
+            'compute_time': process_time,
+            'result': result
         }
     }
diff --git a/benchmarks/500.scientific/504.dna-visualisation/python/function.py b/benchmarks/500.scientific/504.dna-visualisation/python/function.py
index 830b0ba2..78e1bca5 100755
--- a/benchmarks/500.scientific/504.dna-visualisation/python/function.py
+++ b/benchmarks/500.scientific/504.dna-visualisation/python/function.py
@@ -1,15 +1,36 @@
 import datetime, io, json
 # using https://squiggle.readthedocs.io/en/latest/
 from squiggle import transform
+from jsonschema import validate
 
 from . import storage
 client = storage.storage.get_instance()
 
 def handler(event):
-    input_bucket = event.get('bucket').get('input')
-    output_bucket = event.get('bucket').get('output')
-    key = event.get('object').get('key')
+    schema = {
+        "type": "object",
+        "required": ["bucket", "object"],
+        "properties": {
+            "bucket": {
+                "type": "object",
+                "required": ["input", "output"]
+            },
+            "object": {
+                "type": "object",
+                "required": ["key"]
+            }
+        }
+    }
+
+    try:
+        validate(event, schema=schema)
+    except:
+        return { "status": "failure", 'result': 'Some value(s) is/are not found in JSON data or of incorrect type' }
+
+    input_bucket = event['bucket']['input']
+    output_bucket = event['bucket']['output']
+    key = event['object']['key']
     download_path = '/tmp/{}'.format(key)
 
     download_begin = datetime.datetime.now()
@@ -30,13 +51,14 @@ def handler(event):
 
     download_time = (download_stop - download_begin) / datetime.timedelta(microseconds=1)
     process_time = (process_end - process_begin) / datetime.timedelta(microseconds=1)
 
+    # !? Should upload_time be included here?
     return {
-        'result': {
-            'bucket': output_bucket,
-            'key': key_name
-        },
+        'status': 'success',
+        'result': 'Returned with no error',
         'measurement': {
+            'bucket': output_bucket,
+            'key': key_name,
             'download_time': download_time,
             'compute_time': process_time
         }
diff --git a/benchmarks/500.scientific/504.dna-visualisation/python/requirements.txt b/benchmarks/500.scientific/504.dna-visualisation/python/requirements.txt
index 2f3ae7e3..4be7aa45 100755
--- a/benchmarks/500.scientific/504.dna-visualisation/python/requirements.txt
+++ b/benchmarks/500.scientific/504.dna-visualisation/python/requirements.txt
@@ -1 +1,2 @@
 squiggle==0.3.1
+jsonschema>=4.17.0
\ No newline at end of file