diff --git a/requirements-dev.txt b/requirements-dev.txt
index a67f53653e..8f5c2e8b12 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -9,7 +9,7 @@ boto==2.48.0
 boto3==1.4.8
 botocore==1.8.3
 cachetools==2.0.1
-certifi==2017.11.5
+certifi==2018.1.18
 cffi==1.11.2
 chalice==1.0.4
 chardet==3.0.4
@@ -21,7 +21,7 @@ connexion==1.1.15
 cookies==2.2.1
 coverage==4.4.2
 crcmod==1.7
-cryptography==2.1.3
+cryptography==2.1.4
 docker==2.6.1
 docker-pycreds==0.2.1
 docutils==0.14
@@ -36,6 +36,7 @@ google-cloud-core==0.28.0
 google-cloud-storage==1.6.0
 google-resumable-media==0.3.1
 googleapis-common-protos==1.5.3
+google-apitools==0.5.21
 httpie==0.9.9
 idna==2.6
 inflection==0.3.1
diff --git a/requirements.txt b/requirements.txt
index 42679d689c..8f45bd31c6 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,14 +6,14 @@ azure-storage==0.36.0
 boto3==1.4.8
 botocore==1.8.3
 cachetools==2.0.1
-certifi==2017.11.5
+certifi==2018.1.18
 cffi==1.11.2
 chardet==3.0.4
 click==6.7
 clickclick==1.2.2
 cloud-blobstore==2.0.0
 connexion==1.1.15
-cryptography==2.1.3
+cryptography==2.1.4
 docutils==0.14
 elasticsearch==5.5.1
 elasticsearch-dsl==5.3.0
@@ -24,6 +24,7 @@ google-cloud-core==0.28.0
 google-cloud-storage==1.6.0
 google-resumable-media==0.3.1
 googleapis-common-protos==1.5.3
+google-apitools==0.5.21
 idna==2.6
 inflection==0.3.1
 iso8601==0.1.12
diff --git a/requirements.txt.in b/requirements.txt.in
index d03795ef85..f6967341a6 100644
--- a/requirements.txt.in
+++ b/requirements.txt.in
@@ -6,6 +6,7 @@ connexion == 1.1.15 # pinned by akislyuk due to upstream breaking changes in aut
 elasticsearch >= 5.4.0, < 6.0.0
 elasticsearch-dsl >= 5.3.0
 google-cloud-storage >= 1.4.0
+google-apitools >= 0.5.0
 iso8601 >= 0.1.12
 jsonschema >= 2.6.0
 nestedcontext >= 0.0.4
diff --git a/scripts/deploy_gcf.py b/scripts/deploy_gcf.py
index d6be7b06f6..484b0cdea6 100755
--- a/scripts/deploy_gcf.py
+++ b/scripts/deploy_gcf.py
@@ -6,8 +6,11 @@
 import os, sys, time, io, zipfile, random, string, binascii, datetime, argparse, base64
 import boto3
+import socket
+import httplib2
 import google.cloud.storage
 import google.cloud.exceptions
+from apitools.base.py import http_wrapper
 from google.cloud.client import ClientWithProject
 from google.cloud._http import JSONConnection
 from urllib3.util.retry import Retry

@@ -33,8 +36,6 @@ class GoogleRuntimeConfigConnection(JSONConnection):
 args.gcf_name = "-".join([args.src_dir, os.environ["DSS_DEPLOYMENT_STAGE"]])
 gcp_region = os.environ["GCP_DEFAULT_REGION"]
-gcp_key_file = os.environ["GOOGLE_APPLICATION_CREDENTIALS"]
-gs = google.cloud.storage.Client.from_service_account_json(gcp_key_file)
 gcp_client = GCPClient()
 gcp_client._http.adapters["https://"].max_retries = Retry(status_forcelist={503, 504})
 grtc_conn = GoogleRuntimeConfigConnection(client=gcp_client)

@@ -66,54 +67,75 @@ class GoogleRuntimeConfigConnection(JSONConnection):
     except google.cloud.exceptions.Conflict:
         grtc_conn.api_request("PUT", f"/{var_ns}/{k}", data=dict(name=f"{var_ns}/{k}", value=b64v))

-try:
-    now = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
-    deploy_filename = "{}-deploy-{}-{}.zip".format(args.gcf_name, now, binascii.hexlify(os.urandom(4)).decode())
-    deploy_blob = gs.bucket(os.environ["DSS_GS_BUCKET_TEST_FIXTURES"]).blob(deploy_filename)
-    with io.BytesIO() as buf:
-        with zipfile.ZipFile(buf, 'w', compression=zipfile.ZIP_DEFLATED) as zbuf:
-            for root, dirs, files in os.walk(args.src_dir):
-                for f in files:
-                    archive_path = os.path.relpath(os.path.join(root, f), args.src_dir)
-                    if archive_path.startswith("node_modules"):
-                        continue
-                    print("Adding", archive_path)
-                    zbuf.write(os.path.join(root, f), archive_path)
-            zbuf.close()
-        deploy_blob.upload_from_string(buf.getvalue())
-    print("Uploaded", deploy_blob)
-
-    gcf_config = {
-        "name": f"{gcf_ns}/{args.gcf_name}",
-        "entryPoint": args.entry_point,
-        "timeout": "60s",
-        "availableMemoryMb": 256,
-        "sourceArchiveUrl": f"gs://{deploy_blob.bucket.name}/{deploy_blob.name}",
-        "eventTrigger": {
-            "eventType": "providers/cloud.storage/eventTypes/object.change",
-            "resource": "projects/_/buckets/" + os.environ['DSS_GS_BUCKET']
-        }
+resp = gcf_conn.api_request('POST', f'/{gcf_ns}:generateUploadUrl', content_type='application/zip')
+upload_url = resp['uploadUrl']
+
+now = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
+deploy_filename = "{}-deploy-{}-{}.zip".format(args.gcf_name, now, binascii.hexlify(os.urandom(4)).decode())
+with io.BytesIO() as buf:
+    with zipfile.ZipFile(buf, 'w', compression=zipfile.ZIP_DEFLATED) as zbuf:
+        for root, dirs, files in os.walk(args.src_dir):
+            for f in files:
+                archive_path = os.path.relpath(os.path.join(root, f), args.src_dir)
+                if archive_path.startswith("node_modules"):
+                    continue
+                print("Adding", archive_path)
+                zbuf.write(os.path.join(root, f), archive_path)
+        zbuf.close()
+
+    upload_data = buf.getvalue()
+
+    # BEGIN: xbrianh - Code reproduced-ish from Google gcloud utility
+    upload_request = http_wrapper.Request(
+        upload_url, http_method='PUT',
+        headers={
+            'content-type': 'application/zip',
+            # Magic header, request will fail without it.
+            # Not documented at the moment this comment was being written.
+            'x-goog-content-length-range': '0,104857600',
+            'Content-Length': '{0:d}'.format(len(upload_data)) }
+    )
+    upload_request.body = upload_data
+    if socket.getdefaulttimeout() is not None:
+        http_timeout = socket.getdefaulttimeout()
+    else:
+        http_timeout = 60
+    response = http_wrapper.MakeRequest(
+        httplib2.Http(timeout=http_timeout),
+        upload_request,
+    )
+    # END
+
+gcf_config = {
+    "name": f"{gcf_ns}/{args.gcf_name}",
+    "entryPoint": args.entry_point,
+    "timeout": "60s",
+    "availableMemoryMb": 256,
+    "sourceUploadUrl": upload_url,
+    "eventTrigger": {
+        "eventType": "providers/cloud.storage/eventTypes/object.change",
+        "resource": "projects/_/buckets/" + os.environ['DSS_GS_BUCKET']
     }
+}

-    try:
-        print(gcf_conn.api_request("POST", f"/{gcf_ns}", data=gcf_config))
-    except google.cloud.exceptions.Conflict:
-        print(gcf_conn.api_request("PUT", f"/{gcf_ns}/{args.gcf_name}", data=gcf_config))
+try:
+    print(gcf_conn.api_request("POST", f"/{gcf_ns}", data=gcf_config))
+except google.cloud.exceptions.Conflict:
+    print(gcf_conn.api_request("PUT", f"/{gcf_ns}/{args.gcf_name}", data=gcf_config))

-    sys.stderr.write("Waiting for deployment...")
+sys.stderr.write("Waiting for deployment...")
+sys.stderr.flush()
+for t in range(600):
+    if gcf_conn.api_request("GET", f"/{gcf_ns}/{args.gcf_name}")["status"] != "DEPLOYING":
+        break
+    sys.stderr.write(".")
     sys.stderr.flush()
-    for t in range(90):
-        if gcf_conn.api_request("GET", f"/{gcf_ns}/{args.gcf_name}")["status"] != "DEPLOYING":
-            break
-        sys.stderr.write(".")
-        sys.stderr.flush()
-        time.sleep(1)
-    else:
-        sys.exit("Timeout while waiting for GCF deployment to complete")
-    sys.stderr.write("done\n")
-
-    res = gcf_conn.api_request("GET", f"/{gcf_ns}/{args.gcf_name}")
-    print(res)
-    assert res["status"] == "READY"
-finally:
-    deploy_blob.delete()
+    time.sleep(5)
+else:
+    sys.exit("Timeout while waiting for GCF deployment to complete")
+sys.stderr.write("done\n") + +res = gcf_conn.api_request("GET", f"/{gcf_ns}/{args.gcf_name}") +print(res) +assert res["status"] == "READY"
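Note on the upload step above: the script first POSTs to the :generateUploadUrl endpoint, which returns a signed URL; the zipped source is then PUT to that URL, and the function is created or updated with sourceUploadUrl pointing at it. The x-goog-content-length-range header caps the upload at 104857600 bytes (100 MiB), and per the comment in the patch the signed URL rejects requests that omit it. For illustration only, a minimal sketch of the same PUT using the requests library instead of apitools/httplib2 (upload_gcf_source is a hypothetical helper, not part of this patch; assumes requests is installed):

import requests

def upload_gcf_source(upload_url: str, upload_data: bytes, timeout: int = 60) -> None:
    # PUT the zipped source to the signed URL returned by generateUploadUrl.
    # requests sets Content-Length automatically for a bytes body.
    resp = requests.put(
        upload_url,
        data=upload_data,
        headers={
            "content-type": "application/zip",
            # Same undocumented header as in the patch; uploads without it fail.
            "x-goog-content-length-range": "0,104857600",
        },
        timeout=timeout,
    )
    resp.raise_for_status()  # surface a failed upload as an exception

Using a signed upload URL removes the need for the service-account JSON key and the staging bucket that the deleted code depended on, which is why GOOGLE_APPLICATION_CREDENTIALS and the deploy_blob cleanup drop out of the script.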