gcs.py
import os
import logging
from datetime import timedelta, datetime
from flask import request, abort, make_response, current_app, jsonify
from flask.views import MethodView
from google.appengine.api import images, app_identity
from google.appengine.ext import blobstore
import cloudstorage
cloudstorage.set_default_retry_params(
    cloudstorage.RetryParams(
        initial_delay=0.2, max_delay=5.0, backoff_factor=2, max_retry_period=15
    )
)
from . import utils
GCS_BUCKET = os.environ['GCS_BUCKET']
PUBLIC_DOMAIN = os.environ['PUBLIC_DOMAIN']
ALLOW_ORIGINS = os.environ['ALLOW_ORIGINS'].split(',')
# Restrict uploads to these extensions.
# The leading '.' must be included for comparison with splitext().
EXTENSIONS = ['.webp', '.jpg', '.jpeg', '.png', '.gif']
SIGNED_URL_EXPIRES_SECONDS = 900 # 15 minutes
FILEPATH_HASH_LENGTH = 8
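# Illustrative environment configuration (values below are assumptions, not real settings):
#   GCS_BUCKET=my-app.appspot.com
#   PUBLIC_DOMAIN=cdn.example.com
#   ALLOW_ORIGINS=https://example.com,https://admin.example.com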

def abort_json(status_code, message):
    json_response = jsonify({"error": {"kind": "abort", "message": message}})
    json_response.status_code = status_code
    abort(json_response)

def make_response_validation_error(param, location='query', message='There was an input validation error', expected='string'):
    response = jsonify({
        "error": {
            "kind": "validation",
            "location": location,
            "param": param,
            "message": message,
            "example": expected
        }
    })
    response.status_code = 422
    return response
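# For example, make_response_validation_error('filename', message='Parameter filename is required')
# produces a 422 response with this body (illustrative, derived directly from the code above):
#   {"error": {"kind": "validation", "location": "query", "param": "filename",
#              "message": "Parameter filename is required", "example": "string"}}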

def add_cors_headers(response):
    # Allow our origins (can be multiple)
    response.headers.extend([('Access-Control-Allow-Origin', origin) for origin in ALLOW_ORIGINS])
    # Allow the actual methods
    response.headers['Access-Control-Allow-Methods'] = 'GET, POST, DELETE'
    # Allow the preflight response to be cached for 60 seconds
    response.headers['Access-Control-Max-Age'] = "60"
    # Allow sending of cookies and HTTP auth headers
    response.headers['Access-Control-Allow-Credentials'] = "true"
    # The 'preflight' request lists the non-standard headers the real request will have (like X-Api-Key).
    # NOTE: we could filter out headers we don't want to allow here.
    request_headers = request.headers.get('Access-Control-Request-Headers')
    if request_headers:
        response.headers['Access-Control-Allow-Headers'] = request_headers
    return response

class BaseUpload(MethodView):

    def options(self):
        """Allow CORS preflight requests for the configured origins."""
        resp = current_app.make_default_options_response()
        return add_cors_headers(resp)

    def get(self):
        """Create a signed URL for uploading an image/file object to Google Cloud Storage.
        Returns: {
            "upload": {
                "method": http_method,
                "url": signed_url,
                "expires": expiration
            },
            "object": {
                "path": filepath in gcs bucket,
                "location": storage bucket and path,
                "url": url to access the file via https
            }
        }
        """
        filename = request.args.get('filename')
        if not filename:
            return make_response_validation_error('filename', message='Parameter filename is required')
        datetime_now = datetime.utcnow()
        # generate a unique file path for new file uploads
        salt = utils.random_hash(FILEPATH_HASH_LENGTH)
        filename = os.path.basename(filename)
        filename, file_extension = os.path.splitext(filename)
        if file_extension not in self.extensions:
            message = "Parameter filename has an invalid extension, please only send {}".format(self.extensions)
            return make_response_validation_error('filename', message=message)
        # remove unicode and other rubbish from the filename
        slug = utils.slugify(filename)
        # assemble the filepath as year/month/randomsalt/slug.extension
        filepath = "{}/{}/{}/{}{}".format(
            datetime_now.year,
            datetime_now.strftime('%m'),
            salt,
            slug,
            file_extension
        )
        http_method = 'PUT'
        expires = datetime_now + timedelta(seconds=SIGNED_URL_EXPIRES_SECONDS)
        # generate the signed url
        signed_url = utils.generate_gcs_v4_signed_url(self.bucket, filepath, http_method, SIGNED_URL_EXPIRES_SECONDS)
        response = jsonify({
            "upload": {
                "method": http_method,
                "url": signed_url,
                "expires": expires.isoformat()
            },
            "object": self._object_schema(filepath)
        })
        return add_cors_headers(response)
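
    # Illustrative round trip for this endpoint (the route and all values are
    # assumptions, not real output):
    #   GET /images?filename=photo.jpg
    #   -> {"upload": {"method": "PUT",
    #                  "url": "https://storage.googleapis.com/<signed-url>",
    #                  "expires": "2019-01-31T10:15:00"},
    #       "object": {"path": "2019/01/a1b2c3d4/photo.jpg",
    #                  "url": "https://<PUBLIC_DOMAIN>/2019/01/a1b2c3d4/photo.jpg",
    #                  "location": "gs://<GCS_BUCKET>/2019/01/a1b2c3d4/photo.jpg"}}
    # The client then PUTs the file bytes directly to the signed URL before it expires.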

    def _object_schema(self, filepath, dynamic_url=None):
        o = {
            "path": filepath,
            "url": "https://{}/{}".format(self.domain, filepath),
            "location": "gs://{}/{}".format(self.bucket, filepath)
        }
        if dynamic_url:
            o['dynamic_url'] = dynamic_url
        return o

    def delete(self):
        """Delete the original file and its dynamic serving url if one exists."""
        filepath = request.args.get('filepath')
        if not filepath:
            return make_response_validation_error('filepath', message='Parameter filepath is required')
        try:
            # cloudstorage object names take the form '/bucket/path'
            cloudstorage.delete(u'/{}/{}'.format(self.bucket, filepath))
        except cloudstorage.AuthorizationError:
            abort_json(401, "Unauthorized request has been received by GCS.")
        except cloudstorage.ForbiddenError:
            abort_json(403, "Cloud Storage Forbidden Error. GCS replies with a 403 for many reasons; the most common is that bucket permissions are not set up correctly for your app.")
        except cloudstorage.NotFoundError:
            abort_json(404, filepath + " not found on GCS in bucket " + self.bucket)
        except cloudstorage.TimeoutError:
            abort_json(408, 'Remote timed out')
        # TODO get the query string and delete file if asked to
        blobstore_filename = u'/gs/{}/{}'.format(self.bucket, filepath)
        blob_key = blobstore.create_gs_key(blobstore_filename)
        try:
            images.delete_serving_url(blob_key)
        except images.AccessDeniedError:
            abort_json(403, "App Engine Images API access denied. The file has already been deleted from Cloud Storage.")
        except images.ObjectNotFoundError:
            pass
        return '', 204
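
    # Illustrative call (route and path are assumptions): a client that previously
    # received "path": "2019/01/a1b2c3d4/photo.jpg" removes the object with
    #   DELETE /images?filepath=2019/01/a1b2c3d4/photo.jpg
    # which deletes the GCS object and, if one was created, its dynamic serving URL.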

class ImagesAPI(BaseUpload):
    bucket = GCS_BUCKET
    domain = PUBLIC_DOMAIN
    extensions = EXTENSIONS

    def post(self):
        """Create a dynamic serving url.
        Returns: {
            "object": {
                "path": filepath in bucket,
                "location": storage bucket and path,
                "url": url to access the file via https,
                "dynamic_url": url to access the file with dynamic image handling
            }
        }
        """
        filepath = request.args.get('path')
        if not filepath:
            return make_response_validation_error('path', message='Parameter path is required and should contain the GCS object name')
        blobstore_filename = u'/gs/{}/{}'.format(self.bucket, filepath)
        blob_key = blobstore.create_gs_key(blobstore_filename)
        try:
            dynamic_url = images.get_serving_url(blob_key, secure_url=True)
            # return the dynamic url with the rest of the object data
            response = jsonify({
                "object": self._object_schema(filepath, dynamic_url)
            })
            response.status_code = 201
            return add_cors_headers(response)
        except images.AccessDeniedError:
            abort_json(403, u"App Engine Images API access denied. The file may have already been deleted from Cloud Storage.")
        except images.ObjectNotFoundError:
            abort_json(404, u"App Engine Images API could not find " + filepath + " in Cloud Storage bucket " + self.bucket)
        except images.NotImageError:
            abort_json(405, u"App Engine Images API says " + filepath + " is not an image")
        except (images.TransformationError, images.UnsupportedSizeError, images.LargeImageError) as e:
            logging.exception('Requires investigation')
            abort_json(409, str(e))
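
# A minimal wiring sketch (assumption: the real blueprint/app registration lives
# elsewhere in this package; the URL rule and endpoint name here are illustrative):
#
#     from flask import Flask
#     app = Flask(__name__)
#     app.add_url_rule('/images', view_func=ImagesAPI.as_view('images_api'))
#
# Flask's MethodView then dispatches GET/POST/DELETE/OPTIONS requests on that rule
# to the handlers defined above.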