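"""Utility helpers for zipping, uploading, downloading, and backing up artwork and build assets between the local filesystem, DigitalOcean Spaces, and Azure Blob Storage."""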
import mimetypes
import os
import sys
import zipfile
from pathlib import Path

# third party
import boto3
from azure.storage.blob import BlobServiceClient, ContentSettings
from boto3 import Session
from dotenv import load_dotenv

# local
from DirectoryClient import DirectoryClient

load_dotenv()

DO_SPACE = os.getenv("DO_SPACE")
DO_ACCESS_KEY_ID = os.getenv("DO_ACCESS_KEY_ID")
DO_SECRET_ACCESS_KEY = os.getenv("DO_SECRET_ACCESS_KEY")
AZURE_STORAGE_ACCOUNT_NAME = os.getenv("AZURE_STORAGE_ACCOUNT_NAME")
AZURE_STORAGE_CONNECTION_STRING = os.getenv("AZURE_STORAGE_CONNECTION_STRING")


def azure_get_blob_service_client():
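    """Return a BlobServiceClient built from AZURE_STORAGE_CONNECTION_STRING, or None if it cannot be created."""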
    blob_service_client = None
    try:
        blob_service_client = BlobServiceClient.from_connection_string(
            AZURE_STORAGE_CONNECTION_STRING
        )
    except Exception as ex:
        print(ex)
    return blob_service_client


def azure_backup_container(src_container, dest_container):
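    """Start a server-side copy of every blob in src_container into dest_container."""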
    blob_service_client = azure_get_blob_service_client()
    src_container_url = (
        f"https://{AZURE_STORAGE_ACCOUNT_NAME}.blob.core.windows.net/{src_container}"
    )
    src_container_client = DirectoryClient(
        AZURE_STORAGE_CONNECTION_STRING, src_container
    )
    dest_container_client = blob_service_client.get_container_client(dest_container)
    for blob in src_container_client.ls_files(path=""):
        blob_path = blob.replace("\\", "/")
        blob_url = f"{src_container_url}/{blob_path}"
        dest_blob = dest_container_client.get_blob_client(blob_path)
        print(f"Start copying: {blob_url}")
        dest_blob.start_copy_from_url(blob_url)
    # containers = blob_service_client.make_blob_url(src_container, "index.html")
    # return containers


def azure_create_container(container):
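    """Create a new blob container and print its properties; errors are printed rather than raised."""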
    try:
        print(f"Creating container: {container}")
        blob_service_client = azure_get_blob_service_client()
        container_client = blob_service_client.get_container_client(container)
        container_client.create_container()
        properties = container_client.get_container_properties()
        print(f"----------- properties of new container {container} --------------")
        print(properties)
        print("-------------------------------------------------------------------")
    except Exception as ex:
        print(ex)


def azure_get_containers(prefix):
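    """Return the containers whose names start with prefix."""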
    blob_service_client = azure_get_blob_service_client()
    containers = blob_service_client.list_containers(name_starts_with=prefix)
    return containers


def azure_delete_dir(container, prefix):
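    """Delete the blobs under prefix in container."""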
    client = DirectoryClient(AZURE_STORAGE_CONNECTION_STRING, container)
    client.rmdir(prefix)


def azure_download(container, source, dest):
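    """Download source from container to the local path dest."""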
    client = DirectoryClient(AZURE_STORAGE_CONNECTION_STRING, container)
    client.download(source, dest)


# TODO: needs some integration with guessing mimetypes
# def azure_upload(container, source, dest):
#     client = DirectoryClient(AZURE_STORAGE_CONNECTION_STRING, container)
#     client.upload(source, dest)


def azure_upload_dir(local_directory, container):
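    """Upload every file under local_directory to container, setting each blob's content type from guess_mimetype()."""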
    try:
        blob_service_client = azure_get_blob_service_client()
        container_client = blob_service_client.get_container_client(container)
        for root, dirs, files in os.walk(local_directory):
            for filename in files:
                # construct the full local path
                local_path = os.path.join(root, filename)
                mimetype = guess_mimetype(local_path)
                # construct the full remote path
                relative_path = os.path.relpath(local_path, local_directory)
                # Upload the file
                print("Uploading:\t" + relative_path)
                content_settings = ContentSettings(content_type=mimetype)
                with open(local_path, "rb") as data:
                    container_client.upload_blob(
                        name=relative_path, data=data, content_settings=content_settings
                    )
    except Exception as ex:
        print(ex)


def guess_mimetype(local_file, default_mimetype="binary/octet-stream"):
"""
:param local_file:
:param default_mimetype:
:return: String
https://gist.github.com/feelinc/d1f541af4f31d09a2ec3
"""
mimetype, _ = mimetypes.guess_type(local_file)
# wouldn't be needed if this is resolved
# https://bugs.python.org/issue39324
if not mimetype and str(local_file).endswith(".md"):
mimetype = "text/markdown"
if not mimetype:
mimetype = default_mimetype
# print(f"Failed to guess mimetype for {local_file}. Setting to {mimetype}")
return mimetype
def do_delete_dir(destination):
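    """Delete every object under the destination prefix from the DigitalOcean Space."""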
    s3 = boto3.resource(
        "s3",
        region_name="nyc3",
        endpoint_url="https://nyc3.digitaloceanspaces.com",
        aws_access_key_id=DO_ACCESS_KEY_ID,
        aws_secret_access_key=DO_SECRET_ACCESS_KEY,
    )
    bucket = s3.Bucket(DO_SPACE)
    bucket.objects.filter(Prefix=destination).delete()


def do_download_dir(client, bucket, remote_folder, local_folder):
"""
params:
- client: initialized s3 client object
- bucket: s3 bucket with target contents
- remote_folder: pattern to match in s3
- local_folder: local path to folder in which to place files
not used now, but could be useful to download the whole _build directory from the Travis CI build to test
https://stackoverflow.com/a/56267603
"""
keys = []
dirs = []
next_token = ""
base_kwargs = {
"Bucket": bucket,
"Prefix": remote_folder,
}
while next_token is not None:
kwargs = base_kwargs.copy()
if next_token != "":
kwargs.update({"ContinuationToken": next_token})
results = client.list_objects_v2(**kwargs)
contents = results.get("Contents")
for i in contents:
k = i.get("Key")
if k[-1] != "/":
keys.append(k)
else:
dirs.append(k)
next_token = results.get("NextContinuationToken")
for d in dirs:
dest_pathname = os.path.join(local_folder, d)
if not os.path.exists(os.path.dirname(dest_pathname)):
os.makedirs(os.path.dirname(dest_pathname))
for k in keys:
dest_pathname = os.path.join(local_folder, k)
if not os.path.exists(os.path.dirname(dest_pathname)):
os.makedirs(os.path.dirname(dest_pathname))
client.download_file(bucket, k, dest_pathname)
def do_download_file(remote_file, local_file, force=False):
"""
Used in Travis CI to download and unzip the file of original artwork from Digitalocean Spaces.
Once run, they should be unzipped so that there is a top level directory called albums.
The structure of the folder is such:
albums/
|--- dmg/
| |--- dmg_bottom/
| | |--- 0001.png
| | |--- 000x.png
| | |--- index.md
| |--- dmg_xxxx/
| |--- index.md
|--- maybe-later/
|--- phb/
|--- templates/
|--- index.md
"""
session = Session()
client = session.client(
"s3",
region_name="nyc3",
endpoint_url="https://nyc3.digitaloceanspaces.com",
aws_access_key_id=DO_ACCESS_KEY_ID,
aws_secret_access_key=DO_SECRET_ACCESS_KEY,
)
file = Path(local_file)
if not file.exists():
print(f"Downloading: {remote_file}")
elif force:
print(f"Forcing download, overwriting {local_file}")
else:
print(f"{local_file} already downloaded, using local copy")
return
client.download_file(DO_SPACE, remote_file, local_file)
def do_upload_dir(local_directory, destination):
"""
:param local_directory:
:param destination:
:return:
"""
# Digitalocean Spaces
bucket = DO_SPACE
session = Session()
client = session.client(
"s3",
region_name="nyc3",
endpoint_url="https://nyc3.digitaloceanspaces.com",
aws_access_key_id=DO_ACCESS_KEY_ID,
aws_secret_access_key=DO_SECRET_ACCESS_KEY,
)
# enumerate local files recursively
for root, dirs, files in os.walk(local_directory):
for filename in files:
# construct the full local path
local_path = os.path.join(root, filename)
mimetype = guess_mimetype(local_path)
# construct the full Dropbox path
relative_path = os.path.relpath(local_path, local_directory)
s3_path = os.path.join(destination, relative_path)
# relative_path = os.path.relpath(os.path.join(root, filename))
# print(f"Searching {s3_path} in {bucket}")
try:
client.head_object(Bucket=bucket, Key=s3_path)
print(f"Path found on S3! Skipping {s3_path}...")
# try:
# client.delete_object(Bucket=bucket, Key=s3_path)
# except:
# print(f"Unable to delete {s3_path}...")
except:
print(f"Uploading {s3_path}...")
client.upload_file(
local_path,
bucket,
s3_path,
ExtraArgs={"ACL": "public-read", "ContentType": mimetype},
)
def do_upload_file(archive_file, upload_location):
"""
:param archive_file:
:param upload_location:
:return:
"""
session = Session()
client = session.client(
"s3",
region_name="nyc3",
endpoint_url="https://nyc3.digitaloceanspaces.com",
aws_access_key_id=DO_ACCESS_KEY_ID,
aws_secret_access_key=DO_SECRET_ACCESS_KEY,
)
client.upload_file(archive_file, DO_SPACE, upload_location)
def unzip_file(filename):
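    """Extract filename into the current working directory."""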
    with zipfile.ZipFile(filename, "r") as zip_ref:
        print(f"Extracting {filename}. Should create a top level `albums` directory.")
        zip_ref.extractall()


def zipdir(dir_to_zip, archive_file):
"""
:param dir_to_zip:
:param archive_file:
:return:
https://stackoverflow.com/a/1855118
"""
with zipfile.ZipFile(archive_file, "w", zipfile.ZIP_DEFLATED) as zipf:
for root, dirs, files in os.walk(dir_to_zip):
for file in files:
zipf.write(os.path.join(root, file))
def remove_empty_folders(path, remove_root=True):
"""Function to remove empty folders."""
if not os.path.isdir(path):
return
# remove empty subfolders
files = os.listdir(path)
if len(files):
for f in files:
fullpath = os.path.join(path, f)
if os.path.isdir(fullpath):
remove_empty_folders(fullpath)
# if folder empty, delete it
files = os.listdir(path)
if len(files) == 0 and remove_root:
# print("Removing empty folder:", path)
os.rmdir(path)