forked from kjkjava/garmin-connect-export
-
Notifications
You must be signed in to change notification settings - Fork 19
/
gcexport3.py
executable file
·785 lines (717 loc) · 30 KB
/
gcexport3.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
#!/usr/bin/python3.12
# -*- coding: utf-8 -*-
"""File: gcexport.py.
Original author: Kyle Krafka (https://github.com/kjkjava/)
Date: April 28, 2015
Fork author: Michael P (https://github.com/moderation/)
Date: December 28, 2023
Description: Use this script to export your fitness data from Garmin Connect.
See README.md for more information.
Activity & event types:
https://connect.garmin.com/modern/main/js/properties/event_types/event_types.properties
https://connect.garmin.com/modern/main/js/properties/activity_types/activity_types.properties
"""
import argparse
import http.cookiejar
import json
import logging
import re
import urllib.parse
import urllib.request
import zipfile
from datetime import datetime, timedelta
from getpass import getpass
from pathlib import Path
from subprocess import call
from sys import argv, exit
from urllib.error import HTTPError, URLError
import garth
from defusedxml.minidom import parseString
SCRIPT_VERSION = "3.0.0"
# Used to build the default dated output directory name.
CURRENT_DATE = datetime.now().strftime("%Y-%m-%d")
ACTIVITIES_DIRECTORY = "./" + CURRENT_DATE + "_garmin_connect_export"
# HTTP status codes the script handles explicitly.
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/200
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/204
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/404
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/500
HTTP_OK = 200
HTTP_NO_CONTENT = 204
HTTP_NOT_FOUND = 404
HTTP_INTERNAL_SERVER_ERROR = 500
# Command-line interface.
PARSER = argparse.ArgumentParser()
# TODO @moderation: Implement verbose and/or quiet options.
# PARSER.add_argument('-v', '--verbose', help="increase output verbosity", action="store_true")
PARSER.add_argument("--version", help="print version and exit", action="store_true")
PARSER.add_argument(
    "--username",
    help="your Garmin Connect username (otherwise, you will be prompted)",
    nargs="?",
)
PARSER.add_argument(
    "--password",
    help="your Garmin Connect password (otherwise, you will be prompted)",
    nargs="?",
)
PARSER.add_argument(
    "-c",
    "--count",
    nargs="?",
    default="1",
    help="number of recent activities to download, or 'all' (default: 1)",
)
PARSER.add_argument(
    "-e",
    "--external",
    nargs="?",
    default="",
    help="path to external program to pass CSV file too (default: )",
)
PARSER.add_argument(
    "-a",
    "--args",
    nargs="?",
    default="",
    help="additional arguments to pass to external program (default: )",
)
PARSER.add_argument(
    "-f",
    "--format",
    nargs="?",
    choices=["gpx", "tcx", "original"],
    default="gpx",
    help="export format; can be 'gpx', 'tcx', or 'original' (default: 'gpx')",
)
PARSER.add_argument(
    "-d",
    "--directory",
    nargs="?",
    default=ACTIVITIES_DIRECTORY,
    help="the directory to export to (default: './YYYY-MM-DD_garmin_connect_export')",
)
PARSER.add_argument(
    "-u",
    "--unzip",
    help=(
        "if downloading ZIP files (format: 'original'), unzip the file and removes the"
        " ZIP file"
    ),
    action="store_true",
)
ARGS = PARSER.parse_args()
# --version prints and exits before any network or filesystem work.
if ARGS.version:
    print(argv[0] + ", version " + SCRIPT_VERSION)
    exit(0)
# Shared opener used by http_req(); carries cookies across requests.
COOKIE_JAR = http.cookiejar.CookieJar()
OPENER = urllib.request.build_opener(
    urllib.request.HTTPCookieProcessor(COOKIE_JAR),
    urllib.request.HTTPSHandler(debuglevel=0),
)
# print(COOKIE_JAR)
def hhmmss_from_seconds(sec):
    """Convert a duration in seconds to an ``HH:MM:SS`` string.

    Accepts both int and float second counts (the original only accepted
    float, so integer durations from the API were silently rendered as the
    placeholder). Non-numeric input still yields the ``"0.000"`` placeholder
    used elsewhere in the CSV output.
    """
    if isinstance(sec, (int, float)):
        # int(sec) drops fractional seconds; zfill pads "H:MM:SS" to "HH:MM:SS".
        return str(timedelta(seconds=int(sec))).zfill(8)
    return "0.000"
def kmh_from_mps(mps):
    """Convert a speed in metres per second to a km/h value, as a string."""
    kmh = mps * 3.6
    return str(kmh)
def write_to_file(filename, content, mode):
    """Persist *content* to *filename*.

    Args:
        filename: Path of the file to write (str or Path).
        content: Text (for "w"/"a" modes) or bytes (for "wb" mode).
        mode: Open mode passed straight to Path.open().

    The original called write_file.close() inside the ``with`` block,
    which is redundant — the context manager closes the file.
    """
    with Path(filename).open(mode) as write_file:
        write_file.write(content)
def decoding_decider(data):
    """Decode downloaded bytes to text unless the raw ZIP was requested.

    GPX and TCX downloads are UTF-8 text and are decoded; the 'original'
    format is a binary ZIP file and must be returned untouched.
    """
    if ARGS.format == "original":
        return data
    return data.decode()
# url is a string, post is a dictionary of POST parameters, headers is a dictionary of headers.
def http_req(url, post=None, headers=None):
    """Make an HTTP request against Garmin Connect and return the body.

    Args:
        url: Fully-qualified URL to request.
        post: Optional dict of POST parameters; when given, the request
            becomes a POST with a urlencoded UTF-8 body.
        headers: Optional dict of extra headers added to the request.

    Returns:
        The raw response body (bytes), or "" for a 204 No Content reply.

    Raises:
        HTTPError / URLError: re-raised after logging on transport failure.
        Exception: for any status code other than 200/204.
    """
    request = urllib.request.Request(url)
    # Tell Garmin we're some supported browser.
    request.add_header(
        "User-Agent",
        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36",
    )
    request.add_header(
        "nk",
        "NT",
    )  # necessary since 2021-02-23 to avoid http error code 402
    # Authenticate with the OAuth2 token obtained by garth at login time,
    # and route through Garmin's connectapi backend.
    request.add_header("authorization", str(garth.client.oauth2_token))
    request.add_header("di-backend", "connectapi.garmin.com")
    if headers:
        for header_key, header_value in headers.items():
            request.add_header(header_key, header_value)
    if post:
        post = urllib.parse.urlencode(post)
        post = post.encode("utf-8")  # Convert dictionary to POST parameter string.
    # print("request.headers: " + str(request.headers) + " COOKIE_JAR: " + str(COOKIE_JAR))
    # print("post: " + str(post) + "request: " + str(request))
    try:
        response = OPENER.open(request, data=post)
    except HTTPError as ex:
        # HTTPError always carries .code; log details before re-raising.
        if hasattr(ex, "code"):
            logging.error(
                "Server couldn't fulfill the request, url %s, code %s, error: %s",
                url,
                ex.code,
                ex,
            )
            logging.info("Headers returned:\n%s", ex.info())
        raise
    except URLError as ex:
        if hasattr(ex, "reason"):
            logging.error("Failed to reach url %s, error: %s", url, ex)
        raise
    logging.debug("Got %s from %s", response.getcode(), url)
    logging.debug("Headers returned:\n%s", response.info())
    if response.getcode() == HTTP_NO_CONTENT:
        # For activities without GPS coordinates, there is no GPX download (204 = no content).
        # Write an empty file to prevent redownloading it.
        print("Writing empty file since there was no GPX activity data...")
        return ""
    elif response.getcode() != HTTP_OK:
        raise Exception(f"Bad return code ({response.getcode()}) for: {url}")
    # print(response.getcode())
    return response.read()
print("Welcome to Garmin Connect Exporter!")
# Create directory for data files.
if Path(ARGS.directory).is_dir():
    print(
        "Warning: Output directory already exists. Will skip already-downloaded files"
        " and append to the CSV file.",
    )
# Prompt for credentials when they were not supplied on the command line.
USERNAME = ARGS.username if ARGS.username else input("Username: ")
PASSWORD = ARGS.password if ARGS.password else getpass()
# Maximum number of activities you can request at once. Set and enforced by Garmin.
LIMIT_MAXIMUM = 1000
BASE_URL = "https://connect.garmin.com"
REDIRECT = f"{BASE_URL}/modern/"
SIGNIN = f"{BASE_URL}/en-US/signin"
# NOTE(review): DATA describes Garmin's legacy SSO login form. Authentication is
# performed by garth below; this dict is only urlencoded and printed here —
# it appears to be a leftover and is otherwise unused.
DATA = {
    "service": REDIRECT,
    "webhost": BASE_URL,
    "source": SIGNIN,
    "redirectAfterAccountLoginUrl": REDIRECT,
    "redirectAfterAccountCreationUrl": REDIRECT,
    "locale": "en_US",
    "id": "gauth-widget",
    "clientId": "GarminConnect",
    "rememberMeShown": "true",
    "rememberMeChecked": "false",
    "createAccountShown": "true",
    "openCreateAccount": "false",
    "displayNameShown": "false",
    "consumeServiceTicket": "false",
    "initialFocus": "true",
    "embedWidget": "false",
    "generateExtraServiceTicket": "true",
    "generateTwoExtraServiceTickets": "false",
    "generateNoServiceTicket": "false",
    "globalOptInShown": "true",
    "globalOptInChecked": "false",
    "mobile": "false",
    "connectLegalTerms": "true",
    "locationPromptShown": "true",
    "showPassword": "true",
}
print(urllib.parse.urlencode(DATA))
# URLs for various services.
URL_GC_PROFILE = f"{BASE_URL}/userprofile-service/socialProfile"
URL_GC_USERSTATS = f"{BASE_URL}/userstats-service/statistics/"
URL_GC_LIST = f"{BASE_URL}/activitylist-service/activities/search/activities?"
URL_GC_ACTIVITY = f"{BASE_URL}/activity-service/activity/"
URL_GC_GPX_ACTIVITY = f"{BASE_URL}/download-service/export/gpx/activity/"
URL_GC_TCX_ACTIVITY = f"{BASE_URL}/download-service/export/tcx/activity/"
URL_GC_ORIGINAL_ACTIVITY = f"{BASE_URL}/download-service/files/activity/"
URL_DEVICE_DETAIL = f"{BASE_URL}/device-service/deviceservice/app-info/"
URL_GEAR_DETAIL = f"{BASE_URL}/gear-service/gear/filterGear?activityId="
class GarminError(Exception):
    """Exception for problems with Garmin Connect (connection, data consistency etc)."""
# Log in via garth (handles Garmin SSO and OAuth token exchange).
# The broad except is deliberate: any failure here is surfaced as a
# GarminError with the original exception chained as the cause.
try:
    garth.login(USERNAME, PASSWORD)
except Exception as ex:
    msg = f"Authentication failure ({ex}). Did you enter correct credentials?"
    raise GarminError(msg) from ex
print("Finish login post")
# We should be logged in now.
if not Path(ARGS.directory).is_dir():
    Path(ARGS.directory).mkdir()
CSV_FILENAME = ARGS.directory + "/activities.csv"
CSV_EXISTED = Path(CSV_FILENAME).is_file()
# The CSV file stays open ("a" = append) for the whole download loop;
# one row is appended per activity.
with Path(CSV_FILENAME).open("a") as CSV_FILE:
    # Write header to CSV file only when creating it fresh.
    if not CSV_EXISTED:
        CSV_FILE.write(
            "Activity name,Description,Bike,Begin timestamp,Duration (h:m:s),Moving"
            " duration (h:m:s),Distance (km),Average speed (km/h),Average moving speed"
            " (km/h),Max. speed (km/h),Elevation loss uncorrected (m),Elevation gain"
            " uncorrected (m),Elevation min. uncorrected (m),Elevation max. uncorrected"
            " (m),Min. heart rate (bpm),Max. heart rate (bpm),Average heart rate"
            " (bpm),Calories,Avg. cadence (rpm),Max. cadence (rpm),Strokes,Avg. temp"
            " (°C),Min. temp (°C),Max. temp (°C),Map,End timestamp,Begin timestamp (ms),End"
            " timestamp (ms),Device,Activity type,Event type,Time zone,Begin latitude"
            " (°DD),Begin longitude (°DD),End latitude (°DD),End longitude (°DD),Elevation"
            " gain corrected (m),Elevation loss corrected (m),Elevation max. corrected"
            " (m),Elevation min. corrected (m),Sample count\n",
        )
    # NOTE(review): DOWNLOAD_ALL is assigned but never read afterwards.
    DOWNLOAD_ALL = False
    if ARGS.count == "all":
        # If the user wants to download all activities, query the userstats
        # on the profile page to know how many are available
        print("Getting display name and user stats via: " + URL_GC_PROFILE)
        PROFILE_PAGE = http_req(URL_GC_PROFILE).decode()
        # write_to_file(args.directory + '/profile.html', profile_page, 'a')
        # extract the display name from the profile page, it should be in there as
        # \"displayName\":\"eschep\"
        PATTERN = re.compile(
            r'.*"displayName":"([-.\w]+)".*',
            re.MULTILINE | re.DOTALL,
        )
        MATCH = PATTERN.match(PROFILE_PAGE)
        if not MATCH:
            msg = "Did not find the display name in the profile page."
            raise Exception(msg)
        DISPLAY_NAME = MATCH.group(1)
        print("displayName=" + DISPLAY_NAME)
        print(URL_GC_USERSTATS + DISPLAY_NAME)
        USER_STATS = http_req(URL_GC_USERSTATS + DISPLAY_NAME)
        print("Finished display name and user stats ~~~~~~~~~~~~~~~~~~~~~~~~~~~")
        # Persist JSON
        write_to_file(ARGS.directory + "/userstats.json", USER_STATS.decode(), "a")
        # Modify total_to_download based on how many activities the server reports.
        JSON_USER = json.loads(USER_STATS)
        TOTAL_TO_DOWNLOAD = int(JSON_USER["userMetrics"][0]["totalActivities"])
    else:
        TOTAL_TO_DOWNLOAD = int(ARGS.count)
    TOTAL_DOWNLOADED = 0
    print("Total to download: " + str(TOTAL_TO_DOWNLOAD))
    # This while loop will download data from the server in multiple chunks, if necessary.
    while TOTAL_DOWNLOADED < TOTAL_TO_DOWNLOAD:
        # Maximum chunk size 'limit_maximum' ... 400 return status if over maximum. So download
        # maximum or whatever remains if less than maximum.
        # As of 2018-03-06 I get return status 500 if over maximum
        if TOTAL_TO_DOWNLOAD - TOTAL_DOWNLOADED > LIMIT_MAXIMUM:
            NUM_TO_DOWNLOAD = LIMIT_MAXIMUM
        else:
            NUM_TO_DOWNLOAD = TOTAL_TO_DOWNLOAD - TOTAL_DOWNLOADED
        SEARCH_PARAMS = {"start": TOTAL_DOWNLOADED, "limit": NUM_TO_DOWNLOAD}
        # Query Garmin Connect
        print(
            "Activity list URL: " + URL_GC_LIST + urllib.parse.urlencode(SEARCH_PARAMS),
        )
        ACTIVITY_LIST = http_req(URL_GC_LIST + urllib.parse.urlencode(SEARCH_PARAMS))
        write_to_file(
            ARGS.directory + "/activity_list.json",
            ACTIVITY_LIST.decode(),
            "a",
        )
        LIST = json.loads(ACTIVITY_LIST)
        # print(LIST)
        # Process each activity.
        for a in LIST:
            # Display which entry we're working on.
            print("Garmin Connect activity: [" + str(a["activityId"]) + "]", end=" ")
            print(a["activityName"])
            # print("\t" + a["uploadDate"]["display"] + ",", end=" ")
            # Pick the per-format download URL, target filename and file mode
            # (GPX/TCX are text; 'original' is a binary ZIP).
            if ARGS.format == "gpx":
                data_filename = (
                    ARGS.directory + "/" + str(a["activityId"]) + "_activity.gpx"
                )
                download_url = URL_GC_GPX_ACTIVITY + str(a["activityId"]) + "?full=true"
                print(download_url)
                file_mode = "w"
            elif ARGS.format == "tcx":
                data_filename = (
                    ARGS.directory + "/" + str(a["activityId"]) + "_activity.tcx"
                )
                download_url = URL_GC_TCX_ACTIVITY + str(a["activityId"]) + "?full=true"
                file_mode = "w"
            elif ARGS.format == "original":
                data_filename = (
                    ARGS.directory + "/" + str(a["activityId"]) + "_activity.zip"
                )
                fit_filename = (
                    ARGS.directory + "/" + str(a["activityId"]) + "_activity.fit"
                )
                download_url = URL_GC_ORIGINAL_ACTIVITY + str(a["activityId"])
                file_mode = "wb"
            else:
                msg = "Unrecognized format."
                raise Exception(msg)
            if Path(data_filename).is_file():
                print("\tData file already exists; skipping...")
                continue
            # Regardless of unzip setting, don't redownload if the ZIP or FIT file exists.
            if ARGS.format == "original" and Path(fit_filename).is_file():
                print("\tFIT data file already exists; skipping...")
                continue
            # Download the data file from Garmin Connect. If the download fails (e.g., due to timeout),
            # this script will die, but nothing will have been written to disk about this activity, so
            # just running it again should pick up where it left off.
            print("\tDownloading file...", end=" ")
            try:
                data = http_req(download_url)
            except urllib.error.HTTPError as errs:
                # Handle expected (though unfortunate) error codes; die on unexpected ones.
                if errs.code == HTTP_INTERNAL_SERVER_ERROR and ARGS.format == "tcx":
                    # Garmin will give an internal server error (HTTP 500) when downloading TCX files
                    # if the original was a manual GPX upload. Writing an empty file prevents this file
                    # from being redownloaded, similar to the way GPX files are saved even when there
                    # are no tracks. One could be generated here, but that's a bit much. Use the GPX
                    # format if you want actual data in every file, as I believe Garmin provides a GPX
                    # file for every activity.
                    print(
                        "Writing empty file since Garmin did not generate a TCX file for"
                        " this activity...",
                        end=" ",
                    )
                    data = ""
                elif errs.code == HTTP_NOT_FOUND and ARGS.format == "original":
                    # For manual activities (i.e., entered in online without a file upload), there is
                    # no original file. # Write an empty file to prevent redownloading it.
                    print(
                        "Writing empty file since there was no original activity data...",
                        end=" ",
                    )
                    data = ""
                else:
                    raise Exception(
                        "Failed. Got an unexpected HTTP error ("
                        + str(errs.code)
                        + download_url
                        + ").",
                    )
            # Persist file
            write_to_file(data_filename, decoding_decider(data), file_mode)
            # Fetch and persist the activity summary JSON.
            print("Activity summary URL: " + URL_GC_ACTIVITY + str(a["activityId"]))
            ACTIVITY_SUMMARY = http_req(URL_GC_ACTIVITY + str(a["activityId"]))
            write_to_file(
                ARGS.directory + "/" + str(a["activityId"]) + "_activity_summary.json",
                ACTIVITY_SUMMARY.decode(),
                "a",
            )
            JSON_SUMMARY = json.loads(ACTIVITY_SUMMARY)
            # print(JSON_SUMMARY)
            # Device details (best effort — failure only skips the Device column).
            print(
                "Device detail URL: "
                + URL_DEVICE_DETAIL
                + str(JSON_SUMMARY["metadataDTO"]["deviceApplicationInstallationId"]),
            )
            DEVICE_DETAIL = http_req(
                URL_DEVICE_DETAIL
                + str(JSON_SUMMARY["metadataDTO"]["deviceApplicationInstallationId"]),
            )
            if DEVICE_DETAIL:
                write_to_file(
                    ARGS.directory + "/" + str(a["activityId"]) + "_app_info.json",
                    DEVICE_DETAIL.decode(),
                    "a",
                )
                JSON_DEVICE = json.loads(DEVICE_DETAIL)
                # print(JSON_DEVICE)
            else:
                print("Retrieving Device Details failed.")
                JSON_DEVICE = None
            # Activity details (sample counts etc.).
            print(
                "Activity details URL: "
                + URL_GC_ACTIVITY
                + str(a["activityId"])
                + "/details",
            )
            try:
                ACTIVITY_DETAIL = http_req(
                    URL_GC_ACTIVITY + str(a["activityId"]) + "/details",
                )
                write_to_file(
                    ARGS.directory
                    + "/"
                    + str(a["activityId"])
                    + "_activity_detail.json",
                    ACTIVITY_DETAIL.decode(),
                    "a",
                )
                JSON_DETAIL = json.loads(ACTIVITY_DETAIL)
                # print(JSON_DETAIL)
            except HTTPError as ex:
                # NOTE(review): HTTPError always has .code, so this re-raises and the
                # JSON_DETAIL = None fallback is effectively unreachable — confirm intent.
                if hasattr(ex, "code"):
                    logging.error("Retrieving Activity Details failed, error: %s", ex)
                    raise
                JSON_DETAIL = None
            # Gear details (bike/shoes associated with the activity).
            print("Gear details URL: " + URL_GEAR_DETAIL + str(a["activityId"]))
            try:
                GEAR_DETAIL = http_req(URL_GEAR_DETAIL + str(a["activityId"]))
                write_to_file(
                    ARGS.directory + "/" + str(a["activityId"]) + "_gear_detail.json",
                    GEAR_DETAIL.decode(),
                    "a",
                )
                JSON_GEAR = json.loads(GEAR_DETAIL)
                # print(JSON_GEAR)
            except HTTPError as ex:
                # NOTE(review): same unreachable-fallback pattern as Activity Details above.
                if hasattr(ex, "code"):
                    logging.error("Retrieving Gear Details failed, error: %s", ex)
                    raise
                JSON_GEAR = None
            # Write stats to CSV. Each column is appended in header order;
            # empty_record emits an empty column when the value is missing.
            empty_record = ","
            csv_record = ""
            csv_record += (
                empty_record
                if "activityName" not in a or not a["activityName"]
                else '"' + a["activityName"].replace('"', '""') + '",'
            )
            # maybe a more elegant way of coding this but need to handle description as null
            if "description" not in a:
                csv_record += empty_record
            elif a["description"] is not None:
                csv_record += '"' + a["description"].replace('"', '""') + '",'
            else:
                csv_record += empty_record
            # Gear detail returned as an array so pick the first one
            csv_record += (
                empty_record
                if not JSON_GEAR or "customMakeModel" not in JSON_GEAR[0]
                else JSON_GEAR[0]["customMakeModel"] + ","
            )
            csv_record += (
                empty_record
                if "startTimeLocal" not in JSON_SUMMARY["summaryDTO"]
                else '"' + JSON_SUMMARY["summaryDTO"]["startTimeLocal"] + '",'
            )
            csv_record += (
                empty_record
                if "elapsedDuration" not in JSON_SUMMARY["summaryDTO"]
                else hhmmss_from_seconds(JSON_SUMMARY["summaryDTO"]["elapsedDuration"])
                + ","
            )
            csv_record += (
                empty_record
                if "movingDuration" not in JSON_SUMMARY["summaryDTO"]
                else hhmmss_from_seconds(JSON_SUMMARY["summaryDTO"]["movingDuration"])
                + ","
            )
            # Distance is reported in metres; convert to km with 5 decimals.
            csv_record += (
                empty_record
                if "distance" not in JSON_SUMMARY["summaryDTO"]
                else "{0:.5f}".format(JSON_SUMMARY["summaryDTO"]["distance"] / 1000)
                + ","
            )
            csv_record += (
                empty_record
                if "averageSpeed" not in JSON_SUMMARY["summaryDTO"]
                else kmh_from_mps(JSON_SUMMARY["summaryDTO"]["averageSpeed"]) + ","
            )
            csv_record += (
                empty_record
                if "averageMovingSpeed" not in JSON_SUMMARY["summaryDTO"]
                else kmh_from_mps(JSON_SUMMARY["summaryDTO"]["averageMovingSpeed"])
                + ","
            )
            csv_record += (
                empty_record
                if "maxSpeed" not in JSON_SUMMARY["summaryDTO"]
                else kmh_from_mps(JSON_SUMMARY["summaryDTO"]["maxSpeed"]) + ","
            )
            csv_record += (
                empty_record
                if "elevationLoss" not in JSON_SUMMARY["summaryDTO"]
                else str(JSON_SUMMARY["summaryDTO"]["elevationLoss"]) + ","
            )
            csv_record += (
                empty_record
                if "elevationGain" not in JSON_SUMMARY["summaryDTO"]
                else str(JSON_SUMMARY["summaryDTO"]["elevationGain"]) + ","
            )
            csv_record += (
                empty_record
                if "minElevation" not in JSON_SUMMARY["summaryDTO"]
                else str(JSON_SUMMARY["summaryDTO"]["minElevation"]) + ","
            )
            csv_record += (
                empty_record
                if "maxElevation" not in JSON_SUMMARY["summaryDTO"]
                else str(JSON_SUMMARY["summaryDTO"]["maxElevation"]) + ","
            )
            # NOTE(review): the minHR column emits "," even when minHR is present,
            # so the value is never written — looks like a dropped-value bug.
            csv_record += (
                empty_record if "minHR" not in JSON_SUMMARY["summaryDTO"] else ","
            )
            csv_record += (
                empty_record
                if "maxHR" not in JSON_SUMMARY["summaryDTO"]
                else str(JSON_SUMMARY["summaryDTO"]["maxHR"]) + ","
            )
            csv_record += (
                empty_record
                if "averageHR" not in JSON_SUMMARY["summaryDTO"]
                else str(JSON_SUMMARY["summaryDTO"]["averageHR"]) + ","
            )
            csv_record += (
                empty_record
                if "calories" not in JSON_SUMMARY["summaryDTO"]
                else str(JSON_SUMMARY["summaryDTO"]["calories"]) + ","
            )
            csv_record += (
                empty_record
                if "averageBikeCadence" not in JSON_SUMMARY["summaryDTO"]
                else str(JSON_SUMMARY["summaryDTO"]["averageBikeCadence"]) + ","
            )
            csv_record += (
                empty_record
                if "maxBikeCadence" not in JSON_SUMMARY["summaryDTO"]
                else str(JSON_SUMMARY["summaryDTO"]["maxBikeCadence"]) + ","
            )
            csv_record += (
                empty_record
                if "totalNumberOfStrokes" not in JSON_SUMMARY["summaryDTO"]
                else str(JSON_SUMMARY["summaryDTO"]["totalNumberOfStrokes"]) + ","
            )
            csv_record += (
                empty_record
                if "averageTemperature" not in JSON_SUMMARY["summaryDTO"]
                else str(JSON_SUMMARY["summaryDTO"]["averageTemperature"]) + ","
            )
            csv_record += (
                empty_record
                if "minTemperature" not in JSON_SUMMARY["summaryDTO"]
                else str(JSON_SUMMARY["summaryDTO"]["minTemperature"]) + ","
            )
            csv_record += (
                empty_record
                if "maxTemperature" not in JSON_SUMMARY["summaryDTO"]
                else str(JSON_SUMMARY["summaryDTO"]["maxTemperature"]) + ","
            )
            # "Map" column: link back to the activity page.
            csv_record += (
                empty_record
                if "activityId" not in a
                else f"{BASE_URL}/activity-service/activity/"
                + str(a["activityId"])
                + ","
            )
            # NOTE(review): the next three timestamp columns and the four
            # corrected-elevation columns below emit "," (empty value) even when
            # the key is present — presumably placeholders; confirm intent.
            csv_record += (
                empty_record
                if "endTimestamp" not in JSON_SUMMARY["summaryDTO"]
                else ","
            )
            csv_record += (
                empty_record
                if "beginTimestamp" not in JSON_SUMMARY["summaryDTO"]
                else ","
            )
            csv_record += (
                empty_record
                if "endTimestamp" not in JSON_SUMMARY["summaryDTO"]
                else ","
            )
            csv_record += (
                empty_record
                if not JSON_DEVICE or "productDisplayName" not in JSON_DEVICE
                else JSON_DEVICE["productDisplayName"]
                + " "
                + JSON_DEVICE["versionString"]
                + ","
            )
            csv_record += (
                empty_record
                if "activityType" not in a
                else a["activityType"]["typeKey"].title() + ","
            )
            csv_record += (
                empty_record
                if "eventType" not in a
                else a["eventType"]["typeKey"].title() + ","
            )
            csv_record += (
                empty_record
                if "timeZoneUnitDTO" not in JSON_SUMMARY
                else JSON_SUMMARY["timeZoneUnitDTO"]["timeZone"] + ","
            )
            csv_record += (
                empty_record
                if "startLatitude" not in JSON_SUMMARY["summaryDTO"]
                else str(JSON_SUMMARY["summaryDTO"]["startLatitude"]) + ","
            )
            csv_record += (
                empty_record
                if "startLongitude" not in JSON_SUMMARY["summaryDTO"]
                else str(JSON_SUMMARY["summaryDTO"]["startLongitude"]) + ","
            )
            csv_record += (
                empty_record
                if "endLatitude" not in JSON_SUMMARY["summaryDTO"]
                else str(JSON_SUMMARY["summaryDTO"]["endLatitude"]) + ","
            )
            csv_record += (
                empty_record
                if "endLongitude" not in JSON_SUMMARY["summaryDTO"]
                else str(JSON_SUMMARY["summaryDTO"]["endLongitude"]) + ","
            )
            csv_record += (
                empty_record
                if "gainCorrectedElevation" not in JSON_SUMMARY["summaryDTO"]
                else ","
            )
            csv_record += (
                empty_record
                if "lossCorrectedElevation" not in JSON_SUMMARY["summaryDTO"]
                else ","
            )
            csv_record += (
                empty_record
                if "maxCorrectedElevation" not in JSON_SUMMARY["summaryDTO"]
                else ","
            )
            csv_record += (
                empty_record
                if "minCorrectedElevation" not in JSON_SUMMARY["summaryDTO"]
                else ","
            )
            csv_record += (
                empty_record
                if not JSON_DETAIL or "metricsCount" not in JSON_DETAIL
                else str(JSON_DETAIL["metricsCount"]) + ","
            )
            csv_record += "\n"
            CSV_FILE.write(csv_record)
            if ARGS.format == "gpx" and data:
                # Validate GPX data. If we have an activity without GPS data (e.g., running on a
                # treadmill), Garmin Connect still kicks out a GPX (sometimes), but there is only
                # activity information, no GPS data. N.B. You can omit the XML parse (and the
                # associated log messages) to speed things up.
                gpx = parseString(data)
                if gpx.getElementsByTagName("trkpt"):
                    print("Done. GPX data saved.")
                else:
                    print("Done. No track points found.")
            elif ARGS.format == "original":
                # Even manual upload of a GPX file is zipped, but we'll validate the extension.
                if ARGS.unzip and data_filename[-3:].lower() == "zip":
                    print("Unzipping and removing original files...", end=" ")
                    print("Filesize is: " + str(Path(data_filename).stat().st_size))
                    if Path(data_filename).stat().st_size > 0:
                        with Path(data_filename).open("rb") as zip_file:
                            z = zipfile.ZipFile(zip_file)
                            for name in z.namelist():
                                z.extract(name, ARGS.directory)
                            zip_file.close()
                    else:
                        print("Skipping 0Kb zip file.")
                    # Remove the ZIP after extraction (or when it was empty).
                    Path(data_filename).unlink()
                    print("Done.")
            else:
                # TODO @moderation: Consider validating other formats.
                print("Done.")
        TOTAL_DOWNLOADED += NUM_TO_DOWNLOAD
    # End while loop for multiple chunks.
    # NOTE(review): redundant — the with block closes the file on exit.
    CSV_FILE.close()
# Optionally hand the finished CSV off to an external program.
# Idiom fix: test the string's truthiness directly instead of len() —
# ARGS.external defaults to "", so behavior is unchanged.
if ARGS.external:
    print("Open CSV output.")
    print(CSV_FILENAME)
    # open CSV file. Comment this line out if you don't want this behavior
    call([ARGS.external, "--" + ARGS.args, CSV_FILENAME])
print("Done!")