Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Save synced filenames and skip syncing them next time #454

Merged
Merged
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@
"license": "MIT",
"private": true,
"scripts": {
"data:clean": "rm scripts/data.db GPX_OUT/* activities/* src/static/activities.json",
"data:clean": "rm scripts/data.db {GPX,TCX,FIT}_OUT/* activities/* src/static/activities.json",
"data:download:garmin": "python3 scripts/garmin_sync.py",
"data:analysis": "python3 scripts/gen_svg.py --from-db --type github --output assets/github.svg",
"build": "gatsby clean && gatsby build",
Expand Down
1 change: 1 addition & 0 deletions scripts/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
}
SQL_FILE = os.path.join(parent, "scripts", "data.db")
JSON_FILE = os.path.join(parent, "src", "static", "activities.json")
SYNCED_FILE = os.path.join(parent, "imported.json")
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Where do you want to save SYNCED_FILE? Maybe use FIT_FOLDER instead of parent?


# TODO: Move into nike_sync
BASE_URL = "https://api.nike.com/sport/v3/me"
Expand Down
10 changes: 10 additions & 0 deletions scripts/generator/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@

from .db import Activity, init_db, update_or_create_activity

from synced_data_file_logger import save_synced_data_file_list


IGNORE_BEFORE_SAVING = os.getenv("IGNORE_BEFORE_SAVING", False)

Expand Down Expand Up @@ -78,27 +80,35 @@ def sync_from_data_dir(self, data_dir, file_suffix="gpx"):
if not tracks:
print("No tracks found.")
return

synced_files = []

for t in tracks:
created = update_or_create_activity(self.session, t.to_namedtuple())
if created:
sys.stdout.write("+")
else:
sys.stdout.write(".")
synced_files.extend(t.file_names)
sys.stdout.flush()

save_synced_data_file_list(synced_files)

self.session.commit()

def sync_from_app(self, app_tracks):
if not app_tracks:
print("No tracks found.")
return
print("Syncing tracks '+' means new track '.' means update tracks")
synced_files = []
for t in app_tracks:
created = update_or_create_activity(self.session, t)
if created:
sys.stdout.write("+")
else:
sys.stdout.write(".")
synced_files.extend(t.file_names)
sys.stdout.flush()

self.session.commit()
Expand Down
5 changes: 5 additions & 0 deletions scripts/gpxtrackposter/track_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@
from .track import Track
from .year_range import YearRange

from synced_data_file_logger import load_synced_file_list

log = logging.getLogger(__name__)


Expand Down Expand Up @@ -167,12 +169,15 @@ def _load_data_tracks(file_names, load_func=load_gpx_file):

@staticmethod
def _list_data_files(data_dir, file_suffix):
synced_files = load_synced_file_list()
data_dir = os.path.abspath(data_dir)
if not os.path.isdir(data_dir):
raise ParameterError(f"Not a directory: {data_dir}")
for name in os.listdir(data_dir):
if name.startswith("."):
continue
if name in synced_files:
continue
path_name = os.path.join(data_dir, name)
if name.endswith(f".{file_suffix}") and os.path.isfile(path_name):
yield path_name
24 changes: 24 additions & 0 deletions scripts/synced_data_file_logger.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import os
from config import SYNCED_FILE
import json


def save_synced_data_file_list(file_list: list):
old_list = load_synced_file_list()

with open(SYNCED_FILE, "w") as f:
file_list.extend(old_list)

json.dump(file_list, f)


def load_synced_file_list():
if os.path.exists(SYNCED_FILE):
with open(SYNCED_FILE, "r") as f:
try:
return json.load(f)
except Exception as e:
print(f"json load {SYNCED_FILE} \nerror {e}")
pass

return []