# pylint: disable=too-many-nested-blocks,too-many-branches,too-many-locals,line-too-long,invalid-name,consider-using-f-string,no-member
"""utils.py"""
import json
import re
import shutil
from datetime import datetime
from pathlib import Path
from typing import Dict, Set, List

import dateutil.parser
import pytz
from attrs import define, field
from rich.progress import Progress

from rich_utils import info_msg, verbose_msg, warn_msg


@define
class Entry:
    """A single journal entry"""

    uuid: str = field(default=None, eq=True)
    has_yaml: bool = field(default=False, eq=False)
    yaml: str = field(default="", eq=False)
    metadata: dict = field(factory=dict, eq=False)
    text: str = field(default="", eq=False)
    output_file: Path = field(default=None, eq=False)
    def __str__(self) -> str:
        if self.has_yaml:
            self.yaml = "---\n{yaml_block}\n---\n\n".format(
                yaml_block="\n".join(
                    [
                        f"{name.lower().replace(' ', '_')}: {value}"
                        for name, value in self.metadata.items()
                        if name in ("location", "places", "dates", "tags")
                    ]
                )
            )
        metadata = [f"{key}:: {value}" for key, value in self.metadata.items()]
        return "{yaml}{metadata}\n\n{text}\n".format(
            yaml=self.yaml, metadata="\n".join(metadata), text=self.text
        )
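
    # Illustrative rendering (hypothetical values, not from the original file):
    # an entry with metadata {"dates": "2023-05-01", "time": "09:30:00"} and
    # has_yaml=False stringifies to inline Dataview-style fields followed by
    # the body text:
    #
    #   dates:: 2023-05-01
    #   time:: 09:30:00
    #
    #   Body text of the entry...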

    @classmethod
    def from_metadata(cls, metadata: dict) -> "Entry":
        """Create a new `Entry` from a metadata dictionary"""
        if not isinstance(metadata, dict):
            raise TypeError(
                f"Metadata must be of `dict` type, instead of {type(metadata)}."
            )
        entry = cls(uuid=metadata.pop("uuid", None), metadata=metadata)
        if entry.uuid is not None:
            entry.metadata["url"] = f"[DayOne](dayone://view?entryId={entry.uuid})"
        return entry

    def dump(self) -> None:
        """Write out the entry to a file"""
        if self.output_file is None:
            raise RuntimeError("Entry output file is undefined!")
        with self.output_file.open("w", encoding="utf-8") as file:
            file.write(f"{self}")
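
# A minimal usage sketch of the Entry lifecycle (hypothetical values, not part
# of the original file):
#
#     entry = Entry.from_metadata({"uuid": "ABCD1234", "dates": "2023-05-01"})
#     entry.text = "Hello, world!"
#     entry.output_file = Path("2023-05-01.md")
#     entry.dump()  # renders metadata fields plus text and writes the file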


def capwords(string: str, sep: str = "") -> str:
    """Capitalize the first letter of each word, lowercase the rest, and join the words with `sep`"""
    return sep.join(word[0].upper() + word[1:].lower() for word in string.split(" "))
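
# For illustration (hypothetical input): with the default empty separator,
# capwords("my great tag") returns "MyGreatTag", so multi-word DayOne tags
# become space-free, camel-case-style Obsidian tags.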


# TODO: refactor this function into a method of the Entry class
def retrieve_metadata(
    entry: Dict,
    local_date: datetime,
    tag_prefix: str,
    ignore_tags: Set,
    status_tags: Set,
    extra_tags: List,
    verbose: int,
    journal_name: str,
) -> Dict:
    """Fetch the metadata of a single journal entry"""
    metadata = {}
    metadata["uuid"] = entry["uuid"]
    # Add the creation date and time, adjusted for the entry's timezone
    metadata["dates"] = local_date.strftime("%Y-%m-%d")
    metadata["time"] = local_date.strftime("%H:%M:%S")
    # metadata["timezone"] = entry["timeZone"]
    # Add location
    location = []
    try:
        location = list(
            filter(
                bool,
                [
                    entry["location"].get(key, None)
                    for key in (
                        "placeName",
                        "localityName",
                        "administrativeArea",
                        "country",
                    )
                ],
            )
        )
    except KeyError:
        if verbose > 1:
            verbose_msg(
                f"Entry with date '{local_date.strftime('%Y-%m-%d')}' has no location!"
            )
    metadata["places"] = ", ".join(location)
    # Add GPS coordinates; not all entries have them
    if "location" in entry and all(
        ["latitude" in entry["location"], "longitude" in entry["location"]]
    ):
        loc = entry["location"]
        metadata["location"] = f"{loc.get('latitude')}, {loc.get('longitude')}"
    # Add weather information, if present
    if "weather" in entry and all(
        i in entry["weather"]
        for i in ["conditionsDescription", "temperatureCelsius", "windSpeedKPH"]
    ):
        weather = entry["weather"]
        metadata[
            "weather"
        ] = f"{weather['conditionsDescription']}, {round(weather['temperatureCelsius'], 1)}°C, {round(weather['windSpeedKPH'], 1)} km/h wind"
    # Process tags
    tags = []
    # The first tag is the journal name, if present
    if journal_name is not None:
        tags.append(capwords(f"#journal/{journal_name}"))
    if (entry_tags := entry.get("tags", None)) is not None:
        entry_tags = set(entry_tags)
        for tag in (entry_tags - ignore_tags) - status_tags:
            # Format the tag: remove spaces and capitalize each word
            # Example: #Original tag --> #{prefix}/originalTag
            new_tag = capwords(f"{tag_prefix}{tag}")
            tags.append(new_tag)
        # Handle status tags
        if status_tags := entry_tags & status_tags:
            tags.extend(map(lambda tag: capwords("#status/" + tag), status_tags))
    # Add a tag for the location to make places searchable in Obsidian.
    # location[-1:0:-1] walks the hierarchy in reverse (country -> locality),
    # skipping the place name itself
    if location:
        tags.append(f"#places/{'/'.join(map(capwords, location[-1:0:-1]))}")
    # Add a :star: emoji for starred entries
    if entry["starred"]:
        tags.append("#\u2B50")
    # Add extra tags, if any
    if extra_tags is not None:
        tags.extend(extra_tags)
    # Build the final string with all the tags
    if tags:
        metadata["tags"] = ", ".join(tags)
    return metadata
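
# For illustration only (all values hypothetical), retrieve_metadata() might
# return a dict such as:
#
#     {
#         "uuid": "4C2A8D1B9F0E4E6D",
#         "dates": "2023-05-01",
#         "time": "09:30:00",
#         "places": "Central Park, New York, United States",
#         "location": "40.78, -73.97",
#         "tags": "#journal/diary, #places/UnitedStates/NewYork",
#     }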


def process_journal(
    progress: Progress,
    journal: Path,
    vault_directory: Path,
    tag_prefix: str,
    verbose: int,
    convert_links: bool,
    yaml: bool,
    force: bool,
    merge_entries: bool,
    entries_sep: str,
    ignore_tags: Set,
    status_tags: Set,
    metadata_ext: Dict,
) -> None:
    """Process a journal JSON file"""
    # Name of the folder where journal entries will end up in your Obsidian vault
    journal_name = journal.stem.lower()
    base_folder = journal.resolve().parent
    journal_folder = base_folder / journal_name
    # Clean out the existing journal folder, otherwise each run creates new files
    if journal_folder.exists():
        if verbose > 0:
            warn_msg(f"Deleting existing folder '{journal_folder}'")
        shutil.rmtree(journal_folder)
    if verbose > 0:
        info_msg(f"Creating '{journal_folder}'")
    journal_folder.mkdir()
    # All processed entries will be added to a dictionary
    entries = {}
    merged_entries = 0
    # Mapping between entry UUIDs and Markdown files,
    # needed to perform the DayOne -> Obsidian link conversion
    uuid_to_file = {}
    with open(journal, encoding="utf-8") as json_file:
        data: Dict = json.load(json_file)
        task = progress.add_task(
            f"[bold green]Processing entries of '[cyan][not bold]{journal.name}[/not bold][/cyan]'",
            total=len(data["entries"]),
        )
        # Are there additional tags in the config file?
        if metadata_ext is not None:
            extra_tags = metadata_ext.pop("tags", None)
        else:
            extra_tags = None
        entry: Dict
        for entry in data["entries"]:
            creation_date = dateutil.parser.isoparse(entry["creationDate"])
            local_date = creation_date.astimezone(
                pytz.timezone(entry["timeZone"])
            )  # It's natural to use our local date/time as reference point, not UTC
            # Fetch the entry's metadata
            metadata = retrieve_metadata(
                entry,
                local_date,
                tag_prefix,
                extra_tags=extra_tags,
                ignore_tags=ignore_tags,
                status_tags=status_tags,
                journal_name=journal_name,
                verbose=verbose,
            )
            # Create a new Entry and add metadata
            new_entry = Entry.from_metadata(metadata)
            # Add any other metadata field found in the config file
            if metadata_ext is not None:
                new_entry.metadata.update(metadata_ext)
            # Turn on YAML front matter, if requested
            new_entry.has_yaml = yaml
            # Add the body text if it exists (entries can sometimes have a blank body), after some tidying up
            entry_text: str
            if (entry_text := entry.get("text", None)) is not None:
                new_text = entry_text.replace("\\", "")
                new_text = new_text.replace("\u2028", "\n")
                new_text = new_text.replace("\u1C6A", "\n\n")
                new_text = new_text.replace("\u200b", "")
                # TODO: fix multiple, consecutive newlines as well, e.g., \n\n\n\n -> \n
                # Fix multi-line ```code blocks```:
                # DayOne splits these blocks across many lines, each wrapped in triple ``` delimiters,
                # which results in badly formatted Markdown output.
                new_text = re.sub(r"```\s+```", "", new_text, flags=re.MULTILINE)
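                # Illustrative (hypothetical input): a stray pair of fences with
                # only whitespace between them, such as "```\n```", is removed
                # entirely, stitching a split code block back together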
# Handling attachments: photos, audios, videos, and documents (PDF)
if "photos" in entry:
# Correct photo links. The filename is the md5 code, not the identifier used in the text
for photo in entry["photos"]:
image_type = photo["type"]
original_photo_file = (
base_folder / "photos" / f"{photo['md5']}.{image_type}"
)
renamed_photo_file = (
base_folder
/ "photos"
/ f"{photo['identifier']}.{image_type}"
)
if original_photo_file.exists():
if verbose > 1:
verbose_msg(
f"Renaming {original_photo_file} to {renamed_photo_file}"
)
original_photo_file.rename(renamed_photo_file)
new_text = re.sub(
r"(\!\[\]\(dayone-moment:\/\/)([A-F0-9]+)\)",
(rf"![[\2.{image_type}]]"),
new_text,
)
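                        # Illustrative (hypothetical identifier): the moment link
                        # "![](dayone-moment://ABCDEF12)" becomes the Obsidian
                        # embed "![[ABCDEF12.jpeg]]" when image_type is "jpeg"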
if "pdfAttachments" in entry:
# Correct photo pdf links. Similar to what is done on photos
for pdf in entry["pdfAttachments"]:
original_pdf_file = base_folder / "pdfs" / f"{pdf['md5']}.pdf"
renamed_pdf_file = (
base_folder / "pdfs" / f"{pdf['identifier']}.pdf"
)
if original_pdf_file.exists():
if verbose > 1:
verbose_msg(
f"Renaming {original_pdf_file} to {renamed_pdf_file}"
)
original_pdf_file.rename(renamed_pdf_file)
new_text = re.sub(
r"(\!\[\]\(dayone-moment:\/+pdfAttachment\/)([A-F0-9]+)\)",
r"![[\2.pdf]]",
new_text,
)
if "audios" in entry:
for audio in entry["audios"]:
# Audio type is missing in DayOne JSON
# AAC files are very often saved with .m4a extension
audio_format = "m4a"
original_audio_file = (
base_folder / "audios" / f"{audio['md5']}.{audio_format}"
)
renamed_audio_file = (
base_folder
/ "audios"
/ f"{audio['identifier']}.{audio_format}"
)
if original_audio_file.exists():
if verbose > 1:
verbose_msg(
f"Renaming {original_audio_file} to {renamed_audio_file}"
)
original_audio_file.rename(renamed_audio_file)
new_text = re.sub(
r"(\!\[\]\(dayone-moment:\/+audio/)([A-F0-9]+)\)",
rf"![[\2.{audio_format}]]",
new_text,
)
if "videos" in entry:
for video in entry["videos"]:
video_format = video["type"]
original_video_file = (
base_folder / "videos" / f"{video['md5']}.{video_format}"
)
renamed_video_file = (
base_folder
/ "videos"
/ f"{video['identifier']}.{video_format}"
)
if original_video_file.exists():
if verbose > 1:
verbose_msg(
f"Renaming {original_video_file} to {renamed_video_file}"
)
original_video_file.rename(renamed_video_file)
new_text = re.sub(
r"(\!\[\]\(dayone-moment:\/+video/)([A-F0-9]+)\)",
rf"![[\2.{video_format}]]",
new_text,
)
                new_entry.text = new_text
            # Save entries organised as year/year-month/year-month-day.md
            year_dir = journal_folder / str(creation_date.year)
            month_dir = year_dir / creation_date.strftime("%Y-%m")
            if not month_dir.is_dir():
                month_dir.mkdir(parents=True)
            # Target filename to save to
            file_date_format = local_date.strftime("%Y-%m-%d")
            target_file = month_dir / f"{file_date_format}.md"
            new_entry.output_file = target_file
            # Relative path, to check if this entry is already present in the vault directory
            target_file_rel = (
                Path(journal_name)
                / f"{creation_date.strftime('%Y/%Y-%m')}"
                / f"{file_date_format}.md"
            )
            # Skip files already present in the vault directory
            if (
                vault_directory is None
                or force
                or not (Path(vault_directory).expanduser() / target_file_rel).exists()
            ):
                # Here is where we handle multiple entries on the same day: each goes to its own file
                if target_file.stem in entries:
                    if verbose > 1:
                        warn_msg(
                            f"Found another entry with the same date '{target_file.stem}'"
                        )
                    if merge_entries:
                        merged_entries += 1
                        prev_entry: Entry = entries.pop(target_file.stem)
                        del prev_entry.metadata["dates"]
                        new_entry.text += f"\n\n{entries_sep}\n\n{prev_entry}"
                    else:
                        # The file exists: find the next free name in the sequence by appending a letter marker
                        index = 97  # ASCII 'a'
                        target_file = month_dir / f"{file_date_format}{chr(index)}.md"
                        while target_file.stem in entries:
                            index += 1
                            target_file = (
                                month_dir / f"{file_date_format}{chr(index)}.md"
                            )
                        new_entry.output_file = target_file
                # Add the current entry to the entries dict
                entries[target_file.stem] = new_entry
                # Step 1 of replacing DayOne internal links to other entries with proper Obsidian [[links]]
                uuid_to_file[new_entry.uuid] = target_file.name
            else:
                if verbose > 1:
                    verbose_msg(
                        f"File '{target_file_rel}' already exists in vault directory!"
                    )
            progress.update(task, advance=1)
    # Rename the JSON file to avoid reprocessing it if the script is run twice
    num_files = len(list(base_folder.glob(f"*{journal.stem}.json")))
    journal.rename(base_folder / f"{num_files - 1}_{journal.name}")
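    # Illustrative (hypothetical name): "diary.json" becomes "0_diary.json" on
    # the first run; if a fresh "diary.json" is processed later, the glob also
    # counts the renamed copies, so it becomes "1_diary.json", and so on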

    def replace_link(match: re.Match) -> str:
        """A replacement function for DayOne internal links"""
        link_text, uuid = match.groups()
        if uuid in uuid_to_file:
            return f"[[{uuid_to_file[uuid]}|{link_text}]]"
        return f"^[Linked entry with UUID `{uuid}` not found]"
    entry: Entry
    for entry in entries.values():
        if convert_links:
            # Step 2 of replacing DayOne internal links: we must do a second pass over the entries.
            # The regex matches a DayOne internal link: [link_text](dayone://view?entryId=uuid)
            entry.text = re.sub(
                r"\[(.*?)\]\(dayone2?:\/\/.*?([A-F0-9]+)\)", replace_link, entry.text
            )
        entry.dump()
    info_msg(
        f":white_check_mark: {len(entries)}/{len(data['entries'])}{f' ({merged_entries} merged)' if merge_entries else ''} entries have been exported to '{journal_folder}'"
    )