From 85819545122cc6d876c571cad896d54b9cbbcac3 Mon Sep 17 00:00:00 2001
From: Gambhiro
Date: Sun, 10 Nov 2019 14:06:44 +0000
Subject: [PATCH 01/10] output_format
---
README.md | 6 +-
assets/OEBPS/package.opf | 9 +--
assets/simsapa_md_to_epub.bat | 2 +-
assets/simsapa_md_to_epub.sh | 2 +-
assets/simsapa_md_to_mobi.bat | 2 +-
assets/simsapa_md_to_mobi.sh | 2 +-
scripts/build_dicts_for_release.sh | 8 +--
scripts/combined_to_mobi.sh | 2 +-
scripts/ncped_to_epub.sh | 2 +-
scripts/ncped_to_mobi.sh | 2 +-
src/app.rs | 59 ++++++-----------
src/cli.yml | 8 +--
src/ebook.rs | 60 +++++++++---------
src/helpers.rs | 8 +--
src/main.rs | 10 +--
.../data with space/ncped custom cover.md | 2 -
.../data/data with space/ncped with space.md | 2 -
.../data with space/ncped with space.xlsx | Bin 7069 -> 7026 bytes
tests/run_tests.sh | 6 +-
19 files changed, 81 insertions(+), 111 deletions(-)
diff --git a/README.md b/README.md
index f0c103d..37d4ea2 100644
--- a/README.md
+++ b/README.md
@@ -75,8 +75,6 @@ ndped-example.md
book_id = "NcpedDictionarySimsapa"
created_date_human = ""
created_date_opf = ""
- is_epub = true
- is_mobi = false
```
--- DICTIONARY WORD ENTRIES ---
@@ -162,7 +160,7 @@ SUBCOMMANDS:
Process a Markdown file and generate an EPUB or MOBI dictionary.
USAGE:
- simsapa_dictionary markdown_to_ebook [FLAGS] [OPTIONS] --ebook_format
+ simsapa_dictionary markdown_to_ebook [FLAGS] [OPTIONS] --output_format
FLAGS:
--dont_remove_generated_files Turns off the removal of the generated OPF, HTML, etc. files used to create the
@@ -176,7 +174,7 @@ FLAGS:
OPTIONS:
--dict_label
+{{/if ~}}
{{grammar_phonetic_transliteration word_header ../meta.add_velthuis ~}}
{{word_list "Also written as:" word_header.also_written_as ~}}
{{markdown definition_md ~}}
diff --git a/scripts/build_dicts_for_release.sh b/scripts/build_dicts_for_release.sh
index 951dbb5..20e6af4 100755
--- a/scripts/build_dicts_for_release.sh
+++ b/scripts/build_dicts_for_release.sh
@@ -3,11 +3,22 @@
SRC_DIR=../simsapa-dictionary-data
OUT_DIR=../simsapa-dictionary_releases/new-release
+if [ -d "$OUT_DIR" ]; then
+    rm -r "$OUT_DIR"
+fi
+
+mkdir -p "$OUT_DIR"
+
+PROJ_ROOT=$(pwd)
+
KINDLEGEN_PATH="$HOME/lib/kindlegen/kindlegen"
-# === Combined ===
+STARDICT_TEXT2BIN="/usr/lib/stardict-tools/stardict-text2bin"
+
+# === Individual ===
for i in dhammika dppn ncped nyana pts; do
+ cd "$PROJ_ROOT"
cargo run -- markdown_to_ebook \
--source_path "$SRC_DIR/$i.md" \
@@ -23,21 +34,58 @@ for i in dhammika dppn ncped nyana pts; do
--output_format epub \
--output_path "$OUT_DIR/$i.epub"
+ # TODO epubcheck
+
+ stardict_out="$OUT_DIR/$i-stardict"
+ mkdir -p "$stardict_out"
+
+ cargo run -- markdown_to_stardict_xml \
+ --source_path "$SRC_DIR/$i.md" \
+ --output_path "$stardict_out/$i.xml"
+
+ cd "$stardict_out"
+ $STARDICT_TEXT2BIN "$i.xml" "$i.ifo"
+ rm "$i.xml"
+ cd ..
+ zip -r "$i-stardict.zip" "$i-stardict"
+    rm -r "$i-stardict"
+
done
# === Combined ===
+cd "$PROJ_ROOT"
+
+name="combined-dictionary"
+
cargo run -- markdown_to_ebook \
- --source_paths_list ./scripts/combined_dict_md_paths.txt \
--title "Combined Pali - English Dictionary" \
+ --source_paths_list ./scripts/combined_dict_md_paths.txt \
--output_format mobi \
- --output_path "$OUT_DIR/combined-dictionary.mobi" \
+ --output_path "$OUT_DIR/$name.mobi" \
--mobi_compression 0 \
--kindlegen_path "$KINDLEGEN_PATH"
cargo run -- markdown_to_ebook \
- --source_paths_list ./scripts/combined_dict_md_paths.txt \
--title "Combined Pali - English Dictionary" \
+ --source_paths_list ./scripts/combined_dict_md_paths.txt \
--output_format epub \
- --output_path "$OUT_DIR/combined-dictionary.epub"
+ --output_path "$OUT_DIR/$name.epub"
+
+# TODO epubcheck
+
+stardict_out="$OUT_DIR/$name-stardict"
+mkdir -p "$stardict_out"
+
+cargo run -- markdown_to_stardict_xml \
+ --title "Combined Pali - English Dictionary" \
+ --source_paths_list ./scripts/combined_dict_md_paths.txt \
+ --output_path "$stardict_out/$name.xml"
+
+cd "$stardict_out"
+$STARDICT_TEXT2BIN "$name.xml" "$name.ifo"
+rm "$name.xml"
+cd ..
+zip -r "$name-stardict.zip" "$name-stardict"
+rm -r "$name-stardict"
diff --git a/scripts/json_to_md.sh b/scripts/json_to_md.sh
index e9afe4a..f0eb0c4 100755
--- a/scripts/json_to_md.sh
+++ b/scripts/json_to_md.sh
@@ -2,25 +2,67 @@
#RUST_LOG=sources_to_markdown=info cargo run --bin sources_to_markdown 2>&1 | tee sources_to_markdown.log
-SC_ROOT="$HOME/src/suttacentral-2018-09-03/sc-data/dictionaries/en"
+SC_ROOT="$HOME/src/suttacentral-2019-11-15/sc-data/dictionaries/en"
OUT_DIR="../simsapa-dictionary-data"
+# === Dhammika ===
+
+cargo run -- suttacentral_json_to_markdown \
+ --title "Dhammika Pali - English Dictionary" \
+ --dict_label Dhammika \
+ --json_path "$SC_ROOT/dhammika.json" \
+ --output_path "$OUT_DIR/dhammika.md"
+
cargo run -- suttacentral_json_to_markdown \
+ --title "Dhammika Pali - English Dictionary" \
+ --dict_label Dhammika \
+ --dont_process \
--json_path "$SC_ROOT/dhammika.json" \
- --output_path "$OUT_DIR/dhammika.md" \
- --dict_label Dhammika
+ --output_path "$OUT_DIR/dhammika_unprocessed.md"
+
+# === DPPN ===
+
+cargo run -- suttacentral_json_to_markdown \
+ --title "Dictionary of Pali Proper Names (DPPN)" \
+ --dict_label DPPN \
+ --json_path "$SC_ROOT/dppn.json" \
+ --output_path "$OUT_DIR/dppn.md"
cargo run -- suttacentral_json_to_markdown \
+ --title "Dictionary of Pali Proper Names (DPPN)" \
+ --dict_label DPPN \
+ --dont_process \
--json_path "$SC_ROOT/dppn.json" \
- --output_path "$OUT_DIR/dppn.md" \
- --dict_label DPPN
+ --output_path "$OUT_DIR/dppn_unprocessed.md"
+
+# === NCPED ===
+
+cargo run -- suttacentral_json_to_markdown \
+ --title "New Concise Pali - English Dictionary (NCPED)" \
+ --dict_label NCPED \
+ --json_path "$SC_ROOT/ncped.json" \
+ --output_path "$OUT_DIR/ncped.md"
cargo run -- suttacentral_json_to_markdown \
+ --title "New Concise Pali - English Dictionary (NCPED)" \
+ --dict_label NCPED \
+ --dont_process \
--json_path "$SC_ROOT/ncped.json" \
- --output_path "$OUT_DIR/ncped.md" \
- --dict_label NCPED
+ --output_path "$OUT_DIR/ncped_unprocessed.md"
+
+# === PTS ===
+
+cargo run -- suttacentral_json_to_markdown \
+ --title "Pali Text Society Pali - English Dictionary (PTS)" \
+ --dict_label PTS \
+ --dont_remove_see_also \
+ --json_path "$SC_ROOT/pts.json" \
+ --output_path "$OUT_DIR/pts.md"
cargo run -- suttacentral_json_to_markdown \
+ --title "Pali Text Society Pali - English Dictionary (PTS)" \
+ --dict_label PTS \
+ --dont_process \
+ --dont_remove_see_also \
--json_path "$SC_ROOT/pts.json" \
- --output_path "$OUT_DIR/pts.md" \
- --dict_label PTS
+ --output_path "$OUT_DIR/pts_unprocessed.md"
diff --git a/scripts/ncped_json_to_md.sh b/scripts/ncped_json_to_md.sh
new file mode 100755
index 0000000..8e02434
--- /dev/null
+++ b/scripts/ncped_json_to_md.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+#RUST_LOG=sources_to_markdown=info cargo run --bin sources_to_markdown 2>&1 | tee sources_to_markdown.log
+
+SC_ROOT="$HOME/src/suttacentral-2019-11-15/sc-data/dictionaries/en"
+OUT_DIR="../simsapa-dictionary-data"
+
+cargo run -- suttacentral_json_to_markdown \
+ --json_path "$SC_ROOT/ncped.json" \
+ --output_path "$OUT_DIR/ncped.md" \
+ --dict_label NCPED
+
diff --git a/scripts/nyana_to_md.sh b/scripts/nyana_to_md.sh
index 153365e..c7cec95 100755
--- a/scripts/nyana_to_md.sh
+++ b/scripts/nyana_to_md.sh
@@ -4,7 +4,8 @@ NYANA_ROOT="$HOME/src/dict-nyanatiloka"
OUT_DIR="../simsapa-dictionary-data"
cargo run -- nyanatiloka_to_markdown \
+ --title "Nyanatiloka Buddhist Dictionary" \
+ --dict_label Nyana \
--nyanatiloka_root "$NYANA_ROOT" \
- --output_path "$OUT_DIR/nyana.md" \
- --dict_label Nyana
+ --output_path "$OUT_DIR/nyana.md"
diff --git a/src/app.rs b/src/app.rs
index 6811125..c56891d 100644
--- a/src/app.rs
+++ b/src/app.rs
@@ -29,6 +29,7 @@ pub struct AppStartParams {
pub dont_run_kindlegen: bool,
pub dont_remove_generated_files: bool,
pub dont_process: bool,
+ pub dont_remove_see_also: bool,
pub run_command: RunCommand,
pub show_logs: bool,
pub zip_with: ZipWith,
@@ -82,6 +83,7 @@ impl Default for AppStartParams {
dont_run_kindlegen: false,
dont_remove_generated_files: false,
dont_process: false,
+ dont_remove_see_also: false,
run_command: RunCommand::NoOp,
show_logs: false,
zip_with,
@@ -589,6 +591,14 @@ pub fn process_cli_args(matches: clap::ArgMatches) -> Result<AppStartParams, Box<dyn Error>> {
+    if let Ok(x) = sub_matches
+        .value_of("title")
+        .unwrap()
+        .parse::<String>()
+ {
+ params.title = Some(x);
+ }
+
if let Ok(x) = sub_matches
.value_of("dict_label")
.unwrap()
@@ -601,6 +611,10 @@ pub fn process_cli_args(matches: clap::ArgMatches) -> Result<AppStartParams, Box<dyn Error>> {
+    if let Ok(x) = sub_matches.value_of("title").unwrap().parse::<String>()
+ {
+ params.title = Some(x);
+ }
+
if let Ok(x) = sub_matches
.value_of("dict_label")
.unwrap()
@@ -690,7 +712,7 @@ pub fn process_suttacentral_json(
for e in entries.iter() {
let new_word = DictWord {
word_header: DictWordHeader {
- dict_label: dict_label.to_string(),
+ dict_label: (*dict_label).to_string(),
word: e.word.to_lowercase(),
summary: "".to_string(),
grammar: "".to_string(),
@@ -763,7 +785,7 @@ pub fn process_nyanatiloka_entries(
for e in entries.iter() {
let new_word = DictWord {
word_header: DictWordHeader {
- dict_label: dict_label.to_string(),
+ dict_label: (*dict_label).to_string(),
word: e.word.to_lowercase(),
summary: "".to_string(),
grammar: "".to_string(),
@@ -824,9 +846,9 @@ pub fn process_markdown(source_path: &PathBuf, ebook: &mut Ebook) -> Result<(), Box<dyn Error>> {
return Err(Box::new(ToolError::Exit(msg)));
}
- let a = parts
+ let a = (*parts
.get(0)
- .unwrap()
+ .unwrap())
.to_string()
.replace(DICTIONARY_METADATA_SEP, "")
.replace("``` toml", "")
@@ -834,7 +856,7 @@ pub fn process_markdown(source_path: &PathBuf, ebook: &mut Ebook) -> Result<(), Box<dyn Error>> {
ebook.meta = parse_str_to_metadata(&a)?;
- let a = parts.get(1).unwrap().to_string();
+ let a = (*parts.get(1).unwrap()).to_string();
let entries: Vec<Result<DictWord, Box<dyn Error>>> = a
.split("``` toml")
.filter_map(|s| {
diff --git a/src/cli.yml b/src/cli.yml
index e03ca23..b299af8 100644
--- a/src/cli.yml
+++ b/src/cli.yml
@@ -1,5 +1,5 @@
name: "Simsapa Dictionary Tool"
-version: "0.1.0"
+version: "0.2.0-alpha.1"
author: "https://simsapa.github.io/"
about: "Generating dictionaries in various formats"
@@ -362,6 +362,13 @@ subcommands:
required: true
takes_value: true
+ - title:
+ help: "Use this title for the dictionary."
+ long: title
+ value_name: TITLE
+ required: false
+ takes_value: true
+
- dict_label:
help: "The short id label (such as NCPED or PED) to use for this dictionary source."
long: dict_label
@@ -375,6 +382,12 @@ subcommands:
required: false
takes_value: false
+ - dont_remove_see_also:
+ help: "Don't remove the '(See also...)' type text when processing."
+ long: dont_remove_see_also
+ required: false
+ takes_value: false
+
- nyanatiloka_to_markdown:
about: "Process Ven. Nyanatiloka's Buddhist Dictionary and write a Markdown file with TOML headers."
@@ -393,6 +406,13 @@ subcommands:
required: true
takes_value: true
+ - title:
+ help: "Use this title for the dictionary."
+ long: title
+ value_name: TITLE
+ required: false
+ takes_value: true
+
- dict_label:
help: "The short id label (such as 'Nyana') to use for this dictionary source."
long: dict_label
diff --git a/src/ebook.rs b/src/ebook.rs
index d8e3170..8411a2f 100644
--- a/src/ebook.rs
+++ b/src/ebook.rs
@@ -14,7 +14,7 @@ use deunicode::deunicode;
use crate::app::{AppStartParams, ZipWith};
use crate::dict_word::DictWord;
use crate::error::ToolError;
-use crate::helpers::{self, is_hidden, md2html};
+use crate::helpers::{self, is_hidden, md2html, uppercase_first_letter};
use crate::letter_groups::{LetterGroups, LetterGroup};
use crate::pali;
@@ -691,7 +691,7 @@ impl Ebook {
for filename in ["container.xml", "com.apple.ibooks.display-options.xml"].iter() {
let file_content = self
.asset_files_byte
- .get(&filename.to_string())
+ .get(&(*filename).to_string())
.ok_or("missing get key")?;
let mut file = File::create(dir.join(filename))?;
file.write_all(file_content)?;
@@ -987,6 +987,10 @@ impl Ebook {
let mut text = String::new();
+ if !word.word_header.dict_label.is_empty() {
+        text.push_str(&format!("[{}]\n", &word.word_header.dict_label));
+ }
+
// grammar, phonetic, transliteration
let s = helpers::format_grammar_phonetic_transliteration(
&word.word_header.word,
@@ -1178,7 +1182,10 @@ impl Ebook {
// The simplest case, the whole word
// - abhijanat, abhikamin
// Don't match parens variations abhikami(n), which would leave only (n)
+ // abhijanat
s = s.trim_start_matches(&format!("{}\n", word)).trim().to_string();
+ // Abhijanat
+ s = s.trim_start_matches(&format!("{}\n", uppercase_first_letter(&word))).trim().to_string();
dict_word.definition_md = s;
}
@@ -1271,7 +1278,7 @@ impl Ebook {
}
}
- pub fn process_see_also_from_definition(&mut self) {
+ pub fn process_see_also_from_definition(&mut self, dont_remove_see_also: bool) {
info!("process_see_also_from_definition()");
// [ab(b)ha(t)](/define/ab(b)ha(t))
@@ -1345,7 +1352,9 @@ impl Ebook {
def = re_define_parens_end.replace_all(&def, "[[$2$3]]").to_string();
def = re_define.replace_all(&def, "[[$2]]").to_string();
// Remove 'See also' from the text.
- def = re_see_also.replace_all(&def, "").to_string();
+ if !dont_remove_see_also {
+ def = re_see_also.replace_all(&def, "").to_string();
+ }
// [[wordlink]] -> [wordlink](/define/wordlink)
def = re_bracket_links.replace_all(&def, "[$1](/define/$1)").to_string();
@@ -1353,6 +1362,37 @@ impl Ebook {
}
}
+ pub fn process_define_links(&mut self) {
+ // [abhuṃ](/define/abhuṃ)
+ let re_define = Regex::new(r"\[([^\]]+)\]\(/define/([^\(\)]+)\)").unwrap();
+
+ for (_, dict_word) in self.dict_words.iter_mut() {
+ let def = dict_word.definition_md.clone();
+ for cap in re_define.captures_iter(&def) {
+ let link = cap[0].to_string();
+ let word = cap[2].to_string();
+
+ if self.valid_words.contains(&word) {
+ // If it is a valid word entry, replace to bword:// for Stardict and Babylon.
+ match self.output_format {
+ OutputFormat::StardictXml | OutputFormat::BabylonGls => {
+ let mut s = dict_word.definition_md.clone();
+ s = s.replace(&link, &format!("[{}](bword://{})", word, word)).to_string();
+ dict_word.definition_md = s;
+ }
+ _ => {}
+ }
+ } else {
+ // If it is not a valid word entry, we will replace it with text.
+ let mut s = dict_word.definition_md.clone();
+ s = s.replace(&link, &format!("*{}*", word)).to_string();
+ dict_word.definition_md = s;
+ }
+
+ }
+ }
+ }
+
pub fn process_summary(&mut self) -> Result<(), Box> {
let re_links = Regex::new(r"\[([^\]]*)\]\([^\)]*\)").unwrap();
let re_spaces = Regex::new(" +").unwrap();
@@ -1615,7 +1655,7 @@ impl Default for EbookMetadata {
source: "https://simsapa.github.io".to_string(),
cover_path: "default_cover.jpg".to_string(),
book_id: "SimsapaPaliDictionary".to_string(),
- version: "0.1.0".to_string(),
+ version: "0.2.0-alpha.1".to_string(),
created_date_human: "".to_string(),
created_date_opf: "".to_string(),
word_prefix: "".to_string(),
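
Note: the new process_define_links() pass rewrites internal [word](/define/word) links: headwords present in valid_words become bword:// links for the StarDict and Babylon outputs, and unknown words are demoted to emphasised plain text. A minimal standalone sketch of that rewrite follows; it assumes the regex crate as a dependency, and the function name rewrite_define_links and the sample words are illustrative, not part of this patch.

    use regex::Regex;
    use std::collections::HashSet;

    // Rewrite [word](/define/word) links roughly the way process_define_links() does:
    // known headwords become bword:// links (which StarDict readers resolve),
    // unknown ones are reduced to emphasised plain text.
    fn rewrite_define_links(def: &str, valid_words: &HashSet<String>, stardict: bool) -> String {
        // Same pattern as the patch: [abhuṃ](/define/abhuṃ)
        let re_define = Regex::new(r"\[([^\]]+)\]\(/define/([^\(\)]+)\)").unwrap();
        let mut out = def.to_string();
        for cap in re_define.captures_iter(def) {
            let link = cap[0].to_string();
            let word = cap[2].to_string();
            if valid_words.contains(&word) {
                if stardict {
                    out = out.replace(&link, &format!("[{}](bword://{})", word, word));
                }
            } else {
                out = out.replace(&link, &format!("*{}*", word));
            }
        }
        out
    }

    fn main() {
        let valid: HashSet<String> = ["abhuṃ".to_string()].into_iter().collect();
        let def = "See [abhuṃ](/define/abhuṃ) and [nosuchword](/define/nosuchword).";
        println!("{}", rewrite_define_links(def, &valid, true));
        // -> See [abhuṃ](bword://abhuṃ) and *nosuchword*.
    }
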
diff --git a/src/helpers.rs b/src/helpers.rs
index 964a521..f81c3de 100644
--- a/src/helpers.rs
+++ b/src/helpers.rs
@@ -168,3 +168,12 @@ pub fn ok_or_exit<T>(wait: bool, res: Result<T, Box<dyn Error>>) -> T {
}
}
}
+
+/// https://stackoverflow.com/questions/38406793/why-is-capitalizing-the-first-letter-of-a-string-so-convoluted-in-rust
+pub fn uppercase_first_letter(s: &str) -> String {
+ let mut c = s.chars();
+ match c.next() {
+ None => String::new(),
+        Some(f) => f.to_uppercase().collect::<String>() + c.as_str(),
+ }
+}
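
Note: the ebook.rs changes use this helper to also strip the capitalised form of the headword from definitions. A quick standalone sanity check of the helper is sketched below; the test values are illustrative assumptions, not dictionary data from this patch.

    // Standalone copy of the helper for a quick check; the non-ASCII case relies
    // on char::to_uppercase's Unicode mapping.
    fn uppercase_first_letter(s: &str) -> String {
        let mut c = s.chars();
        match c.next() {
            None => String::new(),
            Some(f) => f.to_uppercase().collect::<String>() + c.as_str(),
        }
    }

    fn main() {
        assert_eq!(uppercase_first_letter("abhijanat"), "Abhijanat");
        assert_eq!(uppercase_first_letter("ñāṇa"), "Ñāṇa");
        assert_eq!(uppercase_first_letter(""), "");
        println!("ok");
    }
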
diff --git a/src/main.rs b/src/main.rs
index 72dbd09..f74bb49 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -98,10 +98,22 @@ fn main() {
ebook.process_also_written_as();
ebook.process_strip_repeat_word_title();
ebook.process_grammar_note();
- ebook.process_see_also_from_definition();
+ ebook.process_see_also_from_definition(app_params.dont_remove_see_also);
ok_or_exit(app_params.used_first_arg, ebook.process_summary());
}
+ // If title was given on CLI, override
+ if let Some(ref title) = app_params.title {
+ ebook.meta.title = title.clone();
+ }
+
+ // If dict_label was given on CLI, override
+ if let Some(ref dict_label) = app_params.dict_label {
+ for (_key, word) in ebook.dict_words.iter_mut() {
+ word.word_header.dict_label = dict_label.clone();
+ }
+ }
+
ok_or_exit(app_params.used_first_arg, ebook.write_markdown());
}
@@ -122,6 +134,18 @@ fn main() {
info!("Added words: {}", ebook.len());
+ // If title was given on CLI, override
+ if let Some(ref title) = app_params.title {
+ ebook.meta.title = title.clone();
+ }
+
+ // If dict_label was given on CLI, override
+ if let Some(ref dict_label) = app_params.dict_label {
+ for (_key, word) in ebook.dict_words.iter_mut() {
+ word.word_header.dict_label = dict_label.clone();
+ }
+ }
+
ok_or_exit(app_params.used_first_arg, ebook.write_markdown());
}
@@ -156,6 +180,7 @@ fn main() {
ebook.process_add_transliterations();
ebook.process_links();
+ ebook.process_define_links();
// If title was given on CLI, override
if let Some(ref title) = app_params.title {
@@ -207,6 +232,7 @@ fn main() {
ebook.process_add_transliterations();
ebook.process_links();
+ ebook.process_define_links();
// Convert /define/word links with bword://word, as recognized by Stardict.
for (_, w) in ebook.dict_words.iter_mut() {
@@ -258,6 +284,7 @@ fn main() {
ebook.process_add_transliterations();
ebook.process_links();
+ ebook.process_define_links();
if let Some(ref title) = app_params.title {
ebook.meta.title = title.clone();
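
Note: the main.rs hunks apply the same override pattern in each subcommand: a --title or --dict_label given on the command line replaces whatever the Markdown metadata carried. A minimal sketch of that pattern follows, using simplified stand-in types; Meta, apply_cli_overrides, and the sample values are illustrative, not the crate's own structs.

    // Simplified stand-ins for the ebook metadata and the per-word labels.
    struct Meta { title: String }

    fn apply_cli_overrides(
        meta: &mut Meta,
        labels: &mut [String],
        cli_title: Option<String>,
        cli_label: Option<String>,
    ) {
        // If --title was given, it wins over the metadata block in the Markdown source.
        if let Some(title) = cli_title {
            meta.title = title;
        }
        // If --dict_label was given, every entry's label is replaced with it.
        if let Some(label) = cli_label {
            for l in labels.iter_mut() {
                *l = label.clone();
            }
        }
    }

    fn main() {
        let mut meta = Meta { title: "New Concise Pali - English Dictionary".to_string() };
        let mut labels = vec!["NCPED".to_string(), "NCPED".to_string()];
        apply_cli_overrides(&mut meta, &mut labels, Some("My Dictionary".to_string()), Some("MyDict".to_string()));
        assert_eq!(meta.title, "My Dictionary");
        assert!(labels.iter().all(|l| l == "MyDict"));
    }
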
diff --git a/tests/data/data with space/ncped with space.xlsx b/tests/data/data with space/ncped with space.xlsx
index e71f579dd67dc3c6d0b3bbd59a8d8391fa49929d..9d56f2d7f6e79415295f1f47b00a13b7f37e760e 100644
GIT binary patch (binary delta data omitted)