From 85b0c6150e6fbecc267b34952b4060fd81270bbe Mon Sep 17 00:00:00 2001
From: Malcolm Langfield <35980963+langfield@users.noreply.github.com>
Date: Tue, 7 Nov 2023 03:09:32 -0500
Subject: [PATCH] Build docs in github actions, remove html files from `docs/`

---
 .github/workflows/docs.yml |   45 +
 docs/functional.html       | 1462 -------------
 docs/index.html            | 4256 ------------------------------------
 docs/maybes.html           | 1324 -----------
 docs/monadic.html          |  342 ---
 docs/note.html             |  452 ----
 docs/safe.html             |  342 ---
 docs/transformer.html      |  805 -------
 docs/types.html            | 3428 -----------------------------
 9 files changed, 45 insertions(+), 12411 deletions(-)
 create mode 100644 .github/workflows/docs.yml
 delete mode 100644 docs/functional.html
 delete mode 100644 docs/index.html
 delete mode 100644 docs/maybes.html
 delete mode 100644 docs/monadic.html
 delete mode 100644 docs/note.html
 delete mode 100644 docs/safe.html
 delete mode 100644 docs/transformer.html
 delete mode 100644 docs/types.html

diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
new file mode 100644
index 00000000..f7a1f863
--- /dev/null
+++ b/.github/workflows/docs.yml
@@ -0,0 +1,45 @@
+# Simple workflow for deploying static content to GitHub Pages
+name: Deploy static content to Pages
+
+on:
+  # Runs on pushes targeting the default branch
+  push:
+    branches: ["main"]
+
+  # Allows you to run this workflow manually from the Actions tab
+  workflow_dispatch:
+
+# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
+permissions:
+  contents: read
+  pages: write
+  id-token: write
+
+# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
+# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
+concurrency:
+  group: "pages"
+  cancel-in-progress: false
+
+jobs:
+  # Single deploy job since we're just deploying
+  deploy:
+    environment:
+      name: github-pages
+      url: ${{ steps.deployment.outputs.page_url }}
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+      - name: Setup Pages
+        uses: actions/configure-pages@v3
+      - name: Make docs
+        run: make documentation
+      - name: Upload artifact
+        uses: actions/upload-pages-artifact@v2
+        with:
+          # Upload html from ./docs
+          path: 'docs'
+      - name: Deploy to GitHub Pages
+        id: deployment
+        uses: actions/deploy-pages@v2

diff --git a/docs/functional.html b/docs/functional.html
deleted file mode 100644
index 8a96f1ce..00000000
--- a/docs/functional.html
+++ /dev/null
@@ -1,1462 +0,0 @@
-ki.functional API documentation
-
-
-Module ki.functional
-
-Type-safe, non Anki-specific functions.
-
#!/usr/bin/env python3
-"""Type-safe, non Anki-specific functions."""
-
-# pylint: disable=import-self, too-many-return-statements
-# pylint: disable=no-value-for-parameter
-
-import os
-import re
-import sys
-import shutil
-import hashlib
-import tempfile
-import functools
-import subprocess
-import unicodedata
-from pathlib import Path
-from itertools import chain
-from functools import reduce, partial, update_wrapper, wraps
-
-import git
-from tqdm import tqdm
-from colorama import Fore, Style
-
-from beartype import beartype
-from beartype.typing import (
-    List,
-    Union,
-    Generator,
-    Tuple,
-    Callable,
-    Any,
-    FrozenSet,
-    Iterable,
-    TypeVar,
-)
-
-import ki.functional as F
-from ki.types import (
-    File,
-    Dir,
-    EmptyDir,
-    NoPath,
-    NoFile,
-    Link,
-    Singleton,
-    PseudoFile,
-    KiRev,
-    Rev,
-)
-
-_has_type_hint_support = sys.version_info[:2] >= (3, 5)
-
-T = TypeVar("T")
-
-UTF8 = "UTF-8"
-GIT = ".git"
-GITMODULES_FILE = ".gitmodules"
-PIPE = subprocess.PIPE
-STDOUT = subprocess.STDOUT
-BRANCH_NAME = "main"
-
-# Emoji regex character classes.
-EMOJIS = "\U0001F600-\U0001F64F"
-PICTOGRAPHS = "\U0001F300-\U0001F5FF"
-TRANSPORTS = "\U0001F680-\U0001F6FF"
-FLAGS = "\U0001F1E0-\U0001F1FF"
-
-# Regex to filter out bad stuff from filenames.
-SLUG_REGEX = re.compile(r"[^\w\s\-" + EMOJIS + PICTOGRAPHS + TRANSPORTS + FLAGS + "]")
-
-
-@beartype
-def curried(func: Callable[[Any, ...], T]) -> Callable[[Any, ...], T]:
-    """A decorator that makes the function curried
-
-    Usage example:
-
-    >>> @curried
-    ... def sum5(a, b, c, d, e):
-    ...     return a + b + c + d + e
-    ...
-    >>> sum5(1)(2)(3)(4)(5)
-    15
-    >>> sum5(1, 2, 3)(4, 5)
-    15
-    """
-
-    def _args_len(func):
-        # pylint: disable=import-outside-toplevel
-        good = True
-        try:
-            from inspect import signature
-
-            signature(func)
-        except TypeError:
-            good = False
-
-        if good and _has_type_hint_support:
-            from inspect import signature
-
-            args = signature(func).parameters
-        else:
-            from inspect import getfullargspec
-
-            args = getfullargspec(func).args
-
-        return len(args)
-
-    @wraps(func)
-    def _curried(*args, **kwargs):
-        f = func
-        count = 0
-        while isinstance(f, partial):
-            if f.args:
-                count += len(f.args)
-            f = f.func
-
-        if count == _args_len(f) - len(args):
-            return func(*args, **kwargs)
-
-        para_func = partial(func, *args, **kwargs)
-        if hasattr(f, "__name__"):
-            update_wrapper(para_func, f)
-        return curried(para_func)
-
-    def _curried_lambda(*args, **kwargs):
-        return partial(func, *args, **kwargs)
-
-    if func.__name__ == "<lambda>":
-        return _curried_lambda
-
-    return _curried
-
-
-@beartype
-def rmtree(target: Dir) -> NoFile:
-    """Equivalent to `shutil.rmtree()`."""
-    shutil.rmtree(target)
-    return NoFile(target)
-
-
-@beartype
-def copytree(source: Dir, target: NoFile) -> Dir:
-    """Call shutil.copytree()."""
-    shutil.copytree(source, target, symlinks=True)
-    return Dir(target.resolve())
-
-
-@beartype
-def movetree(source: Dir, target: NoFile) -> Dir:
-    """Call shutil.move()."""
-    shutil.move(source, target)
-    return Dir(target.resolve())
-
-
-@beartype
-def cwd() -> Dir:
-    """Call Path.cwd()."""
-    return Dir(Path.cwd().resolve())
-
-
-@beartype
-def is_root(path: Union[File, Dir]) -> bool:
-    """Check if 'path' is a root directory (e.g., '/' on Unix or 'C:\' on Windows)."""
-    # Links and `~`s are resolved before checking.
-    path = path.resolve()
-    return len(path.parents) == 0
-
-
-@functools.cache
-@beartype
-def shallow_walk(
-    directory: Dir,
-) -> Tuple[Dir, List[Dir], List[File]]:
-    """Walk only the top-level directory with `os.walk()`."""
-    # pylint: disable=redefined-outer-name
-    root, dirs, files = next(os.walk(directory))
-    root = Dir(root)
-    dirs = [Dir(root / d) for d in dirs]
-    # TODO: Treat symlinks.
-    files = [File(root / f) for f in files]
-    return root, dirs, files
-
-
-@beartype
-def walk(
-    directory: Dir,
-) -> FrozenSet[Union[File, PseudoFile, Link, NoFile]]:
-    """Get all file-like leaves in a directory, recursively."""
-    # pylint: disable=redefined-outer-name
-    leaves = frozenset()
-    for root, _, files in os.walk(directory):
-        root = Dir(root)
-        leaves |= frozenset({F.chk(root / f) for f in files})
-    return leaves
-
-
-# TODO: Remove `resolve: bool` parameter, and test symlinks before resolving.
-@beartype
-def chk(
-    path: Path,
-    resolve: bool = True,
-) -> Union[File, Dir, EmptyDir, PseudoFile, NoPath, NoFile, Link]:
-    """Test whether `path` is a file, a directory, or something else."""
-    if resolve:
-        path = path.resolve()
-    if path.is_file():
-        return File(path)
-    if path.is_dir():
-        if is_empty(Dir(path)):
-            return EmptyDir(path)
-        return Dir(path)
-    if path.exists():
-        return PseudoFile(path)
-    if os.path.islink(path):
-        return Link(path)
-    if path.parent.is_dir():
-        return NoFile(path)
-    return NoPath(path)
-
-
-@beartype
-def touch(directory: Dir, name: str) -> File:
-    """Touch a file."""
-    path = directory / singleton(name)
-    path.touch()
-    return File(path.resolve())
-
-
-@beartype
-def write(path: Union[File, NoFile], text: str) -> File:
-    """Write text to a file."""
-    with open(path, "w+", encoding="UTF-8") as f:
-        f.write(text)
-    return File(path)
-
-
-@beartype
-def writeb(path: Union[File, NoFile], bs: bytes) -> File:
-    """Write text to a file."""
-    with open(path, "wb") as f:
-        f.write(bs)
-    return File(path)
-
-
-@beartype
-def symlink(path: NoFile, target: Path) -> Link:
-    """Link `path` to `target`."""
-    os.symlink(target, path)
-    return Link(path)
-
-
-@beartype
-def mksubdir(directory: EmptyDir, suffix: Path) -> EmptyDir:
-    """
-    Make a subdirectory of an empty directory (with parents).
-
-    Returns
-    -------
-    EmptyDir
-        The created subdirectory.
-    """
-    subdir = directory / suffix
-    subdir.mkdir(parents=True)
-    directory.__class__ = Dir
-    return EmptyDir(subdir.resolve())
-
-
-@beartype
-def force_mkdir(path: Path) -> Dir:
-    """Make a directory (with parents, ok if it already exists)."""
-    path.mkdir(parents=True, exist_ok=True)
-    return Dir(path.resolve())
-
-
-@beartype
-def chdir(directory: Dir) -> Dir:
-    """Changes working directory and returns old cwd."""
-    old: Dir = F.cwd()
-    os.chdir(directory)
-    return old
-
-
-@beartype
-def parent(path: Union[File, Dir]) -> Dir:
-    """
-    Get the parent of a path that exists.  If the path points to the filesystem
-    root, we return itself.
-    """
-    if is_root(path):
-        return Dir(path.resolve())
-    return Dir(path.parent)
-
-
-@beartype
-def mkdtemp() -> EmptyDir:
-    """Make a temporary directory (in /tmp)."""
-    return EmptyDir(tempfile.mkdtemp()).resolve()
-
-
-@beartype
-def copyfile(source: File, target: Union[File, NoFile]) -> File:
-    """Safely copy a file to a valid location."""
-    shutil.copyfile(source, target)
-    return File(target.resolve())
-
-
-@beartype
-def rglob(d: Dir, pattern: str) -> List[File]:
-    """Call d.rglob() and returns only files."""
-    files = filter(lambda p: isinstance(p, File), map(F.chk, d.rglob(pattern)))
-    return list(files)
-
-
-@beartype
-def is_empty(directory: Dir) -> bool:
-    """Check if directory is empty, quickly."""
-    return not next(os.scandir(directory), None)
-
-
-@beartype
-def root(repo: git.Repo) -> Dir:
-    """Get working directory of a repo."""
-    return Dir(repo.working_dir).resolve()
-
-
-@beartype
-def gitd(repo: git.Repo) -> Dir:
-    """Get git directory of a repo."""
-    return Dir(repo.git_dir).resolve()
-
-
-@beartype
-def singleton(name: str) -> Singleton:
-    """Removes all forward slashes and returns a Singleton pathlib.Path."""
-    return Singleton(name.replace("/", ""))
-
-
-@beartype
-def md5(path: File) -> str:
-    """Compute md5sum of file at `path`."""
-    hash_md5 = hashlib.md5()
-    with open(path, "rb") as f:
-        for chunk in iter(lambda: f.read(4096), b""):
-            hash_md5.update(chunk)
-    return hash_md5.hexdigest()
-
-
-@beartype
-def rev_exists(repo: git.Repo, rev: str) -> bool:
-    """Check if git commit reference exists in repository."""
-    try:
-        repo.git.rev_parse("--verify", rev)
-    except git.GitCommandError:
-        return False
-    return True
-
-
-@beartype
-def get_batches(lst: List[File], n: int) -> Generator[File, None, None]:
-    """Yield successive n-sized chunks from lst."""
-    for i in range(0, len(lst), n):
-        yield lst[i : i + n]
-
-
-@beartype
-def slugify(value: str) -> str:
-    """
-    Taken from [1]. Convert spaces or repeated dashes to single dashes. Remove
-    characters that aren't alphanumerics, underscores, or hyphens. Convert to
-    lowercase. Also strip leading and trailing whitespace, dashes, and
-    underscores.
-
-    [1] https://github.com/django/django/blob/master/django/utils/text.py
-    """
-    value = unicodedata.normalize("NFKC", value)
-    value = re.sub(SLUG_REGEX, "", value.lower())
-    return re.sub(r"[-\s]+", "-", value).strip("-_")
-
-
-@beartype
-def ki_rev_to_rev(ki_rev: KiRev) -> Rev:
-    """Convert a ki repository commit rev to a git repository commit rev."""
-    return Rev(ki_rev.kirepo.repo, ki_rev.sha)
-
-
-@beartype
-def mkdir(path: NoPath) -> EmptyDir:
-    """Make a directory (with parents)."""
-    path.mkdir(parents=True)
-    return EmptyDir(path)
-
-
-@beartype
-def unlink(file: Union[File, Link]) -> NoFile:
-    """Safely unlink a file."""
-    os.unlink(file)
-    return NoFile(file)
-
-
-@curried
-@beartype
-def rmsm(repo: git.Repo, sm: git.Submodule) -> git.Commit:
-    """Remove a git submodule."""
-    # Remove the submodule root and delete its .git directory.
-    sm_root = Path(sm.module().working_tree_dir)
-    repo.git.rm(sm_root, cached=True)
-    dotgit = F.chk(sm_root / GIT)
-    if isinstance(dotgit, Dir):
-        F.rmtree(dotgit)
-    else:
-        dotgit.unlink(missing_ok=True)
-
-    # Directory `sm_root` should still exist after `git.rm()` call.
-    repo.git.add(sm_root)
-    return repo.index.commit(f"Add submodule `{sm.name}` as ordinary directory.")
-
-
-@beartype
-def unsubmodule(repo: git.Repo) -> git.Repo:
-    """
-    Un-submodule all the git submodules (converts them to ordinary subdirs and
-    destroys commit history). Commit the changes to the main repository.
-    """
-    _: List[git.Commit] = list(map(F.rmsm(repo), repo.submodules))
-    gitmodules_file: Path = F.root(repo) / GITMODULES_FILE
-    if gitmodules_file.exists():
-        repo.git.rm(gitmodules_file)
-        _ = repo.index.commit("Remove `.gitmodules` file.")
-    return repo
-
-
-@beartype
-def init(targetdir: Dir) -> Tuple[git.Repo, str]:
-    """Run `git init`, returning the repo and initial branch name."""
-    branch = BRANCH_NAME
-    try:
-        repo = git.Repo.init(targetdir, initial_branch=BRANCH_NAME)
-    except git.GitCommandError:
-        branch = "master"
-        repo = git.Repo.init(targetdir)
-    return repo, branch
-
-
-@beartype
-def isfile(p: Path) -> bool:
-    """Check if `p` is a File."""
-    return isinstance(p, File)
-
-
-@beartype
-def cat(xs: Iterable[Iterable[T]]) -> Iterable[T]:
-    """Concatenate some iterables."""
-    return chain.from_iterable(xs)
-
-
-@beartype
-def commitall(repo: git.Repo, msg: str) -> git.Commit:
-    """Commit all contents of a git repository."""
-    repo.git.add(all=True)
-    return repo.index.commit(msg)
-
-
-@curried
-@beartype
-def git_rm(repo: git.Repo, path: str) -> str:
-    """Remove a path in a repo."""
-    repo.git.rm(path)
-    return path
-
-
-@beartype
-def yellow(s: str) -> None:
-    """Print a message to the console in yellow."""
-    print(f"{Fore.YELLOW}{s}{Style.RESET_ALL}")
-
-
-@beartype
-def red(s: str) -> None:
-    """Print a message to the console in red."""
-    print(f"{Fore.RED}{s}{Style.RESET_ALL}")
-
-
-@beartype
-def progressbar(xs: Iterable[T], s: str) -> Iterable[T]:
-    """Print a progress bar for an iterable."""
-    ys: Iterable[T] = tqdm(xs, ncols=80)
-    ys.set_description(s)
-    return ys
-
-
-@beartype
-def starfilter(
-    f: Callable[[Any, ...], bool], xs: Iterable[Tuple[Any, ...]]
-) -> Iterable[Tuple[Any, ...]]:
-    """Filter an iterable, automatically unpacking tuple arguments."""
-    return filter(lambda x: f(*x), xs)
-
-
-@beartype
-def part(p: Callable[[T], bool], xs: Iterable[T]) -> Tuple[Iterable[T], Iterable[T]]:
-    """Partition a list on a boolean predicate (Trues, Falses)."""
-    return reduce(lambda s, x: s[not p(x)].append(x) or s, xs, ([], []))
-
-
-
-
-
-
-
-

Functions

-
-def cat(xs: Iterable[Iterable[T]]) -> Iterable[T]
-    Concatenate some iterables.
-def chdir(directory: Dir) -> Dir
-    Changes working directory and returns old cwd.
-def chk(path: Path, resolve: bool = True) -> Union[File, Dir, EmptyDir, PseudoFile, NoPath, NoFile, Link]
-    Test whether `path` is a file, a directory, or something else.
-def commitall(repo: git.Repo, msg: str) -> git.Commit
-    Commit all contents of a git repository.
-def copyfile(source: File, target: Union[File, NoFile]) -> File
-    Safely copy a file to a valid location.
-def copytree(source: Dir, target: NoFile) -> Dir
-    Call shutil.copytree().
-def curried(func: Callable[[Any, ...], T]) -> Callable[[Any, ...], T]
-    A decorator that makes the function curried.
-def cwd() -> Dir
-    Call Path.cwd().
-def force_mkdir(path: Path) -> Dir
-    Make a directory (with parents, ok if it already exists).
-def get_batches(lst: List[File], n: int) -> Generator[File, None, None]
-    Yield successive n-sized chunks from lst.
-def git_rm(repo: git.Repo, path: str) -> str
-    Remove a path in a repo.
-def gitd(repo: git.Repo) -> Dir
-    Get git directory of a repo.
-def init(targetdir: Dir) -> Tuple[git.Repo, str]
-    Run `git init`, returning the repo and initial branch name.
-def is_empty(directory: Dir) -> bool
-    Check if directory is empty, quickly.
-def is_root(path: Union[File, Dir]) -> bool
-    Check if 'path' is a root directory (e.g., '/' on Unix or 'C:\' on Windows).
-def isfile(p: Path) -> bool
-    Check if `p` is a File.
-def ki_rev_to_rev(ki_rev: KiRev) -> Rev
-    Convert a ki repository commit rev to a git repository commit rev.
-def md5(path: File) -> str
-    Compute md5sum of file at `path`.
-def mkdir(path: NoPath) -> EmptyDir
-    Make a directory (with parents).
-def mkdtemp() -> EmptyDir
-    Make a temporary directory (in /tmp).
-def mksubdir(directory: EmptyDir, suffix: Path) -> EmptyDir
-    Make a subdirectory of an empty directory (with parents).
-def movetree(source: Dir, target: NoFile) -> Dir
-    Call shutil.move().
-def parent(path: Union[File, Dir]) -> Dir
-    Get the parent of a path that exists. If the path points to the filesystem root, we return itself.
-def part(p: Callable[[T], bool], xs: Iterable[T]) -> Tuple[Iterable[T], Iterable[T]]
-    Partition a list on a boolean predicate (Trues, Falses).
-def progressbar(xs: Iterable[T], s: str) -> Iterable[T]
-    Print a progress bar for an iterable.
-def red(s: str) -> None
-    Print a message to the console in red.
-def rev_exists(repo: git.Repo, rev: str) -> bool
-    Check if git commit reference exists in repository.
-def rglob(d: Dir, pattern: str) -> List[File]
-    Call d.rglob() and returns only files.
-def rmsm(repo: git.Repo, sm: git.Submodule) -> git.Commit
-    Remove a git submodule.
-def rmtree(target: Dir) -> NoFile
-    Equivalent to `shutil.rmtree()`.
-def root(repo: git.Repo) -> Dir
-    Get working directory of a repo.
-def shallow_walk(directory: Dir) -> Tuple[Dir, List[Dir], List[File]]
-    Walk only the top-level directory with `os.walk()`.
-def singleton(name: str) -> Singleton
-    Removes all forward slashes and returns a Singleton pathlib.Path.
-def slugify(value: str) -> str
-    Convert spaces or repeated dashes to single dashes, remove characters that aren't alphanumerics, underscores, or hyphens, convert to lowercase, and strip leading and trailing whitespace, dashes, and underscores. Taken from django/utils/text.py.
-def starfilter(f: Callable[[Any, ...], bool], xs: Iterable[Tuple[Any, ...]]) -> Iterable[Tuple[Any, ...]]
-    Filter an iterable, automatically unpacking tuple arguments.
-def symlink(path: NoFile, target: Path) -> Link
-    Link `path` to `target`.
-def touch(directory: Dir, name: str) -> File
-    Touch a file.
-def unlink(file: Union[File, Link]) -> NoFile
-    Safely unlink a file.
-def unsubmodule(repo: git.Repo) -> git.Repo
-    Un-submodule all the git submodules (converts them to ordinary subdirs and destroys commit history). Commit the changes to the main repository.
-def walk(directory: Dir) -> FrozenSet[Union[File, PseudoFile, Link, NoFile]]
-    Get all file-like leaves in a directory, recursively.
-def write(path: Union[File, NoFile], text: str) -> File
-    Write text to a file.
-def writeb(path: Union[File, NoFile], bs: bytes) -> File
-    Write bytes to a file.
-def yellow(s: str) -> None
-    Print a message to the console in yellow.
-
-
\ No newline at end of file
diff --git a/docs/index.html b/docs/index.html
deleted file mode 100644
index 481033df..00000000
--- a/docs/index.html
+++ /dev/null
@@ -1,4256 +0,0 @@
-ki API documentation
-
-
-
-

Package ki

-
-
-

Ki is a command-line interface for the version control and editing of .anki2 collections as git repositories of markdown files. Rather than providing an interactive UI like the Anki desktop client, ki aims to allow natural editing in the filesystem.

-

In general, the purpose of ki is to allow users to work on large, complex Anki -decks in exactly the same way they work on large, complex software projects.

-

Ki provides command-line functions to:

-
  1. clone a .anki2 collection into a directory as a git repository.
  2. pull changes from the Anki desktop client (and AnkiWeb) into an existing repository.
  3. push changes (safely!) back to Anki.

This is documentation for the ki repository.

-

Installation

-

Ki is tested on Python 3.9 and 3.10. You'll need to install Python and Git, and then run the following command in a terminal:

-
  1. Install the ki package:
-
pip install git+https://github.com/langfield/ki.git@main
-
-

Getting started

-

This section will walk through the following example workflow:

-
  1. Cloning an existing collection into a ki repository.
  2. Editing the note files in the repository.
  3. Pushing those edits back to Anki.
  4. Pulling changes made in Anki into the repository.

Before cloning, we'll need to find our .anki2 collection file. This is where Anki stores the data for all our notes.

-
-

Note. If you're new to Anki, or are unfamiliar with the terms collection, profile, note, or card, you may wish to take a look at the Anki documentation.

-
-

If you already know the path to the .anki2 collection file you want to clone, skip to the section on running the clone command.

-

Finding the .anki2 collection file

-

To find our collection file, we must first find our Anki data directory. The location of this varies by operating system.

-

In most cases, you should be able to find your data directory at the path given below for your respective OS:

-

MacOS

-
~/Library/Application Support/Anki2
-
-

Windows

-
%APPDATA%\Anki2
-
-

GNU/Linux

-
~/.local/share/Anki2
-
-
-

Note. You can read more about the default Anki data directory locations here.

-
-
-

If you are running Anki 2.1 (which you should be, because ki is not tested with lower versions), opening this directory will reveal several files and subdirectories. The following example output is from a machine running Debian GNU/Linux:

-
user@host:~/.local/share/Anki2$ ls
- addons21   crash.log   prefs21.db   README.txt  'User 1'
-
-

In particular, there is a subdirectory for each profile. In the above example, there is only one profile, User 1. But, in general, there may be many profiles associated with a given Anki installation.

-

Multiple profiles

-

Below we can see a visual representation of the directory structure of an Anki data directory with two profiles, User 1, and User 2:

-
Anki2/
-├── addons21
-│   ├── 1046608507
-│   ├── 109531687
-│   ├── 1097423555
-│   └── 1972239816
-├── crash.log
-├── prefs21.db
-├── README.txt
-├── User 1
-│   ├── backups
-│   ├── collection2.log
-│   ├── collection.anki2
-│   ├── collection.log
-│   ├── collection.media
-│   ├── collection.media.db2
-│   └── deleted.txt
-└── User 2
-    ├── collection.anki2
-    ├── collection.anki2-wal
-    └── collection.media
-
-

Note that there is a collection.anki2 file in each profile subdirectory.

-

If you're not sure of the name of your user profile, it can be seen in the title bar of the Anki desktop client:

-

(screenshot: the Anki desktop title bar, showing the profile name)

-

Most Anki installations will only have one profile, and if you haven't changed the default profile name, it will probably be called User 1. Let's enter the profile directory for User 1 and list its contents:

-
user@host:~/.local/share/Anki2$ cd User\ 1/
-user@host:~/.local/share/Anki2/User 1$ ls
-backups  collection2.log  collection.anki2  collection.log  collection.media  collection.media.db2  deleted.txt
-
-

So if we want to clone User 1's collection, the path that we want is:

-
~/.local/share/Anki2/User\ 1/collection.anki2
-
-

We'll pass this as a command-line argument to the ki executable in the next section.

-

Running the clone command

-

Now we're ready to actually clone the collection into a repository. The ki clone command works similarly to git clone, in that it will create a new directory for the repository within the current working directory. So if we want to clone our collection into a new subdirectory in ~/ (the home directory on macOS and GNU/Linux), we would first make sure we're in the home directory. Second, we need to check that Anki is closed before cloning. Nothing bad will happen if we clone while Anki is open, but the command will fail because the database is locked. Once we've done that, we can run the command:

-
ki clone ~/.local/share/Anki2/User 1/collection.anki2
-
-

And we should see output that looks similar to this:

-
lyra@oxford$ ki clone ~/.local/share/Anki2/User 1/collection.anki2
-Found .anki2 file at '/home/lyra/.local/share/Anki2/User 1/collection.anki2'
-Computed md5sum: ad7ea6d486a327042cf0b09b54626b66
-Wrote md5sum to '/home/lyra/collection/.ki/hashes'
-Cloning into '/home/lyra/collection/'...
-100%|█████████████████████████| 28886/28886 [00:10<00:00, 2883.78it/s]
-
-

If we list the contents of the home directory, we can see that ki did indeed create a new directory called collection:

-
lyra@oxford:~$ ls
-collection  pkgs
-
-

Editing notes

-

Now that we've successfully cloned our Anki collection into a ki repository, we can start editing notes! Our home directory looks like this:

-
lyra@oxford:~$ ls
-collection  pkgs
-
-

And we see the repo we cloned, which is called collection.

-

Let's change directories to the newly cloned ki repo and take a look at what's inside:

-
lyra@oxford:~$ cd collection/
-lyra@oxford:~/collection$ ls --classify
-algebras/ manifolds/ rings/
-
-

We see that we have three directories, which represent three Anki decks. This is just an example; you'll see directories corresponding to the top-level decks in your Anki collection.

-
-

Note. The ls --classify command adds a trailing / to the end of directories to distinguish them from ordinary files.

-
-

Let's enter the manifolds directory and see what's inside.

-
lyra@oxford:~/collection$ cd manifolds/
-lyra@oxford:~/collection/manifolds$ ls
-MANIFOLDS.md
-
-

So we see a single markdown file called MANIFOLDS.md, which contains the notes for the manifolds deck. If we had subdecks of the manifolds deck, we would see more subdirectories here, and each one would have a markdown file in it as well. Let's open this file and see what's inside.

-

We'll use vim to open the markdown file in this example, but any text editor will work.

-
lyra@oxford:~/collection/manifolds$ vi MANIFOLDS.md
-
-
# Note
-nid: 1622849751948
-model: Basic
-deck: manifolds
-tags:
-markdown: false
-
-## Front
-Diffeomorphism
-
-## Back
-A smooth surjective map between manifolds which has a smooth inverse.
-
-# Note
-nid: 1566621764508
-model: Basic
-deck: manifolds
-tags:
-markdown: false
-
-## Front
-distribution (on a smooth manifold)
-
-## Back
-A distribution on \(M\) of rank \(k\) is a rank-\(k\) subbundle of \(TM\)
-
-

So we see the structure of two notes inside this file. For each note, there is a section for note metadata, and a section for each field.

-

There is a typo in the first note. It says smooth surjective map, but it should say smooth bijective map. Let's fix it, save our changes, and go back to the terminal. When we go back up to the root of the repository and run git status, we can see which files we've changed.

-
-

INTERNAL. Add the output of git status here.

-
-

And running git diff shows us the content of the unstaged changes:

-
-

INTERNAL. Add the output of git diff here.

-
-

Then we can commit our changes as usual.

-
lyra@oxford:~/collection$ git add manifolds/MANIFOLDS.md
-lyra@oxford:~/collection$ git commit -m "Fix typo in diffeomorphism definition: 'surjective' -> 'bijective'"
-
-

At this point we would usually git push, but if we try that in a ki repository, we'll see this:

-
lyra@oxford:~/collection$ git push
-fatal: No configured push destination.
-Either specify the URL from the command-line or configure a remote repository using
-
-    git remote add <name> <url>
-
-and then push using the remote name
-
-    git push <name>
-
-
-

Since we're not pushing to an ordinary git remote, but to the Anki SQLite3 database, we must use ki push instead, which is covered briefly in the next section.

-

Pushing committed changes back to Anki

-

This part is super easy! Similar to when we cloned, we must remember to close Anki before pushing, or the command will fail (gracefully). All right, now we just run the command:

-
lyra@oxford:~/collection$ ki push
-Pushing to '/home/lyra/.local/share/Anki2/lyra/collection.anki2'
-Computed md5sum: 199216c39eeabe23a1da016a99ffd3e2
-Verified md5sum matches latest hash in '/home/lyra/decks/.ki/hashes'
-Generating local .anki2 file from latest commit: 2aa009729b6dd337dd1ce795df611f5a49
-Writing changes to '/tmp/tmpyiids2qm/original.anki2'...
-100%|█████████████████████████████████| 2/2 [00:00<00:00, 1081.56it/s]
-Database was modified.
-Writing backup of .anki2 file to '/home/lyra/decks/.ki/backups'
-Overwrote '/home/lyra/.local/share/Anki2/lyra/collection.anki2'
-
-

As the output suggests, ki saves a backup of our collection each time we push, just in case we wish to hard-revert a change we've made.

-

Now we can open Anki and view the changes we've made in the note browser!

-

Pulling changes from Anki into the repository

-

So now we know how to make changes from the filesystem and push them back to Anki, but suppose that after we cloned our repository, we made some edits within Anki, and we'd like those to show up in our repository? For this, we'll need to close Anki, and then run the following command:

-
lyra@oxford:~/collection$ ki pull
-Pulling from '/home/lyra/.local/share/Anki2/lyra/collection.anki2'
-Computed md5sum: 199216c39eeabe23a1da016a99ffd3e2
-Updating 5a9ef09..9c30b73
-Fast-forward
- note1645010162168.md |  4 ++--
- note1645222430007.md | 11 +++++++++++
- 2 files changed, 13 insertions(+), 2 deletions(-)
- create mode 100644 note1645222430007.md
-
-From /tmp/tmpt5a3yd9a/ki/local/199216c39eeabe23a1da016a99ffd3e2/
- * branch            main       -> FETCH_HEAD
- * [new branch]      main       -> anki/main
-
-Wrote md5sum to '/home/lyra/decks/.ki/hashes'
-
-

And we're done! Our repository is up to date, as ki will tell us if we try to pull again:

-
lyra@oxford:~/collection$ ki pull
-ki pull: up to date.
-
-

Merge conflicts

-

Occasionally, when we edit the same lines in the same note fields in both Anki and our local repository, we may encounter a merge conflict:

-
lyra@oxford:~/collection$ ki pull
-Pulling from '/home/lyra/.local/share/Anki2/User 1/collection.anki2'
-Computed md5sum: debeb6689f0b83d520ff913067c598e9
-Auto-merging note1645788806304.md
-CONFLICT (add/add): Merge conflict in note1645788806304.md
-Automatic merge failed; fix conflicts and then commit the result.
-
-From /tmp/tmpgkq4ilfy/ki/local/debeb6689f0b83d520ff913067c598e9/
- * branch            main       -> FETCH_HEAD
- * [new branch]      main       -> anki/main
-
-Wrote md5sum to '/home/mal/collection/.ki/hashes'
-
-

This is expected behavior, and since the process of resolving merge conflicts is the same for ki repositories as for git repositories (since ki repositories are git repositories), we refer to StackOverflow for how to proceed.

-

Usage reference

-

Clone

-

The ki clone command takes one required argument (the path to a .anki2 file) and one optional argument (a path to a target directory). The usage is meant to mirror that of git clone.

-

An example of the clone subcommand usage and its output is given below.

-
$ ki clone ~/.local/share/Anki2/lyra/collection.anki2 decks
-
-
Found .anki2 file at '/home/lyra/.local/share/Anki2/lyra/collection.anki2'
-Computed md5sum: ad7ea6d486a327042cf0b09b54626b66
-Wrote md5sum to '/home/lyra/decks/.ki/hashes'
-Cloning into '/home/lyra/decks/'...
-100%|█████████████████████████| 28886/28886 [00:10<00:00, 2883.78it/s]
-
-

Pull

-

Once an Anki collection has been cloned, we can pull changes made by the Anki desktop client into our repository.

-

An example of the pull subcommand usage and its output is given below.

-
$ ki pull
-
-
Pulling from '/home/lyra/.local/share/Anki2/lyra/collection.anki2'
-Computed md5sum: 199216c39eeabe23a1da016a99ffd3e2
-Updating 5a9ef09..9c30b73
-Fast-forward
- note1645010162168.md |  4 ++--
- note1645222430007.md | 11 +++++++++++
- 2 files changed, 13 insertions(+), 2 deletions(-)
- create mode 100644 note1645222430007.md
-
-From /tmp/tmpt5a3yd9a/ki/local/199216c39eeabe23a1da016a99ffd3e2/
- * branch            main       -> FETCH_HEAD
- * [new branch]      main       -> anki/main
-
-Wrote md5sum to '/home/lyra/decks/.ki/hashes'
-
-

ki first deletes any residual ephemeral repositories in /tmp/ki/remote/. These would only remain here if a previous pull command failed.

-

It then verifies that the path to the .anki2 file specified in the .ki/ directory (analogous to the .git/ directory) still exists.

-

It computes and records the hash of the collection file. In this way, ki keeps track of whether the collection database has changed since the last clone/pull.

-

Finally, the collection is cloned into an ephemeral repository in a temp directory, which is then git pull-ed into the current repository.

-

At this point, if the git operation fails, the user can take over and manage the merge themselves.
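
To make the sequence concrete, here is a rough Python sketch of the pull flow just described. It is an illustration only, not ki's actual implementation: the /tmp/ki/remote/ location and the .ki/hashes file come from the text above, while the pull_sketch name, the col_path argument, and the elided markdown-conversion step are stand-ins.

# Illustrative sketch of the pull flow described above -- not ki's real code.
import hashlib
import shutil
import tempfile
from pathlib import Path

import git  # GitPython


def pull_sketch(repo_root: Path, col_path: Path) -> None:
    """Clean up, verify, hash, clone into a temp repo, then merge."""
    # 1. Delete residual ephemeral repositories left by a failed previous pull.
    remote_root = Path(tempfile.gettempdir()) / "ki" / "remote"
    shutil.rmtree(remote_root, ignore_errors=True)

    # 2. Verify that the .anki2 collection file still exists.  (Here the path
    #    is passed in directly; ki records it under the .ki/ directory.)
    if not col_path.is_file():
        raise FileNotFoundError(col_path)

    # 3. Compute and record the collection hash, so ki can tell whether the
    #    database has changed since the last clone/pull.
    md5sum = hashlib.md5(col_path.read_bytes()).hexdigest()
    with open(repo_root / ".ki" / "hashes", "a", encoding="UTF-8") as f:
        f.write(md5sum + "\n")

    # 4. Convert the collection into an ephemeral git repository in a temp
    #    directory (conversion elided), then `git pull` it into this repo.
    ephemeral = remote_root / md5sum
    # ... write markdown files and commit them in a repository at `ephemeral` ...
    repo = git.Repo(repo_root)
    repo.git.pull(str(ephemeral), "main")  # on failure, resolve the merge by hand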

-

Push

-

When we want to push our changes back to the Anki desktop client, we can use ki push to do that.

-

An example of the push subcommand usage and its output is given below.

-
$ ki push
-
-
Pushing to '/home/lyra/.local/share/Anki2/lyra/collection.anki2'
-Computed md5sum: 199216c39eeabe23a1da016a99ffd3e2
-Verified md5sum matches latest hash in '/home/lyra/decks/.ki/hashes'
-Generating local .anki2 file from latest commit: 2aa009729b6dd337dd1ce795df611f5a49
-Writing changes to '/tmp/tmpyiids2qm/original.anki2'...
-100%|█████████████████████████████████| 2/2 [00:00<00:00, 1081.56it/s]
-Database was modified.
-Writing backup of .anki2 file to '/home/lyra/decks/.ki/backups'
-Overwrote '/home/lyra/.local/share/Anki2/lyra/collection.anki2'
-
-

We store 5 backups of the collection prior to a push.

-

Collaborative decks

-

This section assumes knowledge of the basic ki operations and familiarity with git. If you haven't yet cloned your Anki collection into a ki repository, read the getting started section.

-
  1. Cloning a collaborative deck from GitHub.
  2. Editing the collaborative deck.
  3. Pulling other users' changes to the deck from GitHub.
  4. Pushing edits back to GitHub.

Cloning a collaborative deck from GitHub

-

Now that we've created our first ki repository, we might want to try our hand at collaborating on a deck with other Anki users. We won't actually need to make use of the ki program to do this, because ki repositories are also git repositories, and so we can clone collaborative decks from GitHub as git-submodules of our collection repo.

-
-

Note. If you're completely unfamiliar with git, consider reading this short introduction.

-
-

Suppose we've cloned an Anki collection into a ki repository in our home directory, just like we did in the getting started section, and we want to add a collaborative deck from GitHub to our collection. Let's walk through an example. Our home directory looks like this:

-
lyra@oxford:~$ ls
-collection  pkgs
-
-

And we see the repo we cloned, which is called collection.

-

To add a collaborative deck repo as a submodule, we'll first need to change directories to the newly cloned ki repo:

-
lyra@oxford:~$ cd collection/
-lyra@oxford:~/collection$ ls --classify
-algebras/ groups/ rings/
-
-

We see that we have three directories, which represent three Anki decks. This is just an example; you'll see directories corresponding to the top-level decks in your Anki collection.

-
-

Note. The ls --classify command adds a trailing / to the end of directories to distinguish them from ordinary files.

-
-

Adding the repository as a git submodule

-

Suppose we want to add the collaborative deck https://github.com/langfield/manifolds.git to our collection. We can do that by running the command:

-
git-submodule add https://github.com/langfield/manifolds.git
-
-

which yields the output:

-
lyra@oxford~/collection$ git-submodule add https://github.com/langfield/manifolds.git
-Cloning into 'manifolds'...
-remote: Counting objects: 11, done.
-remote: Compressing objects: 100% (10/10), done.
-remote: Total 11 (delta 0), reused 11 (delta 0)
-Unpacking objects: 100% (11/11), done.
-Checking connectivity... done.
-
-

And we can see that the command was successful because we have a new directory/deck called manifolds in our repo:

-
lyra@oxford:~/collection$ ls --classify
-algebras/ groups/ manifolds/ rings/
-
-

Nice!

-

Editing a collaborative deck

-

There are two ways to edit a collaborative deck locally:

-
  1. Edit the markdown files in the ki repository.
  2. Edit the deck inside the Anki desktop client.
-

After we've cloned the manifolds deck repository into a submodule of our ki repository, we may want to make some edits to the deck.

-

How it works

-

ki is built on top of existing tooling implemented in the python package apy, which is used to parse the Anki collection SQLite file and convert its contents to human-readable markdown files.

-

These files (one per Anki note) are then dumped to a configurable location in the filesystem as a git repository, whose structure mirrors that of the decks in the collection. In effect, ki treats the git repo it generates as a local copy of the collection, and the .anki2 collection file as a remote.

-

All operations like pulling updates to the collection into ki and pushing updates from ki into Anki are handled by git under the hood.

-

This approach has several advantages:

-
  1. Merge conflicts can be handled in the usual, familiar way.
  2. Additional remotes (e.g. a human-readable backup of a collection on github) can be added easily.
  3. Users are free to pick the editor of their choice, perform batch editing with command line tools like awk or sed, and even add CI actions.

Model

-

The following diagram shows the dataflow of a typical Anki/ki stack.

-
                 +-------------+          +--------------+
-                 |             |          |              |
-                 |   AnkiWeb  -------------  AnkiMobile  |
-                 |             |   sync   |              |
-                 +------|------+          +--------------+
-                        |
-                        | sync
-                        |
-                 +------|------+
-                 |             |
-                 |    Anki     |
-                 |             |
-                 +------|------+
-                        |
-                        | deck edits
-                        |
-               +--------|--------+               +------------------+
-               |                 |    ki clone   |                  |
-               |                 ---------------->                  |
-               | Collection file |               |     ~/decks/     |
-               |    (.anki2)     |    ki push    | (git repository) |
-               |                 <----------------                  |
-               |                 |               |                  |
-               +--------|--------+               +---------^--------+
-                        |                                  |
-                        | ki pull                          |
-                        |                                  |
-                        |                                  |
-             +----------v----------+                       |
-             |                     |                       |
-             | /tmp/ki/remote/AAA  |           ki pull     |
-             |  (git repository)   -------------------------
-             |    [ephemeral]      |
-             |                     |
-             +---------------------+
-
-

The node labeled Anki is the Anki desktop client on the localhost. It communicates with the AnkiWeb servers via Anki's sync feature. Other clients (e.g. AnkiDroid and AnkiMobile) are able to (1) pull changes made by the desktop client into their local collections via AnkiWeb, and (2) push changes made locally back to AnkiWeb.

-

When the Anki desktop client is started on the localhost, it opens and places a lock on the .anki2 SQLite file. During the session, changes are possibly made to the deck, and the SQLite file is unlocked when the program is closed.

-

Since ki must read from this database file, that means that ki commands will not work while Anki is running. This is by design: the database is locked for a reason, and enforcing this constraint lowers the likelihood that users' decks become corrupted.

-

An ephemeral repository is used as an auxiliary step during the ki pull operation so that we can merge the Anki desktop client's changes into our repository via git.

-

Generating html

-

By default, ki parses the html of each field and dumps the content only, insofar as that is possible. It also supports parsing arbitrary html elements autogenerated by addons and regenerating the updated content. In the following subsection, we walk through an example.

-

Example: generating syntax-highlighted code blocks

-

The anki addon developer Glutanimate has an addon called syntax-highlighting, which adds UI elements to the Anki note editor that automatically generate a syntax-highlighted version of a code block from the clipboard. In effect, it generates a formatted HTML table for the code listing that gets dumped into the source of the relevant note field.

-

A fork of this addon is available here: https://ankiweb.net/shared/info/1100811177

-

And the source tree for the original addon is on github: https://github.com/glutanimate/syntax-highlighting

-

For example, consider the following python code block:

-
n = 1
-n >> 1
-print(n)
-
-

Given the above code, the addon generates the following HTML:

-
<table class="highlighttable">
-    <tbody>
-        <tr>
-            <td class="linenos">
-                <div class="linenodiv">
-                    <pre>
-                        <span class="normal">1</span>
-                        <span class="normal">2</span>
-                        <span class="normal">3</span>
-                    </pre>
-                </div>
-            </td>
-            <td class="code">
-                <div class="highlight">
-                    <pre>
-                        <code>
-                            <span class="n">n</span>
-                            <span class="o">=</span>
-                            <span class="mi">1</span>
-                            <br>
-                                <span class="n">n</span>
-                                <span class="o">&gt;&gt;</span>
-                                <span class="mi">1</span>
-                                <br>
-                                    <span class="nb">print</span>
-                                    <span class="p">(</span>
-                                    <span class="n">n</span>
-                                    <span class="p">)</span>
-                                    <br>
-                                    </code>
-                                </pre>
-                </div>
-            </td>
-        </tr>
-    </tbody>
-</table>
-
-

-Editing fields like this could become annoying very quickly. It would be
-better if ki just gave us the markdown version above (only 3 lines), and then
-regenerated the note field HTML when converting the repository back into a
-.anki2 deck.

-

-Adding ki HTML attributes

-

-And in fact, this is possible. We first fork the addon so we can add some
-extra data to our generated HTML. In particular, we'd like to add an
-attribute ki-src whose value is the UTF-8 encoded source code. In general,
-this will be the encoded version of the source of whatever we'd like to
-autoformat.

-

-We also add a ki-formatter attribute, whose value is an identifier that
-specifies a custom python module (which we must implement) that transforms
-the (possibly edited) ki-src text back into an HTML element of the form seen
-above.

-

-So let's call our ki-formatter identifier syntax-hl-python. Then our addon
-has to change the opening tag of the snippet above to look like:

-
-<table class="highlighttable" ki-src="n = 1\nn >> 1\nprint(n)\n" ki-formatter="syntax-hl-python">
-
-

-All ki needs is the original text of the code block prior to html formatting,
-and a function that can reapply the formatting to the modified text. Since
-the html table was generated by an addon, we already have a python function
-for this, and in general we can provide a ~/.config/ki/ki.json file that maps
-implementation IDs to paths of python modules. The module must define a
-top-level function of the form format(text: str) -> bs4.Tag.

-

-If we have an addon implementation, we can import it here and use it in our
-format() implementation. We can add a ki attribute whose value is the base64
-encoding of the code block, and an implementation attribute whose value is
-the name of a function. At import-time, ki will decode this and write the
-human-readable source to the relevant markdown file instead.

-
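-To make this concrete, here is a minimal sketch of what such a formatter
-module might look like, assuming pygments and beautifulsoup4 are installed.
-The module name, the pygments options, and the ki.json entry in the trailing
-comment are illustrative assumptions, not part of ki itself.
-
-"""Hypothetical `syntax-hl-python` ki-formatter module."""
-from bs4 import BeautifulSoup, Tag
-from pygments import highlight
-from pygments.formatters import HtmlFormatter
-from pygments.lexers import PythonLexer
-
-
-def format(text: str) -> Tag:
-    """Re-render the (possibly edited) ki-src text as a highlighted table."""
-    html = highlight(text, PythonLexer(), HtmlFormatter(linenos="table"))
-    table = BeautifulSoup(html, "html.parser").find("table")
-    # Re-attach the ki attributes so the element can be round-tripped again.
-    table["ki-src"] = text
-    table["ki-formatter"] = "syntax-hl-python"
-    return table
-
-
-# A corresponding (hypothetical) ~/.config/ki/ki.json entry might be:
-#
-#   {"syntax-hl-python": "~/.config/ki/formatters/syntax_hl_python.py"}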

-Source code

-

-If you have git, you can clone a local copy of the source code by running the
-following command in a terminal:

-
-git clone git@github.com:langfield/ki.git
-
-
-
-Expand source code
-
-
-"""
-Ki is a command-line interface for the version control and editing of `.anki2`
-collections as git repositories of markdown files.  Rather than providing an
-interactive UI like the Anki desktop client, ki aims to allow natural editing
-*in the filesystem*.
-
-In general, the purpose of ki is to allow users to work on large, complex Anki
-decks in exactly the same way they work on large, complex software projects.
-.. include:: ./DOCUMENTATION.md
-"""
-
-# pylint: disable=invalid-name, missing-class-docstring, broad-except
-# pylint: disable=too-many-return-statements, too-many-lines, too-many-arguments
-# pylint: disable=no-value-for-parameter, not-callable, unnecessary-lambda-assignment
-
-import os
-import re
-import gc
-import sys
-import time
-import json
-import copy
-import random
-import logging
-import sqlite3
-import hashlib
-import datetime
-import itertools
-import subprocess
-import configparser
-from pathlib import Path
-from itertools import chain, starmap, tee
-from functools import reduce
-from collections import namedtuple
-
-import git
-import click
-from lark import Lark
-
-# Required to avoid circular imports because the Anki pylib codebase is gross.
-import anki.collection
-from anki.cards import Card
-from anki.utils import ids2str
-from anki.models import NotetypeDict
-from anki.errors import NotFoundError
-from anki.collection import Collection, Note, OpChangesWithId
-from anki.importing.noteimp import NoteImporter
-
-from beartype import beartype
-from beartype.typing import (
-    Set,
-    List,
-    Dict,
-    Any,
-    Optional,
-    Callable,
-    Union,
-    TypeVar,
-    Tuple,
-    Iterator,
-    Iterable,
-    FrozenSet,
-)
-
-import ki.maybes as M
-import ki.functional as F
-from ki.types import (
-    MODELS_FILE,
-    File,
-    Dir,
-    EmptyDir,
-    NoPath,
-    NoFile,
-    GitChangeType,
-    Delta,
-    KiRepo,
-    Notetype,
-    ColNote,
-    KiRev,
-    Rev,
-    Deck,
-    Root,
-    DotKi,
-    CardFile,
-    NoteDBRow,
-    DeckNote,
-    NoteMetadata,
-    PushResult,
-    PlannedLink,
-    MediaBytes,
-    AddedMedia,
-    UpdatesRejectedError,
-    TargetExistsError,
-    CollectionChecksumError,
-    MissingNotetypeError,
-    NotetypeMismatchError,
-    NoteFieldValidationWarning,
-    DeletedFileNotFoundWarning,
-    DiffTargetFileNotFoundWarning,
-    NotetypeCollisionWarning,
-    SQLiteLockError,
-    MissingMediaDirectoryError,
-    WrongFieldCountWarning,
-    InconsistentFieldNamesWarning,
-    AnkiDBNoteMissingFieldsError,
-    RenamedMediaFileWarning,
-    NonEmptyWorkingTreeError,
-    EmptyNoteWarning,
-    DuplicateNoteWarning,
-    UnhealthyNoteWarning,
-    MediaDirectoryDeckNameCollisionWarning,
-    notetype_json,
-)
-from ki.maybes import (
-    GIT,
-    GITIGNORE_FILE,
-    GITMODULES_FILE,
-    KI,
-    HASHES_FILE,
-    BACKUPS_DIR,
-)
-from ki.transformer import NoteTransformer, FlatNote
-
-curried = F.curried
-
-logging.basicConfig(level=logging.INFO)
-
-TQ = F.progressbar
-
-T = TypeVar("T")
-NoteId, DeckId, CardId = int, int, int
-CardFileMap = Dict[DeckId, List[CardFile]]
-
-GITATTRS_FILE = ".gitattributes"
-
-UTF8 = "UTF-8"
-URLS = "(https?|ftp)://"
-MEDIA = M.MEDIA
-DEV_NULL = "/dev/null"
-BATCH_SIZE = 300
-HTML_REGEX = r"</?\s*[a-z-][^>]*\s*>|(\&(?:[\w\d]+|#\d+|#x[a-f\d]+);)"
-REMOTE_NAME = "anki"
-BRANCH_NAME = F.BRANCH_NAME
-MAX_FILENAME_LEN = 60
-IGNORE_DIRS = set([GIT, KI, MEDIA])
-IGNORE_FILES = set([GITIGNORE_FILE, GITMODULES_FILE, MODELS_FILE])
-HEAD_SUFFIX = Path("ki-head")
-LOCAL_SUFFIX = Path("ki-local")
-REMOTE_SUFFIX = Path("ki-remote")
-FIELD_HTML_SUFFIX = Path("ki-fieldhtml")
-LCA = "last-successful-ki-push"
-
-MEDIA_FILE_RECURSIVE_PATTERN = f"**/{MEDIA}/*"
-
-# This is the key for media files associated with notetypes instead of the
-# contents of a specific note.
-NOTETYPE_NID = -57
-
-MD = ".md"
-
-ALHPANUMERICS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
-SYMBOLS = "!#$%&()*+,-./:;<=>?@[]^_`{|}~"
-BASE91_TABLE = list(ALHPANUMERICS + SYMBOLS)
-
-ADDED = GitChangeType.ADDED
-RENAMED = GitChangeType.RENAMED
-DELETED = GitChangeType.DELETED
-MODIFIED = GitChangeType.MODIFIED
-TYPECHANGED = GitChangeType.TYPECHANGED
-
-
-@beartype
-def do(f: Callable[[Any], Any], xs: Iterable[Any]) -> None:
-    """Perform some action on an iterable."""
-    list(map(f, xs))
-
-
-@beartype
-def stardo(f: Callable[[Any], Any], xs: Iterable[Any]) -> None:
-    """Perform some action on an iterable of tuples, unpacking arguments."""
-    list(starmap(f, xs))
-
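-# Illustrative examples: do(print, [1, 2]) prints each element, while
-# stardo(print, [(1, 2), (3, 4)]) calls print(1, 2) and then print(3, 4).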
-
-@beartype
-def lock(col_file: File) -> sqlite3.Connection:
-    """Check that lock can be acquired on a SQLite3 database given a path."""
-    try:
-        con = sqlite3.connect(col_file, timeout=0.1)
-        con.isolation_level = "EXCLUSIVE"
-        con.execute("BEGIN EXCLUSIVE")
-    except sqlite3.DatabaseError as err:
-        raise SQLiteLockError(col_file, err) from err
-    if sys.platform == "win32":
-        con.commit()
-        con.close()
-    return con
-
-
-@beartype
-def unlock(con: sqlite3.Connection) -> None:
-    """Unlock a SQLite3 database."""
-    if sys.platform == "win32":
-        return
-    con.commit()
-    con.close()
-
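-# Sketch of typical usage (call sites may differ slightly): acquire the
-# exclusive lock before touching the collection, and release it afterwards.
-#
-#   con = lock(kirepo.col_file)
-#   try:
-#       ...  # read from / write to the collection file
-#   finally:
-#       unlock(con)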
-
-@beartype
-def cp_repo(rev: Rev, suffix: str) -> git.Repo:
-    """Get a temporary copy of a git repository in /tmp/<suffix>/."""
-    # Copy the entire repo into a temp directory ending in `../suffix/`.
-    target: NoFile = F.chk(F.mkdtemp() / suffix)
-    ephem = git.Repo(F.copytree(F.root(rev.repo), target))
-
-    # Do a reset --hard to the given SHA.
-    ephem.git.reset(rev.sha, hard=True)
-    return ephem
-
-
-@beartype
-def cp_ki(ki_rev: KiRev, suffix: str) -> KiRepo:
-    """
-    Given a KiRev, i.e. a pair of the form (kirepo, SHA), we clone
-    `kirepo.repo` into a temp directory and hard reset to the given commit
-    hash. Copies the .ki/ directory from `ki_rev.kirepo` without making any
-    changes.
-
-    Parameters
-    ----------
-    ki_rev : KiRev
-        The ki repository to clone, and a commit for it.
-    suffix : str
-        /tmp/.../ path suffix, e.g. `ki/local/`.
-
-    Returns
-    -------
-    KiRepo
-        The copied ki repository.
-    """
-    rev: Rev = F.ki_rev_to_rev(ki_rev)
-    print(F.root(rev.repo))
-    ephem: git.Repo = cp_repo(rev, suffix)
-    F.force_mkdir(F.root(ephem) / KI / BACKUPS_DIR)
-    kirepo: KiRepo = M.kirepo(F.root(ephem))
-    return kirepo
-
-
-@beartype
-def is_anki_note(path: File) -> bool:
-    """Check if file is a `ki`-style markdown note."""
-    # Ought to have markdown file extension.
-    if path.suffix != ".md":
-        return False
-    with open(path, "r", encoding=UTF8) as md_f:
-        lines = md_f.readlines()
-    if len(lines) < 8:
-        return False
-    if lines[0] != "# Note\n":
-        return False
-    if lines[1] != "```\n":
-        return False
-    if not re.match(r"^guid: ", lines[2]):
-        return False
-    return True
-
-
-@beartype
-def is_ignorable(root: Dir, path: Path) -> bool:
-    """
-    Filter out paths in a git repository diff that do not correspond to Anki
-    notes.
-
-    We could do this purely using calls to `is_anki_note()`, but these are
-    expensive, so we try to find matches without opening any files first.
-    """
-    # Ignore if `path` is an exact match for any of the patterns. Since the
-    # contents of a git repository diff are always going to be files, this
-    # alone will not correctly ignore directory names given in `patterns`.
-    #
-    # If any of the patterns in `dirnames` resolve to one of the parents of
-    # `path`, return a warning, so that we are able to filter out entire
-    # directories.
-    filenames, dirnames = IGNORE_FILES, IGNORE_DIRS
-    if path.name in filenames | dirnames or len(set(path.parts) & dirnames) > 0:
-        return True
-
-    # If `path` is an extant file (not a directory) and *not* a note, ignore it.
-    file = F.chk(root / path)
-    if isinstance(file, File) and not is_anki_note(file):
-        return True
-    return False
-
-
-@curried
-@beartype
-def mungediff(
-    parse: Callable[[Delta], DeckNote], a_root: Dir, b_root: Dir, d: git.Diff
-) -> Iterable[Union[Delta, Warning]]:
-    """Extract deltas and warnings from a collection of diffs."""
-    a, b = d.a_path, d.b_path
-    a, b = a if a else b, b if b else a
-    if is_ignorable(a_root, Path(a)) or is_ignorable(b_root, Path(b)):
-        return []
-
-    # Get absolute and relative paths to 'a' and 'b'.
-    AB = namedtuple("AB", "a b")
-    files = AB(F.chk(a_root / a), F.chk(b_root / b))
-    rels = AB(Path(a), Path(b))
-
-    if d.change_type == DELETED.value:
-        if not F.isfile(files.a):
-            return [DeletedFileNotFoundWarning(rels.a)]
-        return [Delta(GitChangeType.DELETED, files.a, rels.a)]
-    if not F.isfile(files.b):
-        return [DiffTargetFileNotFoundWarning(rels.b)]
-    if d.change_type == RENAMED.value:
-        a_delta = Delta(GitChangeType.DELETED, files.a, rels.a)
-        b_delta = Delta(GitChangeType.ADDED, files.b, rels.b)
-        a_decknote, b_decknote = parse(a_delta), parse(b_delta)
-        if a_decknote.guid != b_decknote.guid:
-            return [a_delta, b_delta]
-    return [Delta(GitChangeType(d.change_type), files.b, rels.b)]
-
-
-@beartype
-def diff2(
-    repo: git.Repo,
-    parse: Callable[[Delta], DeckNote],
-) -> Iterable[Union[Delta, Warning]]:
-    """Diff `repo` from `HEAD~1` to `HEAD`."""
-    # We diff from A~B.
-    head1: Rev = M.rev(repo, repo.commit("HEAD~1").hexsha)
-    uuid = hex(random.randrange(16**4))[2:]
-    head1_repo = cp_repo(head1, suffix=f"HEAD~1-{uuid}")
-    a_root, b_root = F.root(head1_repo), F.root(repo)
-    diffidx = repo.commit("HEAD~1").diff(repo.commit("HEAD"))
-
-    # Get the diffs for each change type (e.g. 'DELETED').
-    return chain(*map(mungediff(parse, a_root, b_root), diffidx))
-
-
-@beartype
-def get_models_recursively(kirepo: KiRepo) -> Dict[str, Notetype]:
-    """
-    Find and merge all `models.json` files recursively. Returns a dictionary
-    sending model names to Notetypes.
-    """
-
-    @beartype
-    def load(file: File) -> Iterable[Notetype]:
-        """Load a models file."""
-        with open(file, "r", encoding=UTF8) as f:
-            return map(M.notetype, json.load(f).values())
-
-    notetypes = F.cat(map(load, F.rglob(kirepo.root, MODELS_FILE)))
-    return {notetype.name: notetype for notetype in notetypes}
-
-
-@beartype
-def check_fields_health(note: Note) -> List[Warning]:
-    """Construct warnings when Anki's fields health check fails."""
-    health = note.fields_check()
-    if health == 1:
-        return [EmptyNoteWarning(note, health)]
-    if health == 2:
-        return [DuplicateNoteWarning(note, health, html_to_screen(note.fields[0]))]
-    if health != 0:
-        return [UnhealthyNoteWarning(note, health)]
-    return []
-
-
-@beartype
-def get_guid(fields: List[str]) -> str:
-    """Construct a new GUID for a note. Adapted from genanki's `guid_for()`."""
-    # Get the first 8 bytes of the SHA256 of `contents` as an int.
-    m = hashlib.sha256()
-    m.update("__".join(fields).encode("utf-8"))
-    x = reduce(lambda h, b: (h << 8) + b, m.digest()[:8], 0)
-
-    # convert to the weird base91 format that Anki uses
-    chars = []
-    while x > 0:
-        chars.append(BASE91_TABLE[x % len(BASE91_TABLE)])
-        x //= len(BASE91_TABLE)
-    return "".join(reversed(chars))
-
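-# Note: the reduce() in get_guid() computes the same integer as
-# int.from_bytes(m.digest()[:8], "big"); the while-loop then encodes that
-# integer using Anki's base91 alphabet.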
-
-@curried
-@beartype
-def parse_note(parser: Lark, transformer: NoteTransformer, delta: Delta) -> DeckNote:
-    """Parse with lark."""
-    tree = parser.parse(delta.path.read_text(encoding=UTF8))
-    flatnote: FlatNote = transformer.transform(tree)
-    parts: Tuple[str, ...] = delta.relpath.parent.parts
-    deck: str = "::".join(parts)
-
-    # Generate a GUID from the hash of the field contents if the `guid` field
-    # in the note file was left blank.
-    fields = list(flatnote.fields.values())
-    guid = flatnote.guid if flatnote.guid != "" else get_guid(fields)
-
-    return DeckNote(
-        title=flatnote.title,
-        guid=guid,
-        deck=deck,
-        model=flatnote.model,
-        tags=flatnote.tags,
-        fields=flatnote.fields,
-    )
-
-
-@beartype
-def plain_to_html(plain: str) -> str:
-    """Convert plain text to html"""
-    # Minor clean up
-    plain = plain.replace(r"&lt;", "<")
-    plain = plain.replace(r"&gt;", ">")
-    plain = plain.replace(r"&amp;", "&")
-    plain = plain.replace(r"&nbsp;", " ")
-    plain = re.sub(r"\<b\>\s*\<\/b\>", "", plain)
-    plain = re.sub(r"\<i\>\s*\<\/i\>", "", plain)
-    plain = re.sub(r"\<div\>\s*\<\/div\>", "", plain)
-
-    # Convert newlines to `<br>` tags.
-    if not re.search(HTML_REGEX, plain):
-        plain = plain.replace("\n", "<br>")
-
-    return plain.strip()
-
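-# For example, plain_to_html("x &lt; y\nz") returns "x < y<br>z", whereas
-# input that already contains HTML tags keeps its newlines unchanged.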
-
-@curried
-@beartype
-def update_field(decknote: DeckNote, note: Note, key: str, field: str) -> None:
-    """Update a field contained in `note`."""
-    try:
-        note[key] = plain_to_html(field)
-    except IndexError as err:
-        raise AnkiDBNoteMissingFieldsError(decknote, note.id, key) from err
-
-
-@beartype
-def update_note(
-    note: Note, decknote: DeckNote, old_notetype: Notetype, new_notetype: Notetype
-) -> Iterable[Warning]:
-    """
-    Change all the data of `note` to that given in `decknote`.
-
-    This is only to be called on notes whose nid already exists in the
-    database.  Creates a new deck if `decknote.deck` doesn't exist.  Assumes
-    that the model has already been added to the collection, and raises an
-    exception if it finds otherwise.  Changes notetype to that specified by
-    `decknote.model`.  Overwrites all fields with `decknote.fields`.
-
-    Updates:
-    - tags
-    - deck
-    - model
-    - fields
-    """
-
-    # Check that the passed argument `new_notetype` has a name consistent with
-    # the model specified in `decknote`. The former should be derived from the
-    # latter, and if they don't match, there is a bug in the caller.
-    if decknote.model != new_notetype.name:
-        raise NotetypeMismatchError(decknote, new_notetype)
-
-    nid = note.id
-    note.tags = decknote.tags
-    note.flush()
-
-    # Set the deck of the given note, as well as all its cards, and create a
-    # deck with this name if it doesn't already exist. See the
-    # comments/docstrings in the implementation of the
-    # `anki.decks.DeckManager.id()` method.
-    newdid: int = note.col.decks.id(decknote.deck, create=True)
-    cids = [c.id for c in note.cards()]
-    if cids:
-        note.col.set_deck(cids, newdid)
-
-    # Set notetype (also clears all fields).
-    if old_notetype.id != new_notetype.id:
-        fmap = {field.ord: None for field in old_notetype.flds}
-        note.col.models.change(old_notetype.dict, [nid], new_notetype.dict, fmap, None)
-        note.load()
-
-    # Validate field keys against notetype.
-    warnings: List[Warning] = validate_decknote_fields(new_notetype, decknote)
-    if len(warnings) > 0:
-        return warnings
-
-    # Set field values and flush to collection database. This is correct
-    # because every field name that appears in `new_notetype` is contained in
-    # `decknote.fields`, or else we would have printed a warning and returned
-    # above.
-    missing = {key for key in decknote.fields if key not in note}
-    warnings = map(lambda k: NoteFieldValidationWarning(nid, k, new_notetype), missing)
-    fields = [(key, field) for key, field in decknote.fields.items() if key in note]
-    stardo(update_field(decknote, note), fields)
-    note.flush()
-
-    # Remove if unhealthy.
-    fwarns: List[Warning] = check_fields_health(note)
-    if len(fwarns) > 0:
-        note.col.remove_notes([nid])
-    return chain(warnings, fwarns)
-
-
-@beartype
-def validate_decknote_fields(notetype: Notetype, decknote: DeckNote) -> List[Warning]:
-    """Validate that the fields given in the note match the notetype."""
-    warnings: List[Warning] = []
-    names: List[str] = [field.name for field in notetype.flds]
-
-    # TODO: It might also be nice to print the path of the note in the
-    # repository. This would have to be added to the `DeckNote` spec.
-    if len(decknote.fields.keys()) != len(names):
-        warnings.append(WrongFieldCountWarning(decknote, names))
-
-    mk_warning = lambda n, k: InconsistentFieldNamesWarning(n, k, decknote)
-    names_and_keys = F.starfilter(
-        lambda n, k: n != k, zip(names, decknote.fields.keys())
-    )
-    return warnings + list(starmap(mk_warning, names_and_keys))
-
-
-@beartype
-def get_note_path(colnote: ColNote, deck_dir: Dir, card_name: str = "") -> NoFile:
-    """Get note path from sort field text."""
-    field_text = colnote.sfld
-
-    # Construct filename, stripping HTML tags and sanitizing (quickly).
-    field_text = plain_to_html(field_text)
-    field_text = re.sub("<[^<]+?>", "", field_text)
-
-    # If the HTML stripping removed all text, we just slugify the raw sort
-    # field text.
-    if len(field_text) == 0:
-        field_text = colnote.sfld
-
-    name = field_text[:MAX_FILENAME_LEN]
-    slug = F.slugify(name)
-
-    # If the slug is still empty, use all the fields.
-    if len(slug) == 0:
-        contents = " ".join(colnote.n.values())
-        name = contents[:MAX_FILENAME_LEN]
-        slug = F.slugify(name)
-
-    # Make it so `slug` cannot possibly be an empty string, because then we get
-    # a `Path('.')` which is a bug, and causes a runtime exception. If all else
-    # fails, use the notetype name, hash of the payload, and creation date.
-    if len(slug) == 0:
-        guidhex = colnote.n.guid.encode(UTF8).hex()
-        slug: str = f"{colnote.notetype.name}--{guidhex}"
-
-        # Note IDs are in milliseconds.
-        dt = datetime.datetime.fromtimestamp(colnote.n.id / 1000.0)
-        slug += "--" + dt.strftime("%Y-%m-%d--%Hh-%Mm-%Ss")
-        F.yellow(f"Slug for note with guid '{colnote.n.guid}' is empty...")
-        F.yellow(f"Using hex representation of guid in filename: '{slug}'")
-
-    if card_name != "":
-        slug = f"{slug}_{card_name}"
-    filename: str = f"{slug}{MD}"
-    note_path = F.chk(deck_dir / filename, resolve=False)
-
-    i = 1
-    while not isinstance(note_path, NoFile):
-        filename = f"{slug}_{i}{MD}"
-        note_path = F.chk(deck_dir / filename, resolve=False)
-        i += 1
-
-    return note_path
-
-
-@beartype
-def backup(kirepo: KiRepo) -> int:
-    """Backup collection to `.ki/backups`."""
-    timestamp = datetime.datetime.now().strftime("%Y-%m-%d--%Hh-%Mm-%Ss")
-    md5sum = F.md5(kirepo.col_file)
-    name = f"{timestamp}--{md5sum}.anki2"
-    backup_file = F.chk(kirepo.backups_dir / name)
-
-    # We assume here that no one would ever make e.g. a directory called
-    # `name`, since `name` contains the md5sum of the collection file, and
-    # thus that is extraordinarily improbable. So the only thing we have to
-    # check for is that we haven't already written a backup file to this
-    # location.
-    if isinstance(backup_file, File):
-        return 1
-
-    F.copyfile(kirepo.col_file, F.chk(kirepo.backups_dir / name))
-    return 0
-
-
-@beartype
-def append_md5sum(dotki: Dir, tag: str, md5sum: str) -> None:
-    """Append an md5sum hash to the hashes file."""
-    hashes_file = dotki / HASHES_FILE
-    with open(hashes_file, "a+", encoding=UTF8) as hashes_f:
-        hashes_f.write(f"{md5sum}  {tag}\n")
-
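-# Each line of the hashes file thus has the form "<md5sum>  <tag>", where the
-# tag passed by callers is the collection filename (e.g. "collection.anki2").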
-
-@beartype
-def get_field_note_id(nid: int, fieldname: str) -> str:
-    """A str ID that uniquely identifies field-note pairs."""
-    return f"{nid}{F.slugify(fieldname)}"
-
-
-@beartype
-def add_db_note(
-    col: Collection,
-    nid: int,
-    guid: str,
-    mid: int,
-    mod: int,
-    usn: int,
-    tags: List[str],
-    fields: List[str],
-    sfld: str,
-    csum: int,
-    flags: int,
-    data: str,
-) -> Note:
-    """Add a note to the database directly, with a SQL INSERT."""
-    importer = NoteImporter(col, "")
-    importer.addNew(
-        [
-            (
-                nid,
-                guid,
-                mid,
-                mod,
-                usn,
-                " " + " ".join(tags) + " ",
-                "\x1f".join(fields),
-                sfld,
-                csum,
-                flags,
-                data,
-            )
-        ]
-    )
-
-    # All the `mark_modified` flag does is update `mod`. Since we always set
-    # `mod` to the current timestamp anyway, this doesn't matter, so may as
-    # well set it to `True` to reflect the semantics of the operation we're
-    # performing. This may present issues down the road since newly imported
-    # cards from cloned submodules will be marked modified on import/push,
-    # which is not exactly right. The anki2 importer does *not* mark as
-    # modified, because importing a new note does not modify its content. We
-    # would need to have `mod` data inside the note grammar in order for this
-    # to make sense, which may be more trouble than it's worth. Users writing
-    # new notes as markdown files would have to set the `mod` to some default
-    # value, or leave it blank. Assuming people don't do this nearly as often
-    # as they will export or push notes they've created in Anki, then it might
-    # make sense.
-    col.after_note_updates([nid], mark_modified=True)
-    return col.get_note(nid)
-
-
-@curried
-@beartype
-def push_note(
-    col: Collection,
-    timestamp_ns: int,
-    guids: Dict[str, NoteMetadata],
-    new_nids: Iterator[int],
-    decknote: DeckNote,
-) -> Iterable[Warning]:
-    """
-    Update the Anki `Note` object in `col` corresponding to `decknote`,
-    creating it if it does not already exist.
-
-    Raises
-    ------
-    MissingNotetypeError
-        If we can't find a notetype with the name provided in `decknote`.
-    """
-    # Notetype/model names are privileged in Anki, so if we don't find the
-    # right name, we raise an error.
-    model_id: Optional[int] = col.models.id_for_name(decknote.model)
-    if model_id is None:
-        raise MissingNotetypeError(decknote.model)
-    new_notetype: Notetype = M.notetype(col.models.get(model_id))
-
-    if decknote.guid in guids:
-        nid: int = guids[decknote.guid].nid
-        try:
-            note: Note = col.get_note(nid)
-        except NotFoundError as err:
-            print(f"{nid = }")
-            print(f"{decknote.guid = }")
-            raise err
-    else:
-        nid: int = next(new_nids)
-        note: Note = add_db_note(
-            col,
-            nid,
-            decknote.guid,
-            model_id,
-            mod=int(timestamp_ns // 1e9),
-            usn=-1,
-            tags=decknote.tags,
-            fields=list(decknote.fields.values()),
-            sfld=decknote.fields[new_notetype.sortf.name],
-            csum=0,
-            flags=0,
-            data="",
-        )
-
-    # If we are updating an existing note, we need to know the old and new
-    # notetypes, and then update the notetype (and the rest of the note data)
-    # accordingly.
-    old_notetype: Notetype = M.notetype(note.note_type())
-    return update_note(note, decknote, old_notetype, new_notetype)
-
-
-@beartype
-def get_header_lines(colnote) -> List[str]:
-    """Get header of markdown representation of note."""
-    lines = [
-        "# Note",
-        "```",
-        f"guid: {colnote.n.guid}",
-        f"notetype: {colnote.notetype.name}",
-        "```",
-        "",
-        "### Tags",
-        "```",
-    ]
-    lines += colnote.n.tags
-    lines += ["```", ""]
-    return lines
-
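-# Illustrative rendering of the header (placeholders in angle brackets):
-#
-#   # Note
-#   ```
-#   guid: <guid>
-#   notetype: <notetype name>
-#   ```
-#
-#   ### Tags
-#   ```
-#   <one tag per line>
-#   ```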
-
-@curried
-@beartype
-def localmedia(s: str, regex: str) -> Iterable[str]:
-    """Return local media filenames matching the given regex pattern."""
-    fnames = map(lambda m: m.group("fname"), re.finditer(regex, s))
-    fnames = map(lambda s: s.strip(), fnames)
-    return filter(lambda x: not re.match(URLS, x.lower()), fnames)
-
-
-@beartype
-def media_filenames_in_field(col: Collection, s: str) -> Iterable[str]:
-    """A copy of `MediaManager.files_in_str()`, but without LaTeX rendering."""
-    s = (s.strip()).replace('"', "")
-    return F.cat(map(localmedia(s), col.media.regexps))
-
-
-@curried
-@beartype
-def copy_note_media(
-    col: Collection, src: Dir, tgt: Dir, row: NoteDBRow
-) -> FrozenSet[File]:
-    """
-    Copy a single note's media files and return the copies as a set. We do this
-    by first filtering for only 'rootfiles', i.e. excluding media files in
-    subdirectories of the media directory. Then we take only those which exist,
-    i.e. typecheck as `File`. Then we construct the source and destination
-    paths, and finally actually perform the copy op, returning the result.
-
-    Note that `src` is the media directory where the files originate, and `tgt`
-    is the media directory we're copying to.
-    """
-    files: Iterable[str] = media_filenames_in_field(col, row.flds)
-    rootfiles = filter(lambda f: f == os.path.basename(f), files)
-    medias: Iterable[File] = filter(F.isfile, map(lambda f: F.chk(src / f), rootfiles))
-    srcdsts = map(lambda file: (file, F.chk(tgt / file.name)), medias)
-    return frozenset(starmap(F.copyfile, srcdsts))
-
-
-@curried
-@beartype
-def copy_notetype_media(
-    src: Dir, tgt: Dir, paths: Set[Path], m: NotetypeDict
-) -> FrozenSet[File]:
-    """Copy media from notetype `m` from source to target, returning set of copies."""
-    matches: Iterable[Path] = filter(lambda p: hasmedia(m, str(p)), paths)
-    medias = filter(F.isfile, map(lambda p: F.chk(src / p), matches))
-    srcdsts = map(lambda f: (f, F.chk(tgt / f.name)), medias)
-    return frozenset(starmap(F.copyfile, srcdsts))
-
-
-@beartype
-def copy_media_files(
-    col: Collection,
-    media_target_dir: EmptyDir,
-) -> Dict[int, Set[File]]:
-    """
-    Get a list of extant media files used in notes and notetypes, copy those
-    media files to the top-level `_media/` directory in the repository root,
-    and return a map sending note ids to sets of copied media files.
-
-    Adapted from code in `anki/pylib/anki/exporting.py`. Specifically, the
-    `AnkiExporter.exportInto()` function.
-
-    Parameters
-    ----------
-    col
-        Anki collection.
-    media_target_dir
-        Where media files are to be copied to.
-    """
-    # All note ids as a string for the SQL query.
-    strnids = ids2str(list(col.find_notes(query="")))
-
-    # This is the path to the media directory. In the original implementation
-    # of `AnkiExporter.exportInto()`, there is a check made of the form
-    #
-    #   if self.mediaDir:
-    #
-    # before doing path manipulation with this string.
-    #
-    # Examining the `__init__()` function of `MediaManager`, we can see that
-    # `col.media.dir()` will only be `None` in the case where `server=True` is
-    # passed to the `Collection` constructor. But since we do the construction
-    # within ki, we have a guarantee that this will never be true, and thus we
-    # can assume it is a nonempty string, which is all we need for the
-    # following code to be safe.
-    media_dir = F.chk(Path(col.media.dir()))
-    if not isinstance(media_dir, Dir):
-        raise MissingMediaDirectoryError(col.path, media_dir)
-
-    # Find media files that appear in note fields and copy them to the target.
-    query: str = "select * from notes where id in " + strnids
-    rows: List[NoteDBRow] = [NoteDBRow(*row) for row in col.db.all(query)]
-    rows = TQ(rows, "Media")
-    copy_fn = copy_note_media(col, media_dir, media_target_dir)
-    media = {row.nid: copy_fn(row) for row in rows}
-    mids = col.db.list("select distinct mid from notes where id in " + strnids)
-
-    # Copy notetype template media files.
-    _, _, files = F.shallow_walk(media_dir)
-    paths: Iterable[Path] = map(lambda f: Path(f.name), files)
-    paths = set(filter(lambda f: str(f).startswith("_"), paths))
-    models = filter(lambda m: int(m["id"]) in mids, col.models.all())
-
-    mediasets = map(copy_notetype_media(media_dir, media_target_dir, paths), models)
-    media[NOTETYPE_NID] = reduce(lambda x, y: x.union(y), mediasets, set())
-
-    return media
-
-
-@beartype
-def hasmedia(model: NotetypeDict, fname: str) -> bool:
-    """
-    Check if a notetype has media.
-
-    Adapted from `anki.exporting.AnkiExporter._modelHasMedia()`, which is an
-    instance method, but does not make any use of `self`, and so could be a
-    staticmethod. It is a pure function.
-    """
-    # First check the styling.
-    if fname in model["css"]:
-        return True
-    # If no reference to fname then check the templates as well.
-    return any(map(lambda t: fname in t["qfmt"] or fname in t["afmt"], model["tmpls"]))
-
-
-@beartype
-def write_repository(
-    col_file: File,
-    targetdir: Dir,
-    dotki: DotKi,
-    media_target_dir: EmptyDir,
-) -> None:
-    """Write notes to appropriate directories in `targetdir`."""
-    # Create config file.
-    config = configparser.ConfigParser()
-    config["remote"] = {"path": col_file}
-    with open(dotki.config, "w", encoding=UTF8) as config_f:
-        config.write(config_f)
-
-    # Create temp directory for htmlfield text files.
-    tempdir: EmptyDir = F.mkdtemp()
-    root: EmptyDir = F.mksubdir(tempdir, FIELD_HTML_SUFFIX)
-
-    # ColNote-containing data structure, to be passed to `write_decks()`.
-    col: Collection = M.collection(col_file)
-    nids: Iterable[int] = TQ(col.find_notes(query=""), "Notes")
-    colnotes: Dict[int, ColNote] = {nid: M.colnote(col, nid) for nid in nids}
-    media: Dict[int, Set[File]] = copy_media_files(col, media_target_dir)
-
-    write_decks(
-        col=col,
-        targetdir=targetdir,
-        colnotes=colnotes,
-        media=media,
-    )
-
-    F.rmtree(root)
-    col.close(save=False)
-
-
-@beartype
-def postorder(node: Union[Root, Deck]) -> List[Deck]:
-    """
-    Post-order traversal. Guarantees that we won't process a node until we've
-    processed all its children.
-    """
-    descendants: List[Deck] = reduce(lambda xs, x: xs + postorder(x), node.children, [])
-    return descendants if isinstance(node, Root) else descendants + [node]
-
-
-@beartype
-def preorder(node: Union[Root, Deck]) -> List[Deck]:
-    """
-    Pre-order traversal. Guarantees that we won't process a node until
-    we've processed all its ancestors.
-    """
-    descendants: List[Deck] = reduce(lambda xs, x: xs + preorder(x), node.children, [])
-    return descendants if isinstance(node, Root) else [node] + descendants
-
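-# For a deck tree Root -> [A -> [A::B]], postorder() yields decks in the order
-# [A::B, A] and preorder() yields [A, A::B]; the Root node itself is never
-# included in either traversal.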
-
-@beartype
-def write_decks(
-    col: Collection,
-    targetdir: Dir,
-    colnotes: Dict[int, ColNote],
-    media: Dict[int, Set[File]],
-) -> None:
-    """
-    The proper way to do this is a DFS traversal, perhaps recursively, which
-    will make it easier to keep things purely functional, accumulating the
-    model ids of the children in each node. For this, we must construct a tree
-    from the deck names.
-
-    Implement new `ColNote`-writing procedure, using `DeckTreeNode`s.
-
-    It must do the following for each deck:
-    - create the deck directory
-    - write the models.json file
-    - create and populate the media directory
-    - write the note payload for each note in the correct deck, exactly once
-
-    In other words, for each deck, we need to write all of its:
-    - models
-    - media
-    - notes
-
-    The first two are cumulative: we want the models and media of subdecks to
-    be included in their ancestors. The notes, however, should not be
-    cumulative. Indeed, we want each note to appear exactly once in the
-    entire repository, making allowances for the case where a single note's
-    cards are spread across multiple decks, in which case we must create a
-    symlink.
-
-    And actually, both of these cases are nicely taken care of for us by the
-    `DeckManager.cids()` function, which has a `children: bool` parameter
-    which toggles whether or not to include the card ids of subdecks.
-    """
-    # Accumulate pairs of model ids and notetype maps. The return type of the
-    # `ModelManager.get()` call below indicates that it may return `None`,
-    # but we know it will not because we are getting the notetype id straight
-    # from the Anki DB.
-    #
-    # Dump the models file for the whole repository.
-    models = {m.id: col.models.get(m.id) for m in col.models.all_names_and_ids()}
-    with open(targetdir / MODELS_FILE, "w", encoding=UTF8) as f:
-        json.dump(models, f, ensure_ascii=False, indent=4, sort_keys=True)
-
-    # Construct an iterable of all decks except the trivial deck.
-    root: Deck = M.tree(col, targetdir, col.decks.deck_tree())
-    collisions, decks = F.part(lambda d: MEDIA in d.fullname, postorder(root))
-    if any(True for _ in collisions):
-        warn(MediaDirectoryDeckNameCollisionWarning())
-    decks = list(decks)
-    deckmap = {d.fullname: d for d in decks}
-
-    # Write cards, models, and media to filesystem.
-    do(write_note(col, targetdir, deckmap), TQ(colnotes.values(), "Notes"))
-    do(write_models(col, models), TQ(decks, "Notetypes"))
-    symlink_media(col, root, targetdir, media)
-
-
-@curried
-@beartype
-def write_note(
-    col: Collection,
-    targetd: Dir,
-    deckmap: Dict[str, Deck],
-    colnote: ColNote,
-) -> File:
-    decknames = set(map(lambda c: c.col.decks.name(c.did), colnote.n.cards()))
-    sortf = colnote.sfld
-    if len(decknames) == 0:
-        raise ValueError(f"No cards for note: {sortf}")
-    if len(decknames) > 1:
-        raise ValueError(f"Cards for note {sortf} are in distinct decks: {decknames}")
-    fullname = decknames.pop()
-    parts = fullname.split("::")
-    if "_media" in parts:
-        raise ValueError(f"Bad deck name '{fullname}' (cannot contain '_media')")
-    deck: Deck = deckmap[fullname]
-    path: NoFile = get_note_path(colnote, deck.deckd)
-    payload: str = get_note_payload(colnote)
-    return F.write(path, payload)
-
-
-@curried
-@beartype
-def write_models(col: Collection, models: Dict[int, NotetypeDict], deck: Deck) -> None:
-    """Write the `models.json` file for the given deck."""
-    did: int = deck.did
-    deckd: Dir = deck.deckd
-    descendants: List[CardId] = col.decks.cids(did=did, children=True)
-    cards: List[Card] = list(map(col.get_card, descendants))
-    descendant_mids: Set[int] = {c.note().mid for c in cards}
-
-    # Write `models.json` for current deck.
-    deck_models = {mid: models[mid] for mid in descendant_mids}
-    with open(deckd / MODELS_FILE, "w", encoding=UTF8) as f:
-        json.dump(deck_models, f, ensure_ascii=False, indent=4, sort_keys=True)
-
-
-@beartype
-def mklink(targetd: Dir, colnote: ColNote, deckd: Dir, card: Card, file: File) -> None:
-    """Return a windows link for a card if one is necessary."""
-    note_path: NoFile = get_note_path(colnote, deckd, card.template()["name"])
-    M.link(targetd, PlannedLink(link=note_path, tgt=file))
-
-
-@beartype
-def parentmap(root: Union[Root, Deck]) -> Dict[str, Union[Root, Deck]]:
-    """Map deck fullnames to parent `Deck`s."""
-    parents = {child.fullname: root for child in root.children}
-    return parents | reduce(lambda x, y: x | y, map(parentmap, root.children), {})
-
-
-@curried
-@beartype
-def planned_link(
-    parents: Dict[str, Union[Root, Deck]], deck: Deck, media_file: File
-) -> Optional[PlannedLink]:
-    """Get the target of the to-be-created media symlink."""
-    link: Path = F.chk(deck.mediad / media_file.name, resolve=False)
-    if not isinstance(link, NoFile):
-        return None
-
-    parent: Union[Root, Deck] = parents[deck.fullname]
-    if isinstance(parent, Root):
-        tgt = media_file
-    else:
-        tgt = F.chk(parent.mediad / media_file.name, resolve=False)
-    return PlannedLink(link=link, tgt=tgt)
-
-
-@curried
-@beartype
-def symlink_deck_media(
-    col: Collection,
-    targetd: Dir,
-    media: Dict[int, Set[File]],
-    parents: Dict[str, Union[Root, Deck]],
-    deck: Deck,
-) -> None:
-    """Create chained symlinks for a single deck."""
-    # Get nids for all descendant notes with media.
-    descendants: List[CardId] = col.decks.cids(did=deck.did, children=True)
-    cards: Iterable[Card] = map(col.get_card, descendants)
-    nids: Set[NoteId] = {NOTETYPE_NID} | set(map(lambda c: c.nid, cards))
-
-    # Get link path and target for each media file, and create the links.
-    files = F.cat(map(lambda nid: media[nid], filter(lambda nid: nid in media, nids)))
-    plinks = filter(None, map(planned_link(parents, deck), files))
-    do(M.link(targetd), plinks)
-
-
-@beartype
-def symlink_media(
-    col: Collection,
-    root: Root,
-    targetd: Dir,
-    media: Dict[int, Set[File]],
-) -> None:
-    """Chain symlinks up the deck tree into top-level `<collection>/_media/`."""
-    decks: List[Deck] = preorder(root)
-    parents: Dict[str, Union[Root, Deck]] = parentmap(root)
-    return do(symlink_deck_media(col, targetd, media, parents), decks)
-
-
-@beartype
-def html_to_screen(html: str) -> str:
-    """
-    Convert html for a *single field* into plaintext, to be displayed within a
-    markdown file.
-
-    Does very little (just converts a few HTML constructs like `<br>` tags
-    and `&nbsp;` entities to their plain-text equivalents).
-    """
-    html = re.sub(r"\<style\>.*\<\/style\>", "", html, flags=re.S)
-    plain = html
-
-    # For convenience: Un-escape some common LaTeX constructs.
-    plain = plain.replace(r"\\\\", r"\\")
-    plain = plain.replace(r"\\{", r"\{")
-    plain = plain.replace(r"\\}", r"\}")
-    plain = plain.replace(r"\*}", r"*}")
-
-    plain = plain.replace(r"&lt;", "<")
-    plain = plain.replace(r"&gt;", ">")
-    plain = plain.replace(r"&amp;", "&")
-    plain = plain.replace(r"&nbsp;", " ")
-
-    plain = plain.replace("<br>", "\n")
-    plain = plain.replace("<br/>", "\n")
-    plain = plain.replace("<br />", "\n")
-
-    # Unbreak lines within src attributes.
-    plain = re.sub('src= ?\n"', 'src="', plain)
-
-    plain = re.sub(r"\<b\>\s*\<\/b\>", "", plain)
-    return plain
-
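-# For example, html_to_screen("x&nbsp;&lt;&nbsp;y<br>") returns "x < y\n".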
-
-@curried
-@beartype
-def get_field_payload(col: Collection, name: str, content: str) -> List[str]:
-    """Get the lines of a markdown snippet for some Anki note field."""
-    text = col.media.escape_media_filenames(html_to_screen(content), unescape=True)
-    return [f"## {name}", text, ""]
-
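-# For a field named "Front" containing the plain text "2 + 2", this yields
-# roughly ["## Front", "2 + 2", ""], modulo media-filename escaping.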
-
-@beartype
-def get_note_payload(colnote: ColNote) -> str:
-    """
-    Return the markdown-converted contents of the Anki note represented by
-    `colnote` as a string.
-
-    A `ColNote` is a dataclass wrapper around a `Note` object which has been
-    loaded from the DB.
-    """
-    lines = get_header_lines(colnote)
-    lines += F.cat(starmap(get_field_payload(colnote.n.col), colnote.n.items()))
-    return "\n".join(lines)
-
-
-@beartype
-def git_pull(remote: str, branch: str, cwd: Dir) -> str:
-    """Pull remote into branch using a subprocess call."""
-    args = ["git", "pull", "-v", remote, branch]
-    p = subprocess.run(args, check=False, cwd=cwd, capture_output=True)
-    return f"{p.stdout.decode()}\n{p.stderr.decode()}"
-
-
-@beartype
-def echo(string: str, silent: bool = False) -> None:
-    """Call `click.secho()` with formatting."""
-    if not silent:
-        click.secho(string, bold=True)
-
-
-@beartype
-def warn(w: Warning) -> None:
-    """Call `click.secho()` with formatting (yellow)."""
-    click.secho(f"WARNING: {str(w)}", bold=True, fg="yellow")
-
-
-@beartype
-def get_target(cwd: Dir, col_file: File, directory: str) -> Tuple[EmptyDir, bool]:
-    """Create default target directory."""
-    path = F.chk(Path(directory) if directory != "" else cwd / col_file.stem)
-    new: bool = True
-    if isinstance(path, NoPath):
-        path.mkdir(parents=True)
-        return M.emptydir(path), new
-    if isinstance(path, EmptyDir):
-        new = False
-        return path, new
-    raise TargetExistsError(path)
-
-
-@beartype
-def echo_note_change_types(deltas: Iterable[Delta]) -> None:
-    """Write a table of git change types for notes to stdout."""
-    # pylint: disable=too-many-locals
-    is_change_type = lambda t: lambda d: d.status == t
-
-    vs, ws, xs, ys, zs = tee(deltas, 5)
-    adds = list(filter(is_change_type(ADDED), vs))
-    deletes = list(filter(is_change_type(DELETED), ws))
-    renames = list(filter(is_change_type(RENAMED), xs))
-    modifies = list(filter(is_change_type(MODIFIED), ys))
-    types = list(filter(is_change_type(TYPECHANGED), zs))
-
-    LPAD, RPAD = 15, 9
-    add_info: str = "ADD".ljust(LPAD) + str(len(adds)).rjust(RPAD)
-    delete_info: str = "DELETE".ljust(LPAD) + str(len(deletes)).rjust(RPAD)
-    modification_info: str = "MODIFY".ljust(LPAD) + str(len(modifies)).rjust(RPAD)
-    rename_info: str = "RENAME".ljust(LPAD) + str(len(renames)).rjust(RPAD)
-    type_info: str = "TYPE CHANGE".ljust(LPAD) + str(len(types)).rjust(RPAD)
-
-    echo("=" * (LPAD + RPAD))
-    echo("Note change types")
-    echo("-" * (LPAD + RPAD))
-    echo(f"{add_info}\n{delete_info}\n{modification_info}\n{rename_info}\n{type_info}")
-    echo("=" * (LPAD + RPAD))
-
-
-@curried
-@beartype
-def add_model(col: Collection, model: Notetype) -> None:
-    """Add a model to the database."""
-    # Check if a model already exists with this name, and get its `mid`.
-    mid: Optional[int] = col.models.id_for_name(model.name)
-
-    # TODO: This function is unfinished. We need to add new notetypes (and
-    # rename them) only if they are 'new', where new means they are different
-    # from anything else already in the DB, in the content-addressed sense. If
-    # they are new, then we must indicate that the notes we are adding actually
-    # have these new notetypes. For this, it may make sense to use the hash of
-    # the notetype everywhere (i.e. in the note file) rather than the name or
-    # mid.
-    #
-    # If a model already exists with this name, parse it, and check if its hash
-    # is identical to the model we are trying to add.
-    if mid is not None:
-        nt: NotetypeDict = col.models.get(mid)
-
-        # If we are trying to add a model that has the exact same content and
-        # name as an existing model, skip it.
-        existing: Notetype = M.notetype(nt)
-        if notetype_json(model) == notetype_json(existing):
-            return
-
-        # If the hashes don't match, then we somehow need to update
-        # `decknote.model` for the relevant notes.
-        warn(NotetypeCollisionWarning(model, existing))
-
-    nt_copy: NotetypeDict = copy.deepcopy(model.dict)
-    nt_copy["id"] = 0
-    changes: OpChangesWithId = col.models.add_dict(nt_copy)
-    nt: NotetypeDict = col.models.get(changes.id)
-    model: Notetype = M.notetype(nt)
-    echo(f"Added model '{model.name}'")
-
-
-@beartype
-def mediadata(col: Collection, fname: str) -> bytes:
-    """Get media file content as bytes (empty if missing)."""
-    if not col.media.have(fname):
-        return b""
-    path = os.path.join(col.media.dir(), fname)
-    try:
-        with open(path, "rb") as f:
-            return f.read()
-    except OSError:
-        return b""
-
-
-@beartype
-def get_note_metadata(col: Collection) -> Dict[str, NoteMetadata]:
-    """
-    Construct a map from guid -> (nid, mod, mid), adapted from
-    `Anki2Importer._import_notes()`. Note that `mod` is the modification
-    timestamp, in epoch seconds (timestamp of when the note was last modified).
-    """
-    guids: Dict[str, NoteMetadata] = {}
-    for nid, guid, mod, mid in col.db.execute("select id, guid, mod, mid from notes"):
-        guids[guid] = NoteMetadata(nid, mod, mid)
-    return guids
-
-
-@curried
-@beartype
-def mediabytes(col: Collection, file: File) -> MediaBytes:
-    """Get old bytes (from collection) and new bytes (from file) for media file."""
-    old: bytes = mediadata(col, file.name)
-    new: bytes = file.read_bytes()
-    return MediaBytes(file=file, old=old, new=new)
-
-
-@curried
-@beartype
-def addmedia(col: Collection, m: MediaBytes) -> AddedMedia:
-    """Add a media file to collection (possibly renaming)."""
-    return AddedMedia(file=m.file, new_name=col.media.add_file(m.file))
-
-
-@beartype
-def commit_hashes_file(kirepo: KiRepo) -> None:
-    """Add and commit hashes file."""
-    kirepo.repo.index.add(f"{KI}/{HASHES_FILE}")
-    kirepo.repo.index.commit("Update collection hashes file.")
-
-
-@beartype
-def cleanup(targetdir: Dir, new: bool) -> Union[Dir, EmptyDir, NoPath]:
-    """Cleans up after failed clone operations."""
-    try:
-        if new:
-            return F.rmtree(targetdir)
-        _, dirs, files = F.shallow_walk(targetdir)
-        do(F.rmtree, dirs)
-        do(os.remove, files)
-    except PermissionError as _:
-        pass
-    return F.chk(targetdir)
-
-
-@click.group()
-@click.version_option()
-@beartype
-def ki() -> None:
-    """
-    The universal CLI entry point for `ki`.
-
-    Takes no arguments; it has only three subcommands (clone, pull, push).
-    """
-    return
-
-
-@ki.command()
-@click.argument("collection")
-@click.argument("directory", required=False, default="")
-def clone(collection: str, directory: str = "") -> None:
-    """Clone an Anki collection into a directory."""
-    _clone1(collection, directory)
-
-
-@beartype
-def _clone1(collection: str, directory: str = "") -> git.Repo:
-    """Execute a clone op."""
-    col_file: File = M.xfile(Path(collection))
-    # Write all files to `targetdir`, and instantiate a `KiRepo` object.
-    targetdir, new = get_target(F.cwd(), col_file, directory)
-    try:
-        _, _ = _clone2(col_file, targetdir, msg="Initial commit", silent=False)
-        kirepo: KiRepo = M.kirepo(targetdir)
-        kirepo.repo.create_tag(LCA)
-        kirepo.repo.close()
-        gc.collect()
-        return kirepo.repo
-    except Exception as err:
-        cleanup(targetdir, new)
-        raise err
-
-
-@beartype
-def _clone2(
-    col_file: File,
-    targetdir: EmptyDir,
-    msg: str,
-    silent: bool,
-) -> Tuple[git.Repo, str]:
-    """
-    Clone an Anki collection into a directory.
-
-    The caller expects that `targetdir` will be the root of a valid ki
-    repository after this function is called, so we need to do our repo
-    initialization with gitpython in here, as opposed to in `clone()`.
-
-    Parameters
-    ----------
-    col_file : File
-        The path to an `.anki2` collection file.
-    targetdir : EmptyDir
-        A path to a directory to clone the collection into.
-        Note: we check that this directory is empty.
-    msg : str
-        Message for initial commit.
-    silent : bool
-        Whether to suppress progress information printed to stdout.
-
-    Returns
-    -------
-    repo : git.Repo
-        The cloned repository.
-    branch_name : str
-        The name of the default branch.
-    """
-    kidir, mediadir = M.empty_kirepo(targetdir)
-    dotki: DotKi = M.dotki(kidir)
-    md5sum = F.md5(col_file)
-    echo(f"Cloning into '{targetdir}'...", silent=silent)
-    (targetdir / GITIGNORE_FILE).write_text(f"{KI}/{BACKUPS_DIR}\n")
-    (targetdir / GITATTRS_FILE).write_text("*.md linguist-detectable\n")
-
-    # Write note files to disk.
-    write_repository(col_file, targetdir, dotki, mediadir)
-    repo, branch = F.init(targetdir)
-
-    # Store a checksum of the Anki collection file in the hashes file.
-    append_md5sum(kidir, col_file.name, md5sum)
-
-    F.commitall(repo, msg)
-    if repo.is_dirty():
-        raise NonEmptyWorkingTreeError(repo)
-    return repo, branch
-
-
-@ki.command()
-@beartype
-def pull() -> None:
-    """Pull changes into the current ki repository from an Anki collection."""
-    _pull1()
-
-
-@beartype
-def _pull1() -> None:
-    """Execute a pull op."""
-    # Check that we are inside a ki repository, and get the associated collection.
-    kirepo: KiRepo = M.kirepo(F.cwd())
-    con: sqlite3.Connection = lock(kirepo.col_file)
-    md5sum: str = F.md5(kirepo.col_file)
-    hashes: List[str] = kirepo.hashes_file.read_text(encoding=UTF8).split("\n")
-    hashes = list(filter(lambda l: l != "", hashes))
-    if md5sum in hashes[-1]:
-        echo("ki pull: up to date.")
-        unlock(con)
-        return
-
-    _pull2(kirepo)
-    unlock(con)
-
-
-@beartype
-def _pull2(kirepo: KiRepo) -> None:
-    """
-    Pull into `kirepo` without checking if we are already up-to-date.
-
-    Load the git repository at `anki_remote_root`, force pull (preferring
-    'theirs', i.e. the new stuff from the sqlite3 database) changes from that
-    repository (which is cloned straight from the collection, which in general
-    may have new changes) into `lca_repo`, and then pull `lca_repo` into the
-    main repository.
-
-    We pull in this sequence in order to avoid merge conflicts. Since we first
-    pull into a snapshot of the repository as it looked when we last pushed to
-    the database, we know that there cannot be any merge conflicts, because to
-    git, it just looks like we haven't made any changes since then. Then we
-    pull the result of that merge into our actual repository. So there could
-    still be merge conflicts at that point, but they will only be 'genuine'
-    merge conflicts in some sense, because as a result of using this snapshot
-    strategy, we give the anki collection the appearance of being a persistent
-    remote git repo. If we didn't do this, the fact that we did a fresh clone
-    of the database every time would mean that everything would look like a
-    merge conflict, because there is no shared history.
-
-    Parameters
-    ----------
-    kirepo : KiRepo
-        A dataclass representing the Ki repository in the cwd.
-
-    Raises
-    ------
-    CollectionChecksumError
-        If the Anki collection file was modified while pulling changes. This is
-        very unlikely, since the caller acquires a lock on the SQLite3
-        database.
-    """
-    # pylint: disable=too-many-locals
-    md5sum: str = F.md5(kirepo.col_file)
-
-    # Copy `repo` into a temp directory and `reset --hard` at rev of last
-    # successful `push()`, which is the last common ancestor, or 'LCA'.
-    head: Rev = M.head(kirepo.repo)
-    rev: Rev = M.rev(kirepo.repo, sha=kirepo.repo.tag(LCA).commit.hexsha)
-    lca_repo: git.Repo = cp_repo(rev, f"{LOCAL_SUFFIX}-{md5sum}")
-
-    # Clone collection into a temp directory at `anki_remote_root`.
-    anki_remote_root: EmptyDir = F.mksubdir(F.mkdtemp(), REMOTE_SUFFIX / md5sum)
-    msg = f"Fetch changes from DB at `{kirepo.col_file}` with md5sum `{md5sum}`"
-    remote_repo, branch = _clone2(kirepo.col_file, anki_remote_root, msg, silent=False)
-
-    # Create git remote pointing to `remote_repo`, which represents the current
-    # state of the Anki SQLite3 database, and pull it into `lca_repo`.
-    anki_remote = lca_repo.create_remote(REMOTE_NAME, F.gitd(remote_repo))
-    anki_remote.fetch()
-
-    # Handle deleted files, preferring `theirs`.
-    diffidx = lca_repo.commit("HEAD").diff(lca_repo.commit("FETCH_HEAD"))
-    dels: Iterable[git.Diff] = diffidx.iter_change_type(DELETED.value)
-    dels = filter(lambda d: d.a_path != GITMODULES_FILE, dels)
-    dels = filter(lambda d: F.isfile(F.chk(F.root(lca_repo) / d.a_path)), dels)
-    a_paths: Iterable[str] = set(map(F.git_rm(lca_repo), map(lambda d: d.a_path, dels)))
-
-    if len(a_paths) > 0:
-        details: str = "".join(map(lambda a: f"Remove '{a}'\n", a_paths))
-        F.commitall(lca_repo, msg=f"Remove files deleted in remote.\n\n{details}")
-
-    remote_root: Dir = F.root(remote_repo)
-    lca_repo = M.gitcopy(lca_repo, remote_root, unsub=False)
-    F.commitall(lca_repo, f"Pull changes from repository at `{remote_root}`")
-
-    # Create remote pointing to `lca_repo` and pull into `repo`. Note
-    # that this `git pull` may not always create a merge commit, because a
-    # fast-forward only updates the branch pointer.
-    lca_remote = kirepo.repo.create_remote(REMOTE_NAME, lca_repo.git_dir)
-    kirepo.repo.git.config("pull.rebase", "false")
-    out = git_pull(REMOTE_NAME, branch, kirepo.root)
-    echo(out)
-    kirepo.repo.delete_remote(lca_remote)
-
-    # The merge will have overwritten the hashes file with only the collection
-    # hash from the fresh clone of the remote, so we checkout its state from
-    # before the merge.
-    kirepo.repo.git.checkout([head.sha, "--", f"{KI}/{HASHES_FILE}"])
-
-    # Raise an error if the collection was modified during pull.
-    if F.md5(kirepo.col_file) != md5sum:
-        raise CollectionChecksumError(kirepo.col_file)
-
-    # Append the hash of the collection to the hashes file.
-    if "Aborting" not in out:
-        append_md5sum(kirepo.ki, kirepo.col_file.name, md5sum)
-        commit_hashes_file(kirepo)
-
-
-# PUSH
-
-
-@ki.command()
-@beartype
-def push() -> None:
-    """Push commits from the currrent ki repository into an Anki collection."""
-    _push()
-
-
-@beartype
-def _push() -> PushResult:
-    """Execute a push op."""
-    # pylint: disable=too-many-locals
-    # Check that we are inside a ki repository, and load collection.
-    kirepo: KiRepo = M.kirepo(F.cwd())
-    con: sqlite3.Connection = lock(kirepo.col_file)
-
-    md5sum: str = F.md5(kirepo.col_file)
-    hashes: List[str] = kirepo.hashes_file.read_text(encoding=UTF8).split("\n")
-    hashes = list(filter(lambda l: l != "", hashes))
-    if md5sum not in hashes[-1]:
-        raise UpdatesRejectedError(kirepo.col_file)
-
-    head_kirepo: KiRepo = cp_ki(M.head_ki(kirepo), f"{HEAD_SUFFIX}-{md5sum}")
-    remote_root: EmptyDir = F.mksubdir(F.mkdtemp(), REMOTE_SUFFIX / md5sum)
-
-    msg = f"Fetch changes from collection '{kirepo.col_file}' with md5sum '{md5sum}'"
-    remote_repo, _ = _clone2(kirepo.col_file, remote_root, msg, silent=True)
-
-    remote_repo = M.gitcopy(remote_repo, head_kirepo.root, unsub=True)
-    F.commitall(remote_repo, f"Pull changes from repository at `{kirepo.root}`")
-
-    parse: Callable[[Delta], DeckNote] = parse_note(*M.parser_and_transformer())
-    deltas, warnings = F.part(lambda x: isinstance(x, Delta), diff2(remote_repo, parse))
-    do(warn, warnings)
-
-    # If there are no changes, quit.
-    if len(set(deltas)) == 0:
-        echo("ki push: up to date.")
-        unlock(con)
-        return PushResult.UP_TO_DATE
-
-    echo(f"Pushing to '{kirepo.col_file}'")
-    models: Dict[str, Notetype] = get_models_recursively(head_kirepo)
-    return write_collection(deltas, models, kirepo, parse, head_kirepo, con)
-
-
-@beartype
-def write_collection(
-    deltas: Iterable[Delta],
-    models: Dict[str, Notetype],
-    kirepo: KiRepo,
-    parse: Callable[[Delta], DeckNote],
-    head_kirepo: KiRepo,
-    con: sqlite3.Connection,
-) -> PushResult:
-    """Push a list of `Delta`s to an Anki collection."""
-    # pylint: disable=too-many-locals
-    # Copy collection to a temp directory.
-    temp_col_dir: Dir = F.mkdtemp()
-    new_col_file = temp_col_dir / kirepo.col_file.name
-    col_name: str = kirepo.col_file.name
-    new_col_file: NoFile = F.chk(temp_col_dir / col_name)
-    new_col_file: File = F.copyfile(kirepo.col_file, new_col_file)
-
-    # Open collection and add new models to root `models.json` file.
-    col: Collection = M.collection(new_col_file)
-    do(add_model(col), models.values())
-
-    # Stash both unstaged and staged files (including untracked).
-    head_kirepo.repo.git.stash(include_untracked=True, keep_index=True)
-    head_kirepo.repo.git.reset("HEAD", hard=True)
-
-    # Display table of note change type counts and partition deltas into
-    # 'deletes' and 'not deletes'.
-    xs, ys, zs = tee(deltas, 3)
-    echo_note_change_types(xs)
-    dels: Iterable[Delta] = filter(lambda d: d.status == DELETED, ys)
-    deltas: Iterable[Delta] = filter(lambda d: d.status != DELETED, zs)
-
-    # Map guid -> (nid, mod, mid).
-    guids: Dict[str, NoteMetadata] = get_note_metadata(col)
-
-    # Parse to-be-deleted notes and remove them from collection.
-    del_guids: Iterable[str] = map(lambda dd: dd.guid, map(parse, dels))
-    del_guids = set(filter(lambda g: g in guids, del_guids))
-    del_nids: Iterable[NoteId] = map(lambda g: guids[g].nid, del_guids)
-    col.remove_notes(list(del_nids))
-
-    # Push changes for all other notes.
-    guids = {k: v for k, v in guids.items() if k not in del_guids}
-    timestamp_ns: int = time.time_ns()
-    new_nids: Iterator[int] = itertools.count(int(timestamp_ns / 1e6))
-    decknotes: Iterable[DeckNote] = map(parse, deltas)
-    do(warn, F.cat(map(push_note(col, timestamp_ns, guids, new_nids), decknotes)))
-
-    # It is always safe to save changes to the DB, since the DB is a copy.
-    col.close(save=True)
-
-    # Backup collection file and overwrite collection.
-    backup(kirepo)
-    F.copyfile(new_col_file, kirepo.col_file)
-    echo(f"Overwrote '{kirepo.col_file}'")
-
-    # Add media files to collection.
-    col: Collection = M.collection(kirepo.col_file)
-    media_files = F.rglob(head_kirepo.root, MEDIA_FILE_RECURSIVE_PATTERN)
-    mbytes: Iterable[MediaBytes] = map(mediabytes(col), media_files)
-
-    # Skip media files whose twin in collection has same name and same data.
-    mbytes = filter(lambda m: m.old == b"" or m.old != m.new, mbytes)
-
-    # Add (and possibly rename) media paths.
-    renames = filter(lambda a: a.file.name != a.new_name, map(addmedia(col), mbytes))
-    warnings = map(lambda r: RenamedMediaFileWarning(r.file.name, r.new_name), renames)
-    do(warn, warnings)
-    col.close(save=True)
-
-    # Append and commit collection checksum to hashes file.
-    append_md5sum(kirepo.ki, kirepo.col_file.name, F.md5(kirepo.col_file))
-    commit_hashes_file(kirepo)
-
-    # Update commit SHA of most recent successful PUSH and unlock SQLite DB.
-    kirepo.repo.delete_tag(LCA)
-    kirepo.repo.create_tag(LCA)
-    unlock(con)
-    return PushResult.NONTRIVIAL
-
-
-
-

Sub-modules

-
-
ki.functional
-
-

Type-safe, non Anki-specific functions.

-
-
ki.maybes
-
-

Factory functions for safely handling errors in type construction.

-
-
ki.transformer
-
-

A Lark transformer for the ki note grammar.

-
-
ki.types
-
-

Types for ki.


Functions

-
-
-def add_db_note(col: anki.collection.Collection, nid: int, guid: str, mid: int, mod: int, usn: int, tags: list[str], fields: list[str], sfld: str, csum: int, flags: int, data: str) ‑> anki.notes.Note -
-
-

Add a note to the database directly, with a SQL INSERT.

-
- -Expand source code - -
@beartype
-def add_db_note(
-    col: Collection,
-    nid: int,
-    guid: str,
-    mid: int,
-    mod: int,
-    usn: int,
-    tags: List[str],
-    fields: List[str],
-    sfld: str,
-    csum: int,
-    flags: int,
-    data: str,
-) -> Note:
-    """Add a note to the database directly, with a SQL INSERT."""
-    importer = NoteImporter(col, "")
-    importer.addNew(
-        [
-            (
-                nid,
-                guid,
-                mid,
-                mod,
-                usn,
-                " " + " ".join(tags) + " ",
-                "\x1f".join(fields),
-                sfld,
-                csum,
-                flags,
-                data,
-            )
-        ]
-    )
-
-    # All the `mark_modified` flag does is update `mod`. Since we always set
-    # `mod` to the current timestamp anyway, this doesn't matter, so may as
-    # well set it to `True` to reflect the semantics of the operation we're
-    # performing. This may present issues down the road since newly imported
-    # cards from cloned submodules will be marked modified on import/push,
-    # which is not exactly right. The anki2 importer does *not* mark as
-    # modified, because importing a new note does not modify its content. We
-    # would need to have `mod` data inside the note grammar in order for this
-    # to make sense, which may be more trouble than it's worth. Users writing
-    # new notes as markdown files would have to set the `mod` to some default
-    # value, or leave it blank. Assuming people don't do this nearly as often
-    # as they will export or push notes they've created in Anki, then it might
-    # make sense.
-    col.after_note_updates([nid], mark_modified=True)
-    return col.get_note(nid)
-
-
-
-def add_model(col: anki.collection.Collection, model: Notetype) ‑> None -
-
-

Add a model to the database.

-
- -Expand source code - -
@curried
-@beartype
-def add_model(col: Collection, model: Notetype) -> None:
-    """Add a model to the database."""
-    # Check if a model already exists with this name, and get its `mid`.
-    mid: Optional[int] = col.models.id_for_name(model.name)
-
-    # TODO: This function is unfinished. We need to add new notetypes (and
-    # rename them) only if they are 'new', where new means they are different
-    # from anything else already in the DB, in the content-addressed sense. If
-    # they are new, then we must indicate that the notes we are adding actually
-    # have these new notetypes. For this, it may make sense to use the hash of
-    # the notetype everywhere (i.e. in the note file) rather than the name or
-    # mid.
-    #
-    # If a model already exists with this name, parse it, and check if its hash
-    # is identical to the model we are trying to add.
-    if mid is not None:
-        nt: NotetypeDict = col.models.get(mid)
-
-        # If we are trying to add a model that has the exact same content and
-        # name as an existing model, skip it.
-        existing: Notetype = M.notetype(nt)
-        if notetype_json(model) == notetype_json(existing):
-            return
-
-        # If the hashes don't match, then we somehow need to update
-        # `decknote.model` for the relevant notes.
-        warn(NotetypeCollisionWarning(model, existing))
-
-    nt_copy: NotetypeDict = copy.deepcopy(model.dict)
-    nt_copy["id"] = 0
-    changes: OpChangesWithId = col.models.add_dict(nt_copy)
-    nt: NotetypeDict = col.models.get(changes.id)
-    model: Notetype = M.notetype(nt)
-    echo(f"Added model '{model.name}'")
-
-
-
-def addmedia(col: anki.collection.Collection, m: MediaBytes) ‑> AddedMedia -
-
-

Add a media file to collection (possibly renaming).

-
- -Expand source code - -
@curried
-@beartype
-def addmedia(col: Collection, m: MediaBytes) -> AddedMedia:
-    """Add a media file to collection (possibly renaming)."""
-    return AddedMedia(file=m.file, new_name=col.media.add_file(m.file))
-
-
-
-def append_md5sum(dotki: Dir, tag: str, md5sum: str) ‑> None -
-
-

Append an md5sum hash to the hashes file.

-
- -Expand source code - -
@beartype
-def append_md5sum(dotki: Dir, tag: str, md5sum: str) -> None:
-    """Append an md5sum hash to the hashes file."""
-    hashes_file = dotki / HASHES_FILE
-    with open(hashes_file, "a+", encoding=UTF8) as hashes_f:
-        hashes_f.write(f"{md5sum}  {tag}\n")
-
-
-
-def backup(kirepo: KiRepo) ‑> int -
-
-

Backup collection to .ki/backups.

-
- -Expand source code - -
@beartype
-def backup(kirepo: KiRepo) -> int:
-    """Backup collection to `.ki/backups`."""
-    timestamp = datetime.datetime.now().strftime("%Y-%m-%d--%Hh-%Mm-%Ss")
-    md5sum = F.md5(kirepo.col_file)
-    name = f"{timestamp}--{md5sum}.anki2"
-    backup_file = F.chk(kirepo.backups_dir / name)
-
-    # We assume here that no one would ever make e.g. a directory called
-    # `name`, since `name` contains the md5sum of the collection file, and
-    # thus that is extraordinarily improbable. So the only thing we have to
-    # check for is that we haven't already written a backup file to this
-    # location.
-    if isinstance(backup_file, File):
-        return 1
-
-    F.copyfile(kirepo.col_file, F.chk(kirepo.backups_dir / name))
-    return 0
-
-
-
-def check_fields_health(note: anki.notes.Note) ‑> list[Warning] -
-
-

Construct warnings when Anki's fields health check fails.

-
- -Expand source code - -
@beartype
-def check_fields_health(note: Note) -> List[Warning]:
-    """Construct warnings when Anki's fields health check fails."""
-    health = note.fields_check()
-    if health == 1:
-        return [EmptyNoteWarning(note, health)]
-    if health == 2:
-        return [DuplicateNoteWarning(note, health, html_to_screen(note.fields[0]))]
-    if health != 0:
-        return [UnhealthyNoteWarning(note, health)]
-    return []
-
-
-
-def cleanup(targetdir: Dir, new: bool) ‑> Union[Dir, EmptyDir, NoPath] -
-
-

Cleans up after failed clone operations.

-
- -Expand source code - -
@beartype
-def cleanup(targetdir: Dir, new: bool) -> Union[Dir, EmptyDir, NoPath]:
-    """Cleans up after failed clone operations."""
-    try:
-        if new:
-            return F.rmtree(targetdir)
-        _, dirs, files = F.shallow_walk(targetdir)
-        do(F.rmtree, dirs)
-        do(os.remove, files)
-    except PermissionError as _:
-        pass
-    return F.chk(targetdir)
-
-
-
-def commit_hashes_file(kirepo: KiRepo) ‑> None -
-
-

Add and commit hashes file.

-
- -Expand source code - -
@beartype
-def commit_hashes_file(kirepo: KiRepo) -> None:
-    """Add and commit hashes file."""
-    kirepo.repo.index.add(f"{KI}/{HASHES_FILE}")
-    kirepo.repo.index.commit("Update collection hashes file.")
-
-
-
-def copy_media_files(col: anki.collection.Collection, media_target_dir: EmptyDir) ‑> dict[int, set[File]] -
-
-

Get a list of extant media files used in notes and notetypes, copy those media files to the top-level _media/ directory in the repository root, and return a map sending note ids to sets of copied media files.

-

Adapted from code in anki/pylib/anki/exporting.py. Specifically, the AnkiExporter.exportInto() function.

-

Parameters

-
-
col
-
Anki collection.
-
media_target_dir
-
Where media files are to be copied to.
-
-
- -Expand source code - -
@beartype
-def copy_media_files(
-    col: Collection,
-    media_target_dir: EmptyDir,
-) -> Dict[int, Set[File]]:
-    """
-    Get a list of extant media files used in notes and notetypes, copy those
-    media files to the top-level `_media/` directory in the repository root,
-    and return a map sending note ids to sets of copied media files.
-
-    Adapted from code in `anki/pylib/anki/exporting.py`. Specifically, the
-    `AnkiExporter.exportInto()` function.
-
-    Parameters
-    ----------
-    col
-        Anki collection.
-    media_target_dir
-        Where media files are to be copied to.
-    """
-    # All note ids as a string for the SQL query.
-    strnids = ids2str(list(col.find_notes(query="")))
-
-    # This is the path to the media directory. In the original implementation
-    # of `AnkiExporter.exportInto()`, there is a check made of the form
-    #
-    #   if self.mediaDir:
-    #
-    # before doing path manipulation with this string.
-    #
-    # Examining the `__init__()` function of `MediaManager`, we can see that
-    # `col.media.dir()` will only be `None` in the case where `server=True` is
-    # passed to the `Collection` constructor. But since we do the construction
-    # within ki, we have a guarantee that this will never be true, and thus we
-    # can assume it is a nonempty string, which is all we need for the
-    # following code to be safe.
-    media_dir = F.chk(Path(col.media.dir()))
-    if not isinstance(media_dir, Dir):
-        raise MissingMediaDirectoryError(col.path, media_dir)
-
-    # Find media files that appear in note fields and copy them to the target.
-    query: str = "select * from notes where id in " + strnids
-    rows: List[NoteDBRow] = [NoteDBRow(*row) for row in col.db.all(query)]
-    rows = TQ(rows, "Media")
-    copy_fn = copy_note_media(col, media_dir, media_target_dir)
-    media = {row.nid: copy_fn(row) for row in rows}
-    mids = col.db.list("select distinct mid from notes where id in " + strnids)
-
-    # Copy notetype template media files.
-    _, _, files = F.shallow_walk(media_dir)
-    paths: Iterable[Path] = map(lambda f: Path(f.name), files)
-    paths = set(filter(lambda f: str(f).startswith("_"), paths))
-    models = filter(lambda m: int(m["id"]) in mids, col.models.all())
-
-    mediasets = map(copy_notetype_media(media_dir, media_target_dir, paths), models)
-    media[NOTETYPE_NID] = reduce(lambda x, y: x.union(y), mediasets, set())
-
-    return media
-
-
-
-def copy_note_media(col: anki.collection.Collection, src: Dir, tgt: Dir, row: NoteDBRow) ‑> frozenset[File] -
-
-

Copy a single note's media files and return the copies as a set. We do this by first filtering for only 'rootfiles', i.e. excluding media files in subdirectories of the media directory. Then we take only those which exist, i.e. typecheck as File. Then we construct the source and destination paths, and finally actually perform the copy op, returning the result.

-

Note that src is the media directory where the files originate, and tgt is the media directory we're copying to.

-
- -Expand source code - -
@curried
-@beartype
-def copy_note_media(
-    col: Collection, src: Dir, tgt: Dir, row: NoteDBRow
-) -> FrozenSet[File]:
-    """
-    Copy a single note's media files and return the copies as a set. We do this
-    by first filtering for only 'rootfiles', i.e. excluding media files in
-    subdirectories of the media directory. Then we take only those which exist,
-    i.e. typecheck as `File`. Then we construct the source and destination
-    paths, and finally actually perform the copy op, returning the result.
-
-    Note that `src` is the media directory where the files originate, and `tgt`
-    is the media directory we're copying to.
-    """
-    files: Iterable[str] = media_filenames_in_field(col, row.flds)
-    rootfiles = filter(lambda f: f == os.path.basename(f), files)
-    medias: Iterable[File] = filter(F.isfile, map(lambda f: F.chk(src / f), rootfiles))
-    srcdsts = map(lambda file: (file, F.chk(tgt / file.name)), medias)
-    return frozenset(starmap(F.copyfile, srcdsts))
-
-
-
-def copy_notetype_media(src: Dir, tgt: Dir, paths: set[pathlib.Path], m: dict[str, typing.Any]) ‑> frozenset[File] -
-
-

Copy media from notetype m from source to target, returning set of copies.

-
- -Expand source code - -
@curried
-@beartype
-def copy_notetype_media(
-    src: Dir, tgt: Dir, paths: Set[Path], m: NotetypeDict
-) -> FrozenSet[File]:
-    """Copy media from notetype `m` from source to target, returning set of copies."""
-    matches: Iterable[Path] = filter(lambda p: hasmedia(m, str(p)), paths)
-    medias = filter(F.isfile, map(lambda p: F.chk(src / p), matches))
-    srcdsts = map(lambda f: (f, F.chk(tgt / f.name)), medias)
-    return frozenset(starmap(F.copyfile, srcdsts))
-
-
-
-def cp_ki(ki_rev: KiRev, suffix: str) ‑> KiRepo -
-
-

Given a KiRev, i.e. a pair of the form (kirepo, SHA), we clone kirepo.repo into a temp directory and hard reset to the given commit hash. Copies the .ki/ directory from ki_rev.kirepo without making any changes.

-

Parameters

-
-
ki_rev : KiRev
-
The ki repository to clone, and a commit for it.
-
suffix : str
-
/tmp/…/ path suffix, e.g. ki/local/.
-
-

Returns

-
-
KiRepo
-
The copied ki repository.
-
-
- -Expand source code - -
@beartype
-def cp_ki(ki_rev: KiRev, suffix: str) -> KiRepo:
-    """
-    Given a KiRev, i.e. a pair of the form (kirepo, SHA), we clone
-    `kirepo.repo` into a temp directory and hard reset to the given commit
-    hash. Copies the .ki/ directory from `ki_rev.kirepo` without making any
-    changes.
-
-    Parameters
-    ----------
-    ki_rev : KiRev
-        The ki repository to clone, and a commit for it.
-    suffix : str
-        /tmp/.../ path suffix, e.g. `ki/local/`.
-
-    Returns
-    -------
-    KiRepo
-        The copied ki repository.
-    """
-    rev: Rev = F.ki_rev_to_rev(ki_rev)
-    print(F.root(rev.repo))
-    ephem: git.Repo = cp_repo(rev, suffix)
-    F.force_mkdir(F.root(ephem) / KI / BACKUPS_DIR)
-    kirepo: KiRepo = M.kirepo(F.root(ephem))
-    return kirepo
-
-
-
-def cp_repo(rev: Rev, suffix: str) ‑> git.repo.base.Repo -
-
-

Get a temporary copy of a git repository in /tmp/<suffix>/.

-
- -Expand source code - -
@beartype
-def cp_repo(rev: Rev, suffix: str) -> git.Repo:
-    """Get a temporary copy of a git repository in /tmp/<suffix>/."""
-    # Copy the entire repo into a temp directory ending in `../suffix/`.
-    target: NoFile = F.chk(F.mkdtemp() / suffix)
-    ephem = git.Repo(F.copytree(F.root(rev.repo), target))
-
-    # Do a reset --hard to the given SHA.
-    ephem.git.reset(rev.sha, hard=True)
-    return ephem
-
-
-
-def diff2(repo: git.repo.base.Repo, parse: collections.abc.Callable[[Delta], DeckNote]) ‑> collections.abc.Iterable[typing.Union[Delta, Warning]] -
-
-

Diff repo from HEAD~1 to HEAD.

-
- -Expand source code - -
@beartype
-def diff2(
-    repo: git.Repo,
-    parse: Callable[[Delta], DeckNote],
-) -> Iterable[Union[Delta, Warning]]:
-    """Diff `repo` from `HEAD~1` to `HEAD`."""
-    # We diff from A (the repo at `HEAD~1`) to B (the repo at `HEAD`).
-    head1: Rev = M.rev(repo, repo.commit("HEAD~1").hexsha)
-    uuid = hex(random.randrange(16**4))[2:]
-    head1_repo = cp_repo(head1, suffix=f"HEAD~1-{uuid}")
-    a_root, b_root = F.root(head1_repo), F.root(repo)
-    diffidx = repo.commit("HEAD~1").diff(repo.commit("HEAD"))
-
-    # Get the diffs for each change type (e.g. 'DELETED').
-    return chain(*map(mungediff(parse, a_root, b_root), diffidx))
-
-
-
-def do(f: collections.abc.Callable[[typing.Any], typing.Any], xs: collections.abc.Iterable[typing.Any]) ‑> None -
-
-

Perform some action on an iterable.

-
- -Expand source code - -
@beartype
-def do(f: Callable[[Any], Any], xs: Iterable[Any]) -> None:
-    """Perform some action on an iterable."""
-    list(map(f, xs))
-
-
-
-def echo(string: str, silent: bool = False) ‑> None -
-
-

Call click.secho() with formatting.

-
- -Expand source code - -
@beartype
-def echo(string: str, silent: bool = False) -> None:
-    """Call `click.secho()` with formatting."""
-    if not silent:
-        click.secho(string, bold=True)
-
-
-
-def echo_note_change_types(deltas: collections.abc.Iterable[Delta]) ‑> None -
-
-

Write a table of git change types for notes to stdout.

-
- -Expand source code - -
@beartype
-def echo_note_change_types(deltas: Iterable[Delta]) -> None:
-    """Write a table of git change types for notes to stdout."""
-    # pylint: disable=too-many-locals
-    is_change_type = lambda t: lambda d: d.status == t
-
-    vs, ws, xs, ys, zs = tee(deltas, 5)
-    adds = list(filter(is_change_type(ADDED), vs))
-    deletes = list(filter(is_change_type(DELETED), ws))
-    renames = list(filter(is_change_type(RENAMED), xs))
-    modifies = list(filter(is_change_type(MODIFIED), ys))
-    types = list(filter(is_change_type(TYPECHANGED), zs))
-
-    LPAD, RPAD = 15, 9
-    add_info: str = "ADD".ljust(LPAD) + str(len(adds)).rjust(RPAD)
-    delete_info: str = "DELETE".ljust(LPAD) + str(len(deletes)).rjust(RPAD)
-    modification_info: str = "MODIFY".ljust(LPAD) + str(len(modifies)).rjust(RPAD)
-    rename_info: str = "RENAME".ljust(LPAD) + str(len(renames)).rjust(RPAD)
-    type_info: str = "TYPE CHANGE".ljust(LPAD) + str(len(types)).rjust(RPAD)
-
-    echo("=" * (LPAD + RPAD))
-    echo("Note change types")
-    echo("-" * (LPAD + RPAD))
-    echo(f"{add_info}\n{delete_info}\n{modification_info}\n{rename_info}\n{type_info}")
-    echo("=" * (LPAD + RPAD))
-
-
-
-def get_field_note_id(nid: int, fieldname: str) ‑> str -
-
-

A str ID that uniquely identifies field-note pairs.

-
- -Expand source code - -
@beartype
-def get_field_note_id(nid: int, fieldname: str) -> str:
-    """A str ID that uniquely identifies field-note pairs."""
-    return f"{nid}{F.slugify(fieldname)}"
-
-
-
-def get_field_payload(col: anki.collection.Collection, name: str, content: str) ‑> list[str] -
-
-

Get the lines of a markdown snippet for some Anki note field.

-
- -Expand source code - -
@curried
-@beartype
-def get_field_payload(col: Collection, name: str, content: str) -> List[str]:
-    """Get the lines of a markdown snippet for some Anki note field."""
-    text = col.media.escape_media_filenames(html_to_screen(content), unescape=True)
-    return [f"## {name}", text, ""]
-
-
-
-def get_guid(fields: list[str]) ‑> str -
-
-

Construct a new GUID for a note. Adapted from genanki's guid_for().

-
- -Expand source code - -
@beartype
-def get_guid(fields: List[str]) -> str:
-    """Construct a new GUID for a note. Adapted from genanki's `guid_for()`."""
-    # Get the first 8 bytes of the SHA256 of `contents` as an int.
-    m = hashlib.sha256()
-    m.update("__".join(fields).encode("utf-8"))
-    x = reduce(lambda h, b: (h << 8) + b, m.digest()[:8], 0)
-
-    # convert to the weird base91 format that Anki uses
-    chars = []
-    while x > 0:
-        chars.append(BASE91_TABLE[x % len(BASE91_TABLE)])
-        x //= len(BASE91_TABLE)
-    return "".join(reversed(chars))
-
-
-
-def get_header_lines(colnote) ‑> list[str] -
-
-

Get header of markdown representation of note.

-
- -Expand source code - -
@beartype
-def get_header_lines(colnote) -> List[str]:
-    """Get header of markdown representation of note."""
-    lines = [
-        "# Note",
-        "```",
-        f"guid: {colnote.n.guid}",
-        f"notetype: {colnote.notetype.name}",
-        "```",
-        "",
-        "### Tags",
-        "```",
-    ]
-    lines += colnote.n.tags
-    lines += ["```", ""]
-    return lines
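Joined with newlines (as get_note_payload() does below), these lines yield a header of roughly the following shape; the guid, notetype, and tag shown are placeholders:

    # Note
    ```
    guid: AbCdEfGh123
    notetype: Basic
    ```

    ### Tags
    ```
    mathematics
    ```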
-
-
-
-def get_models_recursively(kirepo: KiRepo) ‑> dict[str, Notetype] -
-
-

Find and merge all models.json files recursively. Returns a dictionary sending model names to Notetypes.

-
- -Expand source code - -
@beartype
-def get_models_recursively(kirepo: KiRepo) -> Dict[str, Notetype]:
-    """
-    Find and merge all `models.json` files recursively. Returns a dictionary
-    sending model names to Notetypes.
-    """
-
-    @beartype
-    def load(file: File) -> Iterable[Notetype]:
-        """Load a models file."""
-        with open(file, "r", encoding=UTF8) as f:
-            return map(M.notetype, json.load(f).values())
-
-    notetypes = F.cat(map(load, F.rglob(kirepo.root, MODELS_FILE)))
-    return {notetype.name: notetype for notetype in notetypes}
-
-
-
-def get_note_metadata(col: anki.collection.Collection) ‑> dict[str, NoteMetadata] -
-
-

Construct a map from guid -> (nid, mod, mid), adapted from Anki2Importer._import_notes(). Note that mod is the modification timestamp, in epoch seconds (timestamp of when the note was last modified).

-
- -Expand source code - -
@beartype
-def get_note_metadata(col: Collection) -> Dict[str, NoteMetadata]:
-    """
-    Construct a map from guid -> (nid, mod, mid), adapted from
-    `Anki2Importer._import_notes()`. Note that `mod` is the modification
-    timestamp, in epoch seconds (timestamp of when the note was last modified).
-    """
-    guids: Dict[str, NoteMetadata] = {}
-    for nid, guid, mod, mid in col.db.execute("select id, guid, mod, mid from notes"):
-        guids[guid] = NoteMetadata(nid, mod, mid)
-    return guids
-
-
-
-def get_note_path(colnote: ColNote, deck_dir: Dir, card_name: str = '') ‑> NoFile -
-
-

Get note path from sort field text.

-
- -Expand source code - -
@beartype
-def get_note_path(colnote: ColNote, deck_dir: Dir, card_name: str = "") -> NoFile:
-    """Get note path from sort field text."""
-    field_text = colnote.sfld
-
-    # Construct filename, stripping HTML tags and sanitizing (quickly).
-    field_text = plain_to_html(field_text)
-    field_text = re.sub("<[^<]+?>", "", field_text)
-
-    # If the HTML stripping removed all text, we just slugify the raw sort
-    # field text.
-    if len(field_text) == 0:
-        field_text = colnote.sfld
-
-    name = field_text[:MAX_FILENAME_LEN]
-    slug = F.slugify(name)
-
-    # If the slug is still empty, use all the fields.
-    if len(slug) == 0:
-        contents = " ".join(colnote.n.values())
-        name = contents[:MAX_FILENAME_LEN]
-        slug = F.slugify(name)
-
-    # Make it so `slug` cannot possibly be an empty string, because then we get
-    # a `Path('.')` which is a bug, and causes a runtime exception. If all else
-    # fails, use the notetype name, hash of the payload, and creation date.
-    if len(slug) == 0:
-        guidhex = colnote.n.guid.encode(UTF8).hex()
-        slug: str = f"{colnote.notetype.name}--{guidhex}"
-
-        # Note IDs are in milliseconds.
-        dt = datetime.datetime.fromtimestamp(colnote.n.id / 1000.0)
-        slug += "--" + dt.strftime("%Y-%m-%d--%Hh-%Mm-%Ss")
-        F.yellow(f"Slug for note with guid '{colnote.n.guid}' is empty...")
-        F.yellow(f"Using hex representation of guid in filename: '{slug}'")
-
-    if card_name != "":
-        slug = f"{slug}_{card_name}"
-    filename: str = f"{slug}{MD}"
-    note_path = F.chk(deck_dir / filename, resolve=False)
-
-    i = 1
-    while not isinstance(note_path, NoFile):
-        filename = f"{slug}_{i}{MD}"
-        note_path = F.chk(deck_dir / filename, resolve=False)
-        i += 1
-
-    return note_path
-
-
-
-def get_note_payload(colnote: ColNote) ‑> str -
-
-

Return the markdown-converted contents of the Anki note represented by colnote as a string.

-

A ColNote is a dataclass wrapper around a Note object which has been loaded from the DB.

-
- -Expand source code - -
@beartype
-def get_note_payload(colnote: ColNote) -> str:
-    """
-    Return the markdown-converted contents of the Anki note represented by
-    `colnote` as a string.
-
-    A `ColNote` is a dataclass wrapper around a `Note` object which has been
-    loaded from the DB.
-    """
-    lines = get_header_lines(colnote)
-    lines += F.cat(starmap(get_field_payload(colnote.n.col), colnote.n.items()))
-    return "\n".join(lines)
-
-
-
-def get_target(cwd: Dir, col_file: File, directory: str) ‑> tuple[EmptyDir, bool] -
-
-

Create default target directory.

-
- -Expand source code - -
@beartype
-def get_target(cwd: Dir, col_file: File, directory: str) -> Tuple[EmptyDir, bool]:
-    """Create default target directory."""
-    path = F.chk(Path(directory) if directory != "" else cwd / col_file.stem)
-    new: bool = True
-    if isinstance(path, NoPath):
-        path.mkdir(parents=True)
-        return M.emptydir(path), new
-    if isinstance(path, EmptyDir):
-        new = False
-        return path, new
-    raise TargetExistsError(path)
-
-
-
-def git_pull(remote: str, branch: str, cwd: Dir) ‑> str -
-
-

Pull remote into branch using a subprocess call.

-
- -Expand source code - -
@beartype
-def git_pull(remote: str, branch: str, cwd: Dir) -> str:
-    """Pull remote into branch using a subprocess call."""
-    args = ["git", "pull", "-v", remote, branch]
-    p = subprocess.run(args, check=False, cwd=cwd, capture_output=True)
-    return f"{p.stdout.decode()}\n{p.stderr.decode()}"
-
-
-
-def hasmedia(model: dict[str, typing.Any], fname: str) ‑> bool -
-
-

Check if a notetype has media.

-

Adapted from anki.exporting.AnkiExporter._modelHasMedia(), which is an instance method, but does not make any use of self, and so could be a staticmethod. It is a pure function.

-
- -Expand source code - -
@beartype
-def hasmedia(model: NotetypeDict, fname: str) -> bool:
-    """
-    Check if a notetype has media.
-
-    Adapted from `anki.exporting.AnkiExporter._modelHasMedia()`, which is an
-    instance method, but does not make any use of `self`, and so could be a
-    staticmethod. It is a pure function.
-    """
-    # First check the styling.
-    if fname in model["css"]:
-        return True
-    # If no reference to fname then check the templates as well.
-    return any(map(lambda t: fname in t["qfmt"] or fname in t["afmt"], model["tmpls"]))
-
-
-
-def html_to_screen(html: str) ‑> str -
-
-

Convert html for a single field into plaintext, to be displayed within a markdown file.

-

Does very little (just converts HTML-escaped special characters like <br> tags or &nbsp;s to their UTF-8 equivalents).

-
- -Expand source code - -
@beartype
-def html_to_screen(html: str) -> str:
-    """
-    Convert html for a *single field* into plaintext, to be displayed within a
-    markdown file.
-
-    Does very little (just converts HTML-escaped special characters like `<br>`
-    tags or `&nbsp;`s to their UTF-8 equivalents).
-    """
-    html = re.sub(r"\<style\>.*\<\/style\>", "", html, flags=re.S)
-    plain = html
-
-    # For convenience: Un-escape some common LaTeX constructs.
-    plain = plain.replace(r"\\\\", r"\\")
-    plain = plain.replace(r"\\{", r"\{")
-    plain = plain.replace(r"\\}", r"\}")
-    plain = plain.replace(r"\*}", r"*}")
-
-    plain = plain.replace(r"&lt;", "<")
-    plain = plain.replace(r"&gt;", ">")
-    plain = plain.replace(r"&amp;", "&")
-    plain = plain.replace(r"&nbsp;", " ")
-
-    plain = plain.replace("<br>", "\n")
-    plain = plain.replace("<br/>", "\n")
-    plain = plain.replace("<br />", "\n")
-
-    # Unbreak lines within src attributes.
-    plain = re.sub('src= ?\n"', 'src="', plain)
-
-    plain = re.sub(r"\<b\>\s*\<\/b\>", "", plain)
-    return plain
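A sketch of the intended behaviour on a representative field (assuming html_to_screen is imported from ki): HTML entities and <br> variants are unescaped, while other markup is left alone.

    assert html_to_screen("a &lt; b<br>next&nbsp;line") == "a < b\nnext line"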
-
-
-
-def is_anki_note(path: File) ‑> bool -
-
-

Check if file is a ki-style markdown note.

-
- -Expand source code - -
@beartype
-def is_anki_note(path: File) -> bool:
-    """Check if file is a `ki`-style markdown note."""
-    # Ought to have markdown file extension.
-    if path.suffix != ".md":
-        return False
-    with open(path, "r", encoding=UTF8) as md_f:
-        lines = md_f.readlines()
-    if len(lines) < 8:
-        return False
-    if lines[0] != "# Note\n":
-        return False
-    if lines[1] != "```\n":
-        return False
-    if not re.match(r"^guid: ", lines[2]):
-        return False
-    return True
-
-
-
-def is_ignorable(root: Dir, path: pathlib.Path) ‑> bool -
-
-

Filter out paths in a git repository diff that do not correspond to Anki notes.

-

We could do this purely using calls to is_anki_note(), but these are expensive, so we try to find matches without opening any files first.

-
- -Expand source code - -
@beartype
-def is_ignorable(root: Dir, path: Path) -> bool:
-    """
-    Filter out paths in a git repository diff that do not correspond to Anki
-    notes.
-
-    We could do this purely using calls to `is_anki_note()`, but these are
-    expensive, so we try to find matches without opening any files first.
-    """
-    # Ignore if `path` is an exact match for any of the patterns. Since the
-    # contents of a git repository diff are always going to be files, this
-    # alone will not correctly ignore directory names given in `patterns`.
-    #
-    # If any of the patterns in `dirnames` resolve to one of the parents of
-    # `path`, return a warning, so that we are able to filter out entire
-    # directories.
-    filenames, dirnames = IGNORE_FILES, IGNORE_DIRS
-    if path.name in filenames | dirnames or len(set(path.parts) & dirnames) > 0:
-        return True
-
-    # If `path` is an extant file (not a directory) and *not* a note, ignore it.
-    file = F.chk(root / path)
-    if isinstance(file, File) and not is_anki_note(file):
-        return True
-    return False
-
-
-
-def localmedia(s: str, regex: str) ‑> collections.abc.Iterable[str] -
-
-

Return local media filenames matching the given regex pattern.

-
- -Expand source code - -
@curried
-@beartype
-def localmedia(s: str, regex: str) -> Iterable[str]:
-    """Return local media filenames matching the given regex pattern."""
-    fnames = map(lambda m: m.group("fname"), re.finditer(regex, s))
-    fnames = map(lambda s: s.strip(), fnames)
-    return filter(lambda x: not re.match(URLS, x.lower()), fnames)
-
-
-
-def lock(col_file: File) ‑> sqlite3.Connection -
-
-

Check that lock can be acquired on a SQLite3 database given a path.

-
- -Expand source code - -
@beartype
-def lock(col_file: File) -> sqlite3.Connection:
-    """Check that lock can be acquired on a SQLite3 database given a path."""
-    try:
-        con = sqlite3.connect(col_file, timeout=0.1)
-        con.isolation_level = "EXCLUSIVE"
-        con.execute("BEGIN EXCLUSIVE")
-    except sqlite3.DatabaseError as err:
-        raise SQLiteLockError(col_file, err) from err
-    if sys.platform == "win32":
-        con.commit()
-        con.close()
-    return con
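A sketch of the intended acquire/work/release pattern around lock() and unlock() (see unlock() further down; kirepo is any loaded KiRepo):

    con = lock(kirepo.col_file)   # raises SQLiteLockError if the DB is busy
    try:
        ...                       # read or copy the collection safely
    finally:
        unlock(con)               # early-returns on Windows, where lock() already closed the connection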
-
-
-
-def media_filenames_in_field(col: anki.collection.Collection, s: str) ‑> collections.abc.Iterable[str] -
-
-

A copy of MediaManager.files_in_str(), but without LaTeX rendering.

-
- -Expand source code - -
@beartype
-def media_filenames_in_field(col: Collection, s: str) -> Iterable[str]:
-    """A copy of `MediaManager.files_in_str()`, but without LaTeX rendering."""
-    s = (s.strip()).replace('"', "")
-    return F.cat(map(localmedia(s), col.media.regexps))
-
-
-
-def mediabytes(col: anki.collection.Collection, file: File) ‑> MediaBytes -
-
-

Get old bytes (from collection) and new bytes (from file) for media file.

-
- -Expand source code - -
@curried
-@beartype
-def mediabytes(col: Collection, file: File) -> MediaBytes:
-    """Get old bytes (from collection) and new bytes (from file) for media file."""
-    old: bytes = mediadata(col, file.name)
-    new: bytes = file.read_bytes()
-    return MediaBytes(file=file, old=old, new=new)
-
-
-
-def mediadata(col: anki.collection.Collection, fname: str) ‑> bytes -
-
-

Get media file content as bytes (empty if missing).

-
- -Expand source code - -
@beartype
-def mediadata(col: Collection, fname: str) -> bytes:
-    """Get media file content as bytes (empty if missing)."""
-    if not col.media.have(fname):
-        return b""
-    path = os.path.join(col.media.dir(), fname)
-    try:
-        with open(path, "rb") as f:
-            return f.read()
-    except OSError:
-        return b""
-
-
- -
-

Create a windows link for a card if one is necessary.

-
- -Expand source code - -
@beartype
-def mklink(targetd: Dir, colnote: ColNote, deckd: Dir, card: Card, file: File) -> None:
-    """Return a windows link for a card if one is necessary."""
-    note_path: NoFile = get_note_path(colnote, deckd, card.template()["name"])
-    M.link(targetd, PlannedLink(link=note_path, tgt=file))
-
-
-
-def mungediff(parse: collections.abc.Callable[[Delta], DeckNote], a_root: Dir, b_root: Dir, d: git.diff.Diff) ‑> collections.abc.Iterable[typing.Union[Delta, Warning]] -
-
-

Extract deltas and warnings from a collection of diffs.

-
- -Expand source code - -
@curried
-@beartype
-def mungediff(
-    parse: Callable[[Delta], DeckNote], a_root: Dir, b_root: Dir, d: git.Diff
-) -> Iterable[Union[Delta, Warning]]:
-    """Extract deltas and warnings from a collection of diffs."""
-    a, b = d.a_path, d.b_path
-    a, b = a if a else b, b if b else a
-    if is_ignorable(a_root, Path(a)) or is_ignorable(b_root, Path(b)):
-        return []
-
-    # Get absolute and relative paths to 'a' and 'b'.
-    AB = namedtuple("AB", "a b")
-    files = AB(F.chk(a_root / a), F.chk(b_root / b))
-    rels = AB(Path(a), Path(b))
-
-    if d.change_type == DELETED.value:
-        if not F.isfile(files.a):
-            return [DeletedFileNotFoundWarning(rels.a)]
-        return [Delta(GitChangeType.DELETED, files.a, rels.a)]
-    if not F.isfile(files.b):
-        return [DiffTargetFileNotFoundWarning(rels.b)]
-    if d.change_type == RENAMED.value:
-        a_delta = Delta(GitChangeType.DELETED, files.a, rels.a)
-        b_delta = Delta(GitChangeType.ADDED, files.b, rels.b)
-        a_decknote, b_decknote = parse(a_delta), parse(b_delta)
-        if a_decknote.guid != b_decknote.guid:
-            return [a_delta, b_delta]
-    return [Delta(GitChangeType(d.change_type), files.b, rels.b)]
-
-
-
-def parentmap(root: Union[Root, Deck]) ‑> dict[str, typing.Union[Root, Deck]] -
-
-

Map deck fullnames to parent Decks.

-
- -Expand source code - -
@beartype
-def parentmap(root: Union[Root, Deck]) -> Dict[str, Union[Root, Deck]]:
-    """Map deck fullnames to parent `Deck`s."""
-    parents = {child.fullname: root for child in root.children}
-    return parents | reduce(lambda x, y: x | y, map(parentmap, root.children), {})
-
-
-
-def parse_note(parser: lark.lark.Lark, transformer: NoteTransformer, delta: Delta) ‑> DeckNote -
-
-

Parse with lark.

-
- -Expand source code - -
@curried
-@beartype
-def parse_note(parser: Lark, transformer: NoteTransformer, delta: Delta) -> DeckNote:
-    """Parse with lark."""
-    tree = parser.parse(delta.path.read_text(encoding=UTF8))
-    flatnote: FlatNote = transformer.transform(tree)
-    parts: Tuple[str, ...] = delta.relpath.parent.parts
-    deck: str = "::".join(parts)
-
-    # Generate a GUID from the hash of the field contents if the `guid` field
-    # in the note file was left blank.
-    fields = list(flatnote.fields.values())
-    guid = flatnote.guid if flatnote.guid != "" else get_guid(fields)
-
-    return DeckNote(
-        title=flatnote.title,
-        guid=guid,
-        deck=deck,
-        model=flatnote.model,
-        tags=flatnote.tags,
-        fields=flatnote.fields,
-    )
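The deck name is derived purely from the note's location in the repository: the parent directories of the markdown file, joined with '::'. For a hypothetical delta whose relpath is algebra/linear/vectors.md:

    parts = ("algebra", "linear")
    deck = "::".join(parts)   # "algebra::linear"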
-
-
-
-def plain_to_html(plain: str) ‑> str -
-
-

Convert plain text to html

-
- -Expand source code - -
@beartype
-def plain_to_html(plain: str) -> str:
-    """Convert plain text to html"""
-    # Minor clean up
-    plain = plain.replace(r"&lt;", "<")
-    plain = plain.replace(r"&gt;", ">")
-    plain = plain.replace(r"&amp;", "&")
-    plain = plain.replace(r"&nbsp;", " ")
-    plain = re.sub(r"\<b\>\s*\<\/b\>", "", plain)
-    plain = re.sub(r"\<i\>\s*\<\/i\>", "", plain)
-    plain = re.sub(r"\<div\>\s*\<\/div\>", "", plain)
-
-    # Convert newlines to `<br>` tags.
-    if not re.search(HTML_REGEX, plain):
-        plain = plain.replace("\n", "<br>")
-
-    return plain.strip()
-
-
- -
-

Get the target of the to-be-created media symlink.

-
- -Expand source code - -
@curried
-@beartype
-def planned_link(
-    parents: Dict[str, Union[Root, Deck]], deck: Deck, media_file: File
-) -> Optional[PlannedLink]:
-    """Get the target of the to-be-created media symlink."""
-    link: Path = F.chk(deck.mediad / media_file.name, resolve=False)
-    if not isinstance(link, NoFile):
-        return None
-
-    parent: Union[Root, Deck] = parents[deck.fullname]
-    if isinstance(parent, Root):
-        tgt = media_file
-    else:
-        tgt = F.chk(parent.mediad / media_file.name, resolve=False)
-    return PlannedLink(link=link, tgt=tgt)
-
-
-
-def postorder(node: Union[Root, Deck]) ‑> list[Deck] -
-
-

Post-order traversal. Guarantees that we won't process a node until we've processed all its children.

-
- -Expand source code - -
@beartype
-def postorder(node: Union[Root, Deck]) -> List[Deck]:
-    """
-    Post-order traversal. Guarantees that we won't process a node until we've
-    processed all its children.
-    """
-    descendants: List[Deck] = reduce(lambda xs, x: xs + postorder(x), node.children, [])
-    return descendants if isinstance(node, Root) else descendants + [node]
-
-
-
-def preorder(node: Union[Root, Deck]) ‑> list[Deck] -
-
-

Pre-order traversal. Guarantees that we won't process a node until we've processed all its ancestors.

-
- -Expand source code - -
@beartype
-def preorder(node: Union[Root, Deck]) -> List[Deck]:
-    """
-    Pre-order traversal. Guarantees that we won't process a node until
-    we've processed all its ancestors.
-    """
-    descendants: List[Deck] = reduce(lambda xs, x: xs + preorder(x), node.children, [])
-    return descendants if isinstance(node, Root) else [node] + descendants
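A toy illustration of the two orderings, using a hypothetical stand-in class in place of ki's Root and Deck types (the real ones carry dids, directories, and media paths):

    from dataclasses import dataclass, field
    from functools import reduce
    from typing import List

    @dataclass
    class Node:                          # stand-in for Root/Deck
        name: str
        is_root: bool = False
        children: List["Node"] = field(default_factory=list)

    def post(n: Node) -> List[str]:
        below = reduce(lambda xs, x: xs + post(x), n.children, [])
        return below if n.is_root else below + [n.name]

    def pre(n: Node) -> List[str]:
        below = reduce(lambda xs, x: xs + pre(x), n.children, [])
        return below if n.is_root else [n.name] + below

    tree = Node("", is_root=True, children=[Node("A", children=[Node("A::B")])])
    print(post(tree))  # ['A::B', 'A']  -- children before parents
    print(pre(tree))   # ['A', 'A::B']  -- ancestors before descendants

write_decks() below enumerates decks with postorder(), so each deck is visited only after all of its subdecks.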
-
-
-
-def push_note(col: anki.collection.Collection, timestamp_ns: int, guids: dict[str, NoteMetadata], new_nids: collections.abc.Iterator[int], decknote: DeckNote) ‑> collections.abc.Iterable[Warning] -
-
-

Update the Anki Note object in col corresponding to decknote, creating it if it does not already exist.

-

Raises

-
-
MissingNotetypeError
-
If we can't find a notetype with the name provided in decknote.
-
-
- -Expand source code - -
@curried
-@beartype
-def push_note(
-    col: Collection,
-    timestamp_ns: int,
-    guids: Dict[str, NoteMetadata],
-    new_nids: Iterator[int],
-    decknote: DeckNote,
-) -> Iterable[Warning]:
-    """
-    Update the Anki `Note` object in `col` corresponding to `decknote`,
-    creating it if it does not already exist.
-
-    Raises
-    ------
-    MissingNotetypeError
-        If we can't find a notetype with the name provided in `decknote`.
-    """
-    # Notetype/model names are privileged in Anki, so if we don't find the
-    # right name, we raise an error.
-    model_id: Optional[int] = col.models.id_for_name(decknote.model)
-    if model_id is None:
-        raise MissingNotetypeError(decknote.model)
-    new_notetype: Notetype = M.notetype(col.models.get(model_id))
-
-    if decknote.guid in guids:
-        nid: int = guids[decknote.guid].nid
-        try:
-            note: Note = col.get_note(nid)
-        except NotFoundError as err:
-            print(f"{nid = }")
-            print(f"{decknote.guid = }")
-            raise err
-    else:
-        nid: int = next(new_nids)
-        note: Note = add_db_note(
-            col,
-            nid,
-            decknote.guid,
-            model_id,
-            mod=int(timestamp_ns // 1e9),
-            usn=-1,
-            tags=decknote.tags,
-            fields=list(decknote.fields.values()),
-            sfld=decknote.fields[new_notetype.sortf.name],
-            csum=0,
-            flags=0,
-            data="",
-        )
-
-    # If we are updating an existing note, we need to know the old and new
-    # notetypes, and then update the notetype (and the rest of the note data)
-    # accordingly.
-    old_notetype: Notetype = M.notetype(note.note_type())
-    return update_note(note, decknote, old_notetype, new_notetype)
-
-
-
-def stardo(f: collections.abc.Callable[[typing.Any], typing.Any], xs: collections.abc.Iterable[typing.Any]) ‑> None -
-
-

Perform some action on an iterable of tuples, unpacking arguments.

-
- -Expand source code - -
@beartype
-def stardo(f: Callable[[Any], Any], xs: Iterable[Any]) -> None:
-    """Perform some action on an iterable of tuples, unpacking arguments."""
-    list(starmap(f, xs))
-
-
- -
-

Create chained symlinks for a single deck.

-
- -Expand source code - -
@curried
-@beartype
-def symlink_deck_media(
-    col: Collection,
-    targetd: Dir,
-    media: Dict[int, Set[File]],
-    parents: Dict[str, Union[Root, Deck]],
-    deck: Deck,
-) -> None:
-    """Create chained symlinks for a single deck."""
-    # Get nids for all descendant notes with media.
-    descendants: List[CardId] = col.decks.cids(did=deck.did, children=True)
-    cards: Iterable[Card] = map(col.get_card, descendants)
-    nids: Set[NoteId] = {NOTETYPE_NID} | set(map(lambda c: c.nid, cards))
-
-    # Get link path and target for each media file, and create the links.
-    files = F.cat(map(lambda nid: media[nid], filter(lambda nid: nid in media, nids)))
-    plinks = filter(None, map(planned_link(parents, deck), files))
-    do(M.link(targetd), plinks)
-
-
- -
-

Chain symlinks up the deck tree into top-level <collection>/_media/.

-
- -Expand source code - -
@beartype
-def symlink_media(
-    col: Collection,
-    root: Root,
-    targetd: Dir,
-    media: Dict[int, Set[File]],
-) -> None:
-    """Chain symlinks up the deck tree into top-level `<collection>/_media/`."""
-    decks: List[Deck] = preorder(root)
-    parents: Dict[str, Union[Root, Deck]] = parentmap(root)
-    return do(symlink_deck_media(col, targetd, media, parents), decks)
-
-
-
-def unlock(con: sqlite3.Connection) ‑> None -
-
-

Unlock a SQLite3 database.

-
- -Expand source code - -
@beartype
-def unlock(con: sqlite3.Connection) -> None:
-    """Unlock a SQLite3 database."""
-    if sys.platform == "win32":
-        return
-    con.commit()
-    con.close()
-
-
-
-def update_field(decknote: DeckNote, note: anki.notes.Note, key: str, field: str) ‑> None -
-
-

Update a field contained in note.

-
- -Expand source code - -
@curried
-@beartype
-def update_field(decknote: DeckNote, note: Note, key: str, field: str) -> None:
-    """Update a field contained in `note`."""
-    try:
-        note[key] = plain_to_html(field)
-    except IndexError as err:
-        raise AnkiDBNoteMissingFieldsError(decknote, note.id, key) from err
-
-
-
-def update_note(note: anki.notes.Note, decknote: DeckNote, old_notetype: Notetype, new_notetype: Notetype) ‑> collections.abc.Iterable[Warning] -
-
-

Change all the data of note to that given in decknote.

-

This is only to be called on notes whose nid already exists in the database. Creates a new deck if decknote.deck doesn't exist. Assumes that the model has already been added to the collection, and raises an exception if it finds otherwise. Changes notetype to that specified by decknote.model. Overwrites all fields with decknote.fields.

-

Updates: tags, deck, model, and fields.

-
- -Expand source code - -
@beartype
-def update_note(
-    note: Note, decknote: DeckNote, old_notetype: Notetype, new_notetype: Notetype
-) -> Iterable[Warning]:
-    """
-    Change all the data of `note` to that given in `decknote`.
-
-    This is only to be called on notes whose nid already exists in the
-    database.  Creates a new deck if `decknote.deck` doesn't exist.  Assumes
-    that the model has already been added to the collection, and raises an
-    exception if it finds otherwise.  Changes notetype to that specified by
-    `decknote.model`.  Overwrites all fields with `decknote.fields`.
-
-    Updates:
-    - tags
-    - deck
-    - model
-    - fields
-    """
-
-    # Check that the passed argument `new_notetype` has a name consistent with
-    # the model specified in `decknote`. The former should be derived from the
-    # latter, and if they don't match, there is a bug in the caller.
-    if decknote.model != new_notetype.name:
-        raise NotetypeMismatchError(decknote, new_notetype)
-
-    nid = note.id
-    note.tags = decknote.tags
-    note.flush()
-
-    # Set the deck of the given note, as well as all its cards, and create a
-    # deck with this name if it doesn't already exist. See the
-    # comments/docstrings in the implementation of the
-    # `anki.decks.DeckManager.id()` method.
-    newdid: int = note.col.decks.id(decknote.deck, create=True)
-    cids = [c.id for c in note.cards()]
-    if cids:
-        note.col.set_deck(cids, newdid)
-
-    # Set notetype (also clears all fields).
-    if old_notetype.id != new_notetype.id:
-        fmap = {field.ord: None for field in old_notetype.flds}
-        note.col.models.change(old_notetype.dict, [nid], new_notetype.dict, fmap, None)
-        note.load()
-
-    # Validate field keys against notetype.
-    warnings: List[Warning] = validate_decknote_fields(new_notetype, decknote)
-    if len(warnings) > 0:
-        return warnings
-
-    # Set field values and flush to collection database. This is correct
-    # because every field name that appears in `new_notetype` is contained in
-    # `decknote.fields`, or else we would have printed a warning and returned
-    # above.
-    missing = {key for key in decknote.fields if key not in note}
-    warnings = map(lambda k: NoteFieldValidationWarning(nid, k, new_notetype), missing)
-    fields = [(key, field) for key, field in decknote.fields.items() if key in note]
-    stardo(update_field(decknote, note), fields)
-    note.flush()
-
-    # Remove if unhealthy.
-    fwarns: List[Warning] = check_fields_health(note)
-    if len(fwarns) > 0:
-        note.col.remove_notes([nid])
-    return chain(warnings, fwarns)
-
-
-
-def validate_decknote_fields(notetype: Notetype, decknote: DeckNote) ‑> list[Warning] -
-
-

Validate that the fields given in the note match the notetype.

-
- -Expand source code - -
@beartype
-def validate_decknote_fields(notetype: Notetype, decknote: DeckNote) -> List[Warning]:
-    """Validate that the fields given in the note match the notetype."""
-    warnings: List[Warning] = []
-    names: List[str] = [field.name for field in notetype.flds]
-
-    # TODO: It might also be nice to print the path of the note in the
-    # repository. This would have to be added to the `DeckNote` spec.
-    if len(decknote.fields.keys()) != len(names):
-        warnings.append(WrongFieldCountWarning(decknote, names))
-
-    mk_warning = lambda n, k: InconsistentFieldNamesWarning(n, k, decknote)
-    names_and_keys = F.starfilter(
-        lambda n, k: n != k, zip(names, decknote.fields.keys())
-    )
-    return warnings + list(starmap(mk_warning, names_and_keys))
-
-
-
-def warn(w: Warning) ‑> None -
-
-

Call click.secho() with formatting (yellow).

-
- -Expand source code - -
@beartype
-def warn(w: Warning) -> None:
-    """Call `click.secho()` with formatting (yellow)."""
-    click.secho(f"WARNING: {str(w)}", bold=True, fg="yellow")
-
-
-
-def write_collection(deltas: collections.abc.Iterable[Delta], models: dict[str, Notetype], kirepo: KiRepo, parse: collections.abc.Callable[[Delta], DeckNote], head_kirepo: KiRepo, con: sqlite3.Connection) ‑> PushResult -
-
-

Push a list of Deltas to an Anki collection.

-
- -Expand source code - -
@beartype
-def write_collection(
-    deltas: Iterable[Delta],
-    models: Dict[str, Notetype],
-    kirepo: KiRepo,
-    parse: Callable[[Delta], DeckNote],
-    head_kirepo: KiRepo,
-    con: sqlite3.Connection,
-) -> PushResult:
-    """Push a list of `Delta`s to an Anki collection."""
-    # pylint: disable=too-many-locals
-    # Copy collection to a temp directory.
-    temp_col_dir: Dir = F.mkdtemp()
-    new_col_file = temp_col_dir / kirepo.col_file.name
-    col_name: str = kirepo.col_file.name
-    new_col_file: NoFile = F.chk(temp_col_dir / col_name)
-    new_col_file: File = F.copyfile(kirepo.col_file, new_col_file)
-
-    # Open collection and add new models to root `models.json` file.
-    col: Collection = M.collection(new_col_file)
-    do(add_model(col), models.values())
-
-    # Stash both unstaged and staged files (including untracked).
-    head_kirepo.repo.git.stash(include_untracked=True, keep_index=True)
-    head_kirepo.repo.git.reset("HEAD", hard=True)
-
-    # Display table of note change type counts and partition deltas into
-    # 'deletes' and 'not deletes'.
-    xs, ys, zs = tee(deltas, 3)
-    echo_note_change_types(xs)
-    dels: Iterable[Delta] = filter(lambda d: d.status == DELETED, ys)
-    deltas: Iterable[Delta] = filter(lambda d: d.status != DELETED, zs)
-
-    # Map guid -> (nid, mod, mid).
-    guids: Dict[str, NoteMetadata] = get_note_metadata(col)
-
-    # Parse to-be-deleted notes and remove them from collection.
-    del_guids: Iterable[str] = map(lambda dd: dd.guid, map(parse, dels))
-    del_guids = set(filter(lambda g: g in guids, del_guids))
-    del_nids: Iterable[NoteId] = map(lambda g: guids[g].nid, del_guids)
-    col.remove_notes(list(del_nids))
-
-    # Push changes for all other notes.
-    guids = {k: v for k, v in guids.items() if k not in del_guids}
-    timestamp_ns: int = time.time_ns()
-    new_nids: Iterator[int] = itertools.count(int(timestamp_ns / 1e6))
-    decknotes: Iterable[DeckNote] = map(parse, deltas)
-    do(warn, F.cat(map(push_note(col, timestamp_ns, guids, new_nids), decknotes)))
-
-    # It is always safe to save changes to the DB, since the DB is a copy.
-    col.close(save=True)
-
-    # Backup collection file and overwrite collection.
-    backup(kirepo)
-    F.copyfile(new_col_file, kirepo.col_file)
-    echo(f"Overwrote '{kirepo.col_file}'")
-
-    # Add media files to collection.
-    col: Collection = M.collection(kirepo.col_file)
-    media_files = F.rglob(head_kirepo.root, MEDIA_FILE_RECURSIVE_PATTERN)
-    mbytes: Iterable[MediaBytes] = map(mediabytes(col), media_files)
-
-    # Skip media files whose twin in collection has same name and same data.
-    mbytes = filter(lambda m: m.old == b"" or m.old != m.new, mbytes)
-
-    # Add (and possibly rename) media paths.
-    renames = filter(lambda a: a.file.name != a.new_name, map(addmedia(col), mbytes))
-    warnings = map(lambda r: RenamedMediaFileWarning(r.file.name, r.new_name), renames)
-    do(warn, warnings)
-    col.close(save=True)
-
-    # Append and commit collection checksum to hashes file.
-    append_md5sum(kirepo.ki, kirepo.col_file.name, F.md5(kirepo.col_file))
-    commit_hashes_file(kirepo)
-
-    # Update commit SHA of most recent successful PUSH and unlock SQLite DB.
-    kirepo.repo.delete_tag(LCA)
-    kirepo.repo.create_tag(LCA)
-    unlock(con)
-    return PushResult.NONTRIVIAL
-
-
-
-def write_decks(col: anki.collection.Collection, targetdir: Dir, colnotes: dict[int, ColNote], media: dict[int, set[File]]) ‑> None -
-
-

The proper way to do this is a DFS traversal, perhaps recursively, which will make it easier to keep things purely functional, accumulating the model ids of the children in each node. For this, we must construct a tree from the deck names.

-

Implement new ColNote-writing procedure, using DeckTreeNodes.

-

It must do the following for each deck: create the deck directory, write the models.json file, create and populate the media directory, and write the note payload for each note in the correct deck, exactly once.

-

In other words, for each deck, we need to write all of its models, media, and notes.

-

The first two are cumulative: we want the models and media of subdecks to be included in their ancestors. The notes, however, should not be cumulative. Indeed, we want each note to appear exactly once in the entire repository, making allowances for the case where a single note's cards are spread across multiple decks, in which case we must create a symlink.

-

And actually, both of these cases are nicely taken care of for us by the DeckManager.cids() function, which has a children: bool parameter that toggles whether or not to get the card ids of subdecks.

-
- -Expand source code - -
@beartype
-def write_decks(
-    col: Collection,
-    targetdir: Dir,
-    colnotes: Dict[int, ColNote],
-    media: Dict[int, Set[File]],
-) -> None:
-    """
-    The proper way to do this is a DFS traversal, perhaps recursively, which
-    will make it easier to keep things purely functional, accumulating the
-    model ids of the children in each node. For this, we must construct a tree
-    from the deck names.
-
-    Implement new `ColNote`-writing procedure, using `DeckTreeNode`s.
-
-    It must do the following for each deck:
-    - create the deck directory
-    - write the models.json file
-    - create and populate the media directory
-    - write the note payload for each note in the correct deck, exactly once
-
-    In other words, for each deck, we need to write all of its:
-    - models
-    - media
-    - notes
-
-    The first two are cumulative: we want the models and media of subdecks to
-    be included in their ancestors. The notes, however, should not be
-    cumulative. Indeed, we want each note to appear exactly once in the
-    entire repository, making allowances for the case where a single note's
-    cards are spread across multiple decks, in which case we must create a
-    symlink.
-
-    And actually, both of these cases are nicely taken care of for us by the
-    `DeckManager.cids()` function, which has a `children: bool` parameter
-    which toggles whether or not to get the card ids of subdecks or not.
-    """
-    # Accumulate pairs of model ids and notetype maps. The return type of the
-    # `ModelManager.get()` call below indicates that it may return `None`,
-    # but we know it will not because we are getting the notetype id straight
-    # from the Anki DB.
-    #
-    # Dump the models file for the whole repository.
-    models = {m.id: col.models.get(m.id) for m in col.models.all_names_and_ids()}
-    with open(targetdir / MODELS_FILE, "w", encoding=UTF8) as f:
-        json.dump(models, f, ensure_ascii=False, indent=4, sort_keys=True)
-
-    # Construct an iterable of all decks except the trivial deck.
-    root: Deck = M.tree(col, targetdir, col.decks.deck_tree())
-    collisions, decks = F.part(lambda d: MEDIA in d.fullname, postorder(root))
-    if any(True for _ in collisions):
-        warn(MediaDirectoryDeckNameCollisionWarning())
-    decks = list(decks)
-    deckmap = {d.fullname: d for d in decks}
-
-    # Write cards, models, and media to filesystem.
-    do(write_note(col, targetdir, deckmap), TQ(colnotes.values(), "Notes"))
-    do(write_models(col, models), TQ(decks, "Notetypes"))
-    symlink_media(col, root, targetdir, media)
-
-
-
-def write_models(col: anki.collection.Collection, models: dict[int, dict[str, typing.Any]], deck: Deck) ‑> None -
-
-

Write the models.json file for the given deck.

-
- -Expand source code - -
@curried
-@beartype
-def write_models(col: Collection, models: Dict[int, NotetypeDict], deck: Deck) -> None:
-    """Write the `models.json` file for the given deck."""
-    did: int = deck.did
-    deckd: Dir = deck.deckd
-    descendants: List[CardId] = col.decks.cids(did=did, children=True)
-    cards: List[Card] = list(map(col.get_card, descendants))
-    descendant_mids: Set[int] = {c.note().mid for c in cards}
-
-    # Write `models.json` for current deck.
-    deck_models = {mid: models[mid] for mid in descendant_mids}
-    with open(deckd / MODELS_FILE, "w", encoding=UTF8) as f:
-        json.dump(deck_models, f, ensure_ascii=False, indent=4, sort_keys=True)
-
-
-
-def write_note(col: anki.collection.Collection, targetd: Dir, deckmap: dict[str, Deck], colnote: ColNote) ‑> File -
-
-
-
- -Expand source code - -
@curried
-@beartype
-def write_note(
-    col: Collection,
-    targetd: Dir,
-    deckmap: Dict[str, Deck],
-    colnote: ColNote,
-) -> File:
-    decknames = set(map(lambda c: c.col.decks.name(c.did), colnote.n.cards()))
-    sortf = colnote.sfld
-    if len(decknames) == 0:
-        raise ValueError(f"No cards for note: {sortf}")
-    if len(decknames) > 1:
-        raise ValueError(f"Cards for note {sortf} are in distinct decks: {decknames}")
-    fullname = decknames.pop()
-    parts = fullname.split("::")
-    if "_media" in parts:
-        raise ValueError(f"Bad deck name '{fullname}' (cannot contain '_media')")
-    deck: Deck = deckmap[fullname]
-    path: NoFile = get_note_path(colnote, deck.deckd)
-    payload: str = get_note_payload(colnote)
-    return F.write(path, payload)
-
-
-
-def write_repository(col_file: File, targetdir: Dir, dotki: DotKi, media_target_dir: EmptyDir) ‑> None -
-
-

Write notes to appropriate directories in targetdir.

-
- -Expand source code - -
@beartype
-def write_repository(
-    col_file: File,
-    targetdir: Dir,
-    dotki: DotKi,
-    media_target_dir: EmptyDir,
-) -> None:
-    """Write notes to appropriate directories in `targetdir`."""
-    # Create config file.
-    config = configparser.ConfigParser()
-    config["remote"] = {"path": col_file}
-    with open(dotki.config, "w", encoding=UTF8) as config_f:
-        config.write(config_f)
-
-    # Create temp directory for htmlfield text files.
-    tempdir: EmptyDir = F.mkdtemp()
-    root: EmptyDir = F.mksubdir(tempdir, FIELD_HTML_SUFFIX)
-
-    # ColNote-containing data structure, to be passed to `write_decks()`.
-    col: Collection = M.collection(col_file)
-    nids: Iterable[int] = TQ(col.find_notes(query=""), "Notes")
-    colnotes: Dict[int, ColNote] = {nid: M.colnote(col, nid) for nid in nids}
-    media: Dict[int, Set[File]] = copy_media_files(col, media_target_dir)
-
-    write_decks(
-        col=col,
-        targetdir=targetdir,
-        colnotes=colnotes,
-        media=media,
-    )
-
-    F.rmtree(root)
-    col.close(save=False)
-
-
-
-
-
-
-
- -
- - - \ No newline at end of file diff --git a/docs/maybes.html b/docs/maybes.html deleted file mode 100644 index 7da35ec5..00000000 --- a/docs/maybes.html +++ /dev/null @@ -1,1324 +0,0 @@ - - - - - - -ki.maybes API documentation - - - - - - - - - - - - - - -
-
-
-

Module ki.maybes

-
-
-

Factory functions for safely handling errors in type construction.
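For illustration only, a rough sketch of how these factory functions are typically used; the paths are hypothetical and this snippet is not part of the module source:

import ki.maybes as M
from pathlib import Path

# File: the path must already exist, otherwise MissingFileError is raised.
col_file = M.xfile(Path("collection.anki2"))
# NoFile: the path must not exist yet, but its parent directory must.
target = M.nofile(Path("decks") / "note.md")

On failure these raise descriptive errors (MissingFileError, ExpectedNonexistentPathError, and so on) instead of returning values of the wrong type.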

-
- -Expand source code - -
#!/usr/bin/env python3
-"""Factory functions for safely handling errors in type construction."""
-
-# pylint: disable=invalid-name, missing-class-docstring, broad-except
-# pylint: disable=too-many-return-statements, too-many-lines, import-self
-# pylint: disable=no-value-for-parameter
-
-import re
-import traceback
-import configparser
-from pathlib import Path
-
-import git
-from lark import Lark
-from beartype import beartype
-from beartype.typing import Union, Dict, Any, List, Tuple, Iterable
-
-import anki
-from anki.decks import DeckTreeNode
-from anki.errors import NotFoundError
-from anki.collection import Collection
-
-import ki.maybes as M
-import ki.functional as F
-from ki.types import (
-    MODELS_FILE,
-    File,
-    Dir,
-    EmptyDir,
-    NoPath,
-    NoFile,
-    Link,
-    PseudoFile,
-    KiRepo,
-    KiRev,
-    Rev,
-    Template,
-    Field,
-    ColNote,
-    Deck,
-    Root,
-    DotKi,
-    PlannedLink,
-    Notetype,
-    Submodule,
-    NotetypeKeyError,
-    UnnamedNotetypeError,
-    MissingFieldOrdinalError,
-    MissingNoteIdError,
-    NoteFieldKeyError,
-    MissingFileError,
-    MissingDirectoryError,
-    ExpectedFileButGotDirectoryError,
-    ExpectedDirectoryButGotFileError,
-    ExpectedEmptyDirectoryButGotNonEmptyDirectoryError,
-    ExpectedNonexistentPathError,
-    StrangeExtantPathError,
-    NotKiRepoError,
-    GitRefNotFoundError,
-    GitHeadRefNotFoundError,
-    GitFileModeParseError,
-    AnkiAlreadyOpenError,
-)
-from ki.transformer import NoteTransformer
-
-curried = F.curried
-
-KI = ".ki"
-GIT = F.GIT
-MEDIA = "_media"
-GITIGNORE_FILE = ".gitignore"
-GITMODULES_FILE = F.GITMODULES_FILE
-
-CONFIG_FILE = "config"
-HASHES_FILE = "hashes"
-BACKUPS_DIR = "backups"
-
-REMOTE_CONFIG_SECTION = "remote"
-COLLECTION_FILE_PATH_CONFIG_FIELD = "path"
-
-
-BACKUPS_DIR_INFO = """
-This is the '.ki/backups' directory, used to store backups of the '.anki2'
-collection database file before ki overwrites it during a push. It may be
-missing because the current ki repository has become corrupted.
-"""
-
-CONFIG_FILE_INFO = """
-This is the '.ki/config' file, used to store the path to a '.anki2' collection
-database file. It may be missing because the current ki repository has become
-corrupted.
-"""
-
-HASHES_FILE_INFO = """
-This is the '.ki/hashes' file, used to store recent md5sums of the '.anki2'
-collection database file, which allow ki to determine when updates should be
-rejected, i.e. when the user must pull remote changes before they can push
-local ones. It may be missing because the current ki repository has become
-corrupted.
-"""
-
-MODELS_FILE_INFO = f"""
-This is the top-level '{MODELS_FILE}' file, which contains serialized notetypes
-for all notes in the current repository. Ki should always create this during
-cloning. If it has been manually deleted, try reverting to an earlier commit.
-Otherwise, it may indicate that the repository has become corrupted.
-"""
-
-COL_FILE_INFO = """
-This is the '.anki2' database file that contains all the data for a user's
-collection. This path was contained in the '.ki/config' file, indicating that
-the collection this repository previously referred to has been moved or
-deleted. The path can be manually fixed by editing the '.ki/config' file.
-"""
-
-
-# MAYBES
-
-
-@beartype
-def nopath(path: Path) -> NoPath:
-    """
-    Maybe convert a path to a NoPath, i.e. a path that did not exist at
-    resolve-time, which is when this function was called.
-    """
-    path = path.resolve()
-    if path.exists():
-        raise ExpectedNonexistentPathError(path)
-    return NoPath(path)
-
-
-@beartype
-def nofile(path: Path) -> NoFile:
-    """
-    Maybe convert a path to a NoPath, i.e. a path that did not exist at
-    resolve-time, which is when this function was called.
-    """
-    path = path.resolve()
-    path = M.nopath(path)
-    M.xdir(path.parent)
-    return NoFile(path)
-
-
-@beartype
-def xfile(path: Path, info: str = "") -> File:
-    """
-    Attempt to instantiate a File.
-    """
-    # Resolve path.
-    path = path.resolve()
-
-    # Check that path exists and is a file.
-    if not path.exists():
-        raise MissingFileError(path, info)
-    if path.is_dir():
-        raise ExpectedFileButGotDirectoryError(path, info)
-    if not path.is_file():
-        raise StrangeExtantPathError(path, info)
-
-    # Must be an extant file.
-    return File(path)
-
-
-@beartype
-def xdir(path: Path, info: str = "") -> Dir:
-    """
-    Attempt to instantiate a Dir.
-    """
-    # Resolve path.
-    path = path.resolve()
-
-    # Check that path exists and is a directory.
-    if not path.exists():
-        raise MissingDirectoryError(path, info)
-    if path.is_dir():
-        return Dir(path)
-    if path.is_file():
-        raise ExpectedDirectoryButGotFileError(path, info)
-    raise StrangeExtantPathError(path, info)
-
-
-@beartype
-def emptydir(path: Path) -> Dir:
-    """
-    Attempt to instantiate an empty Dir.
-    """
-    # Check if it's an extant directory.
-    directory: Dir = M.xdir(path)
-    if F.is_empty(directory):
-        return EmptyDir(Path(directory).resolve())
-    raise ExpectedEmptyDirectoryButGotNonEmptyDirectoryError(directory)
-
-
-@beartype
-def repo(root: Dir) -> git.Repo:
-    """Read a git repo safely."""
-    try:
-        repository = git.Repo(root)
-    except git.InvalidGitRepositoryError as err:
-        # TODO: Make this error more descriptive. It currently sucks. A test
-        # should be written for 'M.kirepo()' in which we return this error.
-        raise err
-    return repository
-
-
-@beartype
-def kirepo(cwd: Dir) -> KiRepo:
-    """Get the containing ki repository of `path`."""
-    current = cwd
-
-    while not F.is_root(current):
-        kid = F.chk(current / KI)
-        if isinstance(kid, Dir):
-            break
-        current = F.parent(current)
-
-    if F.is_root(current):
-        raise NotKiRepoError()
-
-    # Root directory and ki directory of repo now guaranteed to exist.
-    root = current
-    repository: git.Repo = M.repo(root)
-
-    # Check that relevant files in .ki/ subdirectory exist.
-    backups_dir = M.xdir(kid / BACKUPS_DIR, info=BACKUPS_DIR_INFO)
-    config_file = M.xfile(kid / CONFIG_FILE, info=CONFIG_FILE_INFO)
-    hashes_file = M.xfile(kid / HASHES_FILE, info=HASHES_FILE_INFO)
-    models_file = M.xfile(root / MODELS_FILE, info=MODELS_FILE_INFO)
-
-    # Check that collection file exists.
-    config = configparser.ConfigParser()
-    config.read(config_file)
-    col_file = Path(config[REMOTE_CONFIG_SECTION][COLLECTION_FILE_PATH_CONFIG_FIELD])
-    col_file = M.xfile(col_file, info=COL_FILE_INFO)
-
-    return KiRepo(
-        repository,
-        root,
-        kid,
-        col_file,
-        backups_dir,
-        config_file,
-        hashes_file,
-        models_file,
-    )
-
-
-@beartype
-def rev(repository: git.Repo, sha: str) -> Rev:
-    """Validate a commit SHA against a repository and return a `Rev`."""
-    if not F.rev_exists(repository, sha):
-        raise GitRefNotFoundError(repository, sha)
-    return Rev(repository, sha)
-
-
-@beartype
-def head(repository: git.Repo) -> Rev:
-    """Return a `Rev` for HEAD of current branch."""
-    # GitPython raises a ValueError when references don't exist.
-    try:
-        r = Rev(repository, repository.head.commit.hexsha)
-    except ValueError as err:
-        raise GitHeadRefNotFoundError(repository, err) from err
-    return r
-
-
-@beartype
-def head_ki(kirepository: KiRepo) -> KiRev:
-    """Return a `KiRev` for HEAD of current branch."""
-    # GitPython raises a ValueError when references don't exist.
-    try:
-        r = KiRev(kirepository, kirepository.repo.head.commit.hexsha)
-    except ValueError as err:
-        raise GitHeadRefNotFoundError(kirepository.repo, err) from err
-    return r
-
-
-@beartype
-def collection(col_file: File) -> Collection:
-    """Open a collection or raise a pretty exception."""
-    # We hold cwd constant (otherwise Anki changes it).
-    cwd: Dir = F.cwd()
-    try:
-        col = Collection(col_file)
-    except anki.errors.DBError as err:
-        raise AnkiAlreadyOpenError(str(err)) from err
-    finally:
-        F.chdir(cwd)
-    return col
-
-
-@beartype
-def hardlink(l: Link) -> File:
-    """Replace a symlink with its target."""
-    # Treat true POSIX symlink case.
-    tgt = F.chk(l.resolve())
-    return F.copyfile(tgt, l)
-
-
-@beartype
-def filemode(file: Union[File, Dir, PseudoFile, Link]) -> int:
-    """Get git file mode."""
-    try:
-        # We must search from file upwards in case inside submodule.
-        root_repo = git.Repo(file, search_parent_directories=True)
-        out = root_repo.git.ls_files(["-s", str(file)])
-
-        # Treat case where file is untracked.
-        if out == "":
-            return -1
-
-        mode: int = int(out.split()[0])
-    except Exception as err:
-        raise GitFileModeParseError(file, out) from err
-    return mode
-
-
-@beartype
-def template(t: Dict[str, Any]) -> Template:
-    """Construct a template."""
-    # pylint: disable=redefined-builtin
-    name, qfmt, afmt, ord = t["name"], t["qfmt"], t["afmt"], t["ord"]
-    return Template(name=name, qfmt=qfmt, afmt=afmt, ord=ord)
-
-
-@beartype
-def field(fld: Dict[str, Any]) -> Field:
-    """Construct a field."""
-    return Field(name=fld["name"], ord=fld["ord"])
-
-
-@beartype
-def notetype(nt: Dict[str, Any]) -> Notetype:
-    """
-    Convert an Anki NotetypeDict into a Notetype dataclass.
-
-    Anki returns objects of type `NotetypeDict` (see pylib/anki/models.py)
-    when you call a method like `col.models.all()`. This is a dictionary
-    mapping strings to various stuff, and we read all its data into a python
-    dataclass here so that we can access it safely. Since we don't expect Anki
-    to ever give us 'invalid' notetypes (since we define 'valid' as being
-    processable by Anki), we return an exception if the parse fails.
-
-    Note on naming convention: Below, abbreviated variable names represent
-    dicts coming from Anki, like `nt: NotetypeDict` or `fld: FieldDict`.
-    Full words like `field: Field` represent ki dataclasses. The parameters
-    of the dataclasses, however, use abbreviations for consistency with Anki
-    map keys.
-    """
-    # If we can't even read the name of the notetype, then we can't print out a
-    # nice error message in the event of a `KeyError`. So we have to print out
-    # a different error message saying that the notetype doesn't have a name
-    # field.
-    try:
-        nt["name"]
-    except KeyError as err:
-        raise UnnamedNotetypeError(nt) from err
-    try:
-        fields: Dict[int, Field] = {fld["ord"]: M.field(fld) for fld in nt["flds"]}
-        if nt["sortf"] not in fields:
-            raise MissingFieldOrdinalError(ord=nt["sortf"], model=nt["name"])
-        return Notetype(
-            id=nt["id"],
-            name=nt["name"],
-            type=nt["type"],
-            flds=list(fields.values()),
-            tmpls=list(map(M.template, nt["tmpls"])),
-            sortf=fields[nt["sortf"]],
-            dict=nt,
-        )
-    except KeyError as err:
-        raise NotetypeKeyError(key=str(err), name=str(nt["name"])) from err
-
-
-@beartype
-def colnote(col: Collection, nid: int) -> ColNote:
-    """Get a dataclass representation of an Anki note."""
-    try:
-        note = col.get_note(nid)
-    except NotFoundError as err:
-        raise MissingNoteIdError(nid) from err
-    nt: Notetype = M.notetype(note.note_type())
-
-    # Get sort field content. See comment where we subscript in the same way in
-    # `push_note()`.
-    try:
-        sfld: str = note[nt.sortf.name]
-    except KeyError as err:
-        raise NoteFieldKeyError(str(err), nid) from err
-
-    # TODO: Remove implicit assumption that all cards are in the same deck, and
-    # work with cards instead of notes.
-    try:
-        deck = col.decks.name(note.cards()[0].did)
-    except IndexError as err:
-        F.red(f"{note.cards() = }")
-        F.red(f"{note.guid = }")
-        F.red(f"{note.id = }")
-        raise err
-    return ColNote(
-        n=note,
-        new=False,
-        deck=deck,
-        title="",
-        markdown=False,
-        notetype=nt,
-        sfld=sfld,
-    )
-
-
-@beartype
-def deckd(deck_name: str, targetdir: Dir) -> Dir:
-    """
-    Construct path to deck directory and create it, allowing the case in which
-    the directory already exists because we already created one of its
-    children, in which case this function is a no-op.
-    """
-    # Strip leading periods so we don't get hidden folders.
-    components = deck_name.split("::")
-    components = [re.sub(r"^\.", r"", comp) for comp in components]
-    components = [re.sub(r"/", r"-", comp) for comp in components]
-    deck_path = Path(targetdir, *components)
-    return F.force_mkdir(deck_path)
-
-
-@curried
-@beartype
-def tree(col: Collection, targetd: Dir, root: DeckTreeNode) -> Union[Root, Deck]:
-    """Get the deck directory and did for a decknode."""
-    did = root.deck_id
-    name = col.decks.name(did)
-    children: List[Deck] = list(map(M.tree(col, targetd), root.children))
-    if root.deck_id == 0:
-        deckdir, mediadir = None, None
-        return Root(
-            did=did,
-            node=root,
-            deckd=None,
-            mediad=None,
-            fullname=name,
-            children=children,
-        )
-    deckdir = M.deckd(name, targetd)
-    mediadir: Dir = F.force_mkdir(deckdir / MEDIA)
-    return Deck(
-        did=did,
-        node=root,
-        deckd=deckdir,
-        mediad=mediadir,
-        fullname=name,
-        children=children,
-    )
-
-
-@curried
-@beartype
-def link(targetd: Dir, l: PlannedLink) -> None:
-    """Create the symlink `l`."""
-    distance = len(l.link.parent.relative_to(targetd).parts)
-    target: Path = Path("../" * distance) / l.tgt.relative_to(targetd)
-    try:
-        F.symlink(l.link, target)
-    except OSError as _:
-        trace = traceback.format_exc(limit=3)
-        F.yellow(f"Failed to create symlink '{l.link}' -> '{target}'\n{trace}")
-
-
-@beartype
-def empty_kirepo(root: EmptyDir) -> Tuple[EmptyDir, EmptyDir]:
-    """Initialize subdirs for a ki repo."""
-    kidir = F.mksubdir(root, Path(KI))
-    mediadir = F.mksubdir(EmptyDir(root), Path(MEDIA))
-    return kidir, mediadir
-
-
-@beartype
-def dotki(kidir: EmptyDir) -> DotKi:
-    """Create empty metadata files in `.ki/`."""
-    config = F.touch(kidir, CONFIG_FILE)
-    backups = F.mksubdir(kidir, Path(BACKUPS_DIR))
-    return DotKi(config=config, backups=backups)
-
-
-@curried
-@beartype
-def submodule(parent_repo: git.Repo, sm: git.Submodule) -> Submodule:
-    """
-    Construct a map that sends submodule relative roots, that is, the relative
-    path of a submodule root directory to the top-level root directory of the
-    ki repository, to `git.Repo` objects for each submodule.
-    """
-    sm_repo: git.Repo = sm.module()
-    sm_root: Dir = F.root(sm_repo)
-    sm_rel_root: Path = sm_root.relative_to(F.root(parent_repo))
-    try:
-        branch = sm_repo.active_branch.name
-    except TypeError:
-        h: git.Head = next(iter(sm_repo.branches))
-        branch = h.name
-    return Submodule(sm=sm, sm_repo=sm_repo, rel_root=sm_rel_root, branch=branch)
-
-
-@beartype
-def submodules(r: git.Repo) -> Dict[Path, Submodule]:
-    """Map submodule relative roots to `Submodule`s."""
-    sms: Iterable[git.Submodule] = r.submodules
-    sms = filter(lambda sm: sm.exists() and sm.module_exists(), sms)
-    subs: Iterable[Submodule] = map(M.submodule(r), sms)
-    return {s.rel_root: s for s in subs}
-
-
-@beartype
-def gitcopy(r: git.Repo, remote_root: Dir, unsub: bool) -> git.Repo:
-    """Replace all files in `r` with contents of `remote_root`."""
-    git_copy = F.copytree(F.gitd(r), F.chk(F.mkdtemp() / "GIT"))
-    r.close()
-    root: NoFile = F.rmtree(F.root(r))
-    del r
-    root: Dir = F.copytree(remote_root, root)
-
-    r: git.Repo = M.repo(root)
-    if unsub:
-        r = F.unsubmodule(r)
-    gitd: NoPath = F.rmtree(F.gitd(r))
-    del r
-    F.copytree(git_copy, F.chk(gitd))
-
-    # Note that we do not commit, so changes are in working tree.
-    r: git.Repo = M.repo(root)
-    return r
-
-
-@beartype
-def parser_and_transformer() -> Tuple[Lark, NoteTransformer]:
-    """Read grammar."""
-    # TODO: Should we assume this always exists? A nice error message should be
-    # printed on initialization if the grammar file is missing. No computation
-    # should be done, and none of the click commands should work.
-    grammar_path = Path(__file__).resolve().parent / "grammar.lark"
-    grammar = grammar_path.read_text(encoding="UTF-8")
-
-    # Instantiate parser.
-    parser = Lark(grammar, start="note", parser="lalr")
-    transformer = NoteTransformer()
-    return parser, transformer
-
-
-
-
-
-
-
-

Functions

-
-
-def collection(col_file: File) ‑> anki.collection.Collection -
-
-

Open a collection or raise a pretty exception.

-
- -Expand source code - -
@beartype
-def collection(col_file: File) -> Collection:
-    """Open a collection or raise a pretty exception."""
-    # We hold cwd constant (otherwise Anki changes it).
-    cwd: Dir = F.cwd()
-    try:
-        col = Collection(col_file)
-    except anki.errors.DBError as err:
-        raise AnkiAlreadyOpenError(str(err)) from err
-    finally:
-        F.chdir(cwd)
-    return col
-
-
-
-def colnote(col: anki.collection.Collection, nid: int) ‑> ColNote -
-
-

Get a dataclass representation of an Anki note.

-
- -Expand source code - -
@beartype
-def colnote(col: Collection, nid: int) -> ColNote:
-    """Get a dataclass representation of an Anki note."""
-    try:
-        note = col.get_note(nid)
-    except NotFoundError as err:
-        raise MissingNoteIdError(nid) from err
-    nt: Notetype = M.notetype(note.note_type())
-
-    # Get sort field content. See comment where we subscript in the same way in
-    # `push_note()`.
-    try:
-        sfld: str = note[nt.sortf.name]
-    except KeyError as err:
-        raise NoteFieldKeyError(str(err), nid) from err
-
-    # TODO: Remove implicit assumption that all cards are in the same deck, and
-    # work with cards instead of notes.
-    try:
-        deck = col.decks.name(note.cards()[0].did)
-    except IndexError as err:
-        F.red(f"{note.cards() = }")
-        F.red(f"{note.guid = }")
-        F.red(f"{note.id = }")
-        raise err
-    return ColNote(
-        n=note,
-        new=False,
-        deck=deck,
-        title="",
-        markdown=False,
-        notetype=nt,
-        sfld=sfld,
-    )
-
-
-
-def deckd(deck_name: str, targetdir: Dir) ‑> Dir -
-
-

Construct path to deck directory and create it, allowing the case in which the directory already exists because we already created one of its children, in which case this function is a no-op.

-
- -Expand source code - -
@beartype
-def deckd(deck_name: str, targetdir: Dir) -> Dir:
-    """
-    Construct path to deck directory and create it, allowing the case in which
-    the directory already exists because we already created one of its
-    children, in which case this function is a no-op.
-    """
-    # Strip leading periods so we don't get hidden folders.
-    components = deck_name.split("::")
-    components = [re.sub(r"^\.", r"", comp) for comp in components]
-    components = [re.sub(r"/", r"-", comp) for comp in components]
-    deck_path = Path(targetdir, *components)
-    return F.force_mkdir(deck_path)
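As a rough worked example of the rules above (the deck name and target directory are hypothetical):

# "Spanish::.Verbs::ser/estar" splits into ["Spanish", ".Verbs", "ser/estar"];
# the leading period is stripped and the slash becomes a dash, so this call
# creates and returns <targetdir>/Spanish/Verbs/ser-estar.
deckd("Spanish::.Verbs::ser/estar", targetdir)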
-
-
-
-def dotki(kidir: EmptyDir) ‑> DotKi -
-
-

Create empty metadata files in .ki/.

-
- -Expand source code - -
@beartype
-def dotki(kidir: EmptyDir) -> DotKi:
-    """Create empty metadata files in `.ki/`."""
-    config = F.touch(kidir, CONFIG_FILE)
-    backups = F.mksubdir(kidir, Path(BACKUPS_DIR))
-    return DotKi(config=config, backups=backups)
-
-
-
-def empty_kirepo(root: EmptyDir) ‑> tuple[EmptyDir, EmptyDir] -
-
-

Initialize subdirs for a ki repo.

-
- -Expand source code - -
@beartype
-def empty_kirepo(root: EmptyDir) -> Tuple[EmptyDir, EmptyDir]:
-    """Initialize subdirs for a ki repo."""
-    kidir = F.mksubdir(root, Path(KI))
-    mediadir = F.mksubdir(EmptyDir(root), Path(MEDIA))
-    return kidir, mediadir
-
-
-
-def emptydir(path: pathlib.Path) ‑> Dir -
-
-

Attempt to instantiate an empty Dir.

-
- -Expand source code - -
@beartype
-def emptydir(path: Path) -> Dir:
-    """
-    Attempt to instantiate an empty Dir.
-    """
-    # Check if it's an extant directory.
-    directory: Dir = M.xdir(path)
-    if F.is_empty(directory):
-        return EmptyDir(Path(directory).resolve())
-    raise ExpectedEmptyDirectoryButGotNonEmptyDirectoryError(directory)
-
-
-
-def field(fld: dict[str, typing.Any]) ‑> Field -
-
-

Construct a field.

-
- -Expand source code - -
@beartype
-def field(fld: Dict[str, Any]) -> Field:
-    """Construct a field."""
-    return Field(name=fld["name"], ord=fld["ord"])
-
-
-
-def filemode(file: Union[File, Dir, PseudoFile, Link]) ‑> int -
-
-

Get git file mode.

-
- -Expand source code - -
@beartype
-def filemode(file: Union[File, Dir, PseudoFile, Link]) -> int:
-    """Get git file mode."""
-    try:
-        # We must search from file upwards in case inside submodule.
-        root_repo = git.Repo(file, search_parent_directories=True)
-        out = root_repo.git.ls_files(["-s", str(file)])
-
-        # Treat case where file is untracked.
-        if out == "":
-            return -1
-
-        mode: int = int(out.split()[0])
-    except Exception as err:
-        raise GitFileModeParseError(file, out) from err
-    return mode
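For orientation, a sketch of the `git ls-files -s` output being parsed here; the blob hash and path are hypothetical:

# Illustrative only: mode, blob hash, stage number, then the path.
out = "100644 83baae61804e65cc73a7201a7252750c76066a30 0\tnote.md"
mode = int(out.split()[0])  # -> 100644; untracked paths give out == "" instead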
-
-
-
-def gitcopy(r: git.repo.base.Repo, remote_root: Dir, unsub: bool) ‑> git.repo.base.Repo -
-
-

Replace all files in r with contents of remote_root.

-
- -Expand source code - -
@beartype
-def gitcopy(r: git.Repo, remote_root: Dir, unsub: bool) -> git.Repo:
-    """Replace all files in `r` with contents of `remote_root`."""
-    git_copy = F.copytree(F.gitd(r), F.chk(F.mkdtemp() / "GIT"))
-    r.close()
-    root: NoFile = F.rmtree(F.root(r))
-    del r
-    root: Dir = F.copytree(remote_root, root)
-
-    r: git.Repo = M.repo(root)
-    if unsub:
-        r = F.unsubmodule(r)
-    gitd: NoPath = F.rmtree(F.gitd(r))
-    del r
-    F.copytree(git_copy, F.chk(gitd))
-
-    # Note that we do not commit, so changes are in working tree.
-    r: git.Repo = M.repo(root)
-    return r
-
-
-def hardlink(l: Link) ‑> File -
-

Replace a symlink with its target.

-
- -Expand source code - -
@beartype
-def hardlink(l: Link) -> File:
-    """Replace a symlink with its target."""
-    # Treat true POSIX symlink case.
-    tgt = F.chk(l.resolve())
-    return F.copyfile(tgt, l)
-
-
-
-def head(repository: git.repo.base.Repo) ‑> Rev -
-
-

Return a Rev for HEAD of current branch.

-
- -Expand source code - -
@beartype
-def head(repository: git.Repo) -> Rev:
-    """Return a `Rev` for HEAD of current branch."""
-    # GitPython raises a ValueError when references don't exist.
-    try:
-        r = Rev(repository, repository.head.commit.hexsha)
-    except ValueError as err:
-        raise GitHeadRefNotFoundError(repository, err) from err
-    return r
-
-
-
-def head_ki(kirepository: KiRepo) ‑> KiRev -
-
-

Return a KiRev for HEAD of current branch.

-
- -Expand source code - -
@beartype
-def head_ki(kirepository: KiRepo) -> KiRev:
-    """Return a `KiRev` for HEAD of current branch."""
-    # GitPython raises a ValueError when references don't exist.
-    try:
-        r = KiRev(kirepository, kirepository.repo.head.commit.hexsha)
-    except ValueError as err:
-        raise GitHeadRefNotFoundError(kirepository.repo, err) from err
-    return r
-
-
-
-def kirepo(cwd: Dir) ‑> KiRepo -
-
-

Get the containing ki repository of path.

-
- -Expand source code - -
@beartype
-def kirepo(cwd: Dir) -> KiRepo:
-    """Get the containing ki repository of `path`."""
-    current = cwd
-
-    while not F.is_root(current):
-        kid = F.chk(current / KI)
-        if isinstance(kid, Dir):
-            break
-        current = F.parent(current)
-
-    if F.is_root(current):
-        raise NotKiRepoError()
-
-    # Root directory and ki directory of repo now guaranteed to exist.
-    root = current
-    repository: git.Repo = M.repo(root)
-
-    # Check that relevant files in .ki/ subdirectory exist.
-    backups_dir = M.xdir(kid / BACKUPS_DIR, info=BACKUPS_DIR_INFO)
-    config_file = M.xfile(kid / CONFIG_FILE, info=CONFIG_FILE_INFO)
-    hashes_file = M.xfile(kid / HASHES_FILE, info=HASHES_FILE_INFO)
-    models_file = M.xfile(root / MODELS_FILE, info=MODELS_FILE_INFO)
-
-    # Check that collection file exists.
-    config = configparser.ConfigParser()
-    config.read(config_file)
-    col_file = Path(config[REMOTE_CONFIG_SECTION][COLLECTION_FILE_PATH_CONFIG_FIELD])
-    col_file = M.xfile(col_file, info=COL_FILE_INFO)
-
-    return KiRepo(
-        repository,
-        root,
-        kid,
-        col_file,
-        backups_dir,
-        config_file,
-        hashes_file,
-        models_file,
-    )
-
-
-def link(targetd: Dir, l: PlannedLink) ‑> None -
-

Create the symlink l.

-
- -Expand source code - -
@curried
-@beartype
-def link(targetd: Dir, l: PlannedLink) -> None:
-    """Create the symlink `l`."""
-    distance = len(l.link.parent.relative_to(targetd).parts)
-    target: Path = Path("../" * distance) / l.tgt.relative_to(targetd)
-    try:
-        F.symlink(l.link, target)
-    except OSError as _:
-        trace = traceback.format_exc(limit=3)
-        F.yellow(f"Failed to create symlink '{l.link}' -> '{target}'\n{trace}")
-
-
-
-def nofile(path: pathlib.Path) ‑> NoFile -
-
-

Maybe convert a path to a NoPath, i.e. a path that did not exist at resolve-time, which is when this function was called.

-
- -Expand source code - -
@beartype
-def nofile(path: Path) -> NoFile:
-    """
-    Maybe convert a path to a NoPath, i.e. a path that did not exist at
-    resolve-time, which is when this function was called.
-    """
-    path = path.resolve()
-    path = M.nopath(path)
-    M.xdir(path.parent)
-    return NoFile(path)
-
-
-
-def nopath(path: pathlib.Path) ‑> NoPath -
-
-

Maybe convert a path to a NoPath, i.e. a path that did not exist at resolve-time, which is when this function was called.

-
- -Expand source code - -
@beartype
-def nopath(path: Path) -> NoPath:
-    """
-    Maybe convert a path to a NoPath, i.e. a path that did not exist at
-    resolve-time, which is when this function was called.
-    """
-    path = path.resolve()
-    if path.exists():
-        raise ExpectedNonexistentPathError(path)
-    return NoPath(path)
-
-
-
-def notetype(nt: dict[str, typing.Any]) ‑> Notetype -
-
-

Convert an Anki NotetypeDict into a Notetype dataclass.

-

Anki returns objects of type NotetypeDict (see pylib/anki/models.py) when you call a method like col.models.all(). This is a dictionary mapping strings to various stuff, and we read all its data into a python dataclass here so that we can access it safely. Since we don't expect Anki to ever give us 'invalid' notetypes (since we define 'valid' as being processable by Anki), we return an exception if the parse fails.

-

Note on naming convention: Below, abbreviated variable names represent dicts coming from Anki, like nt: NotetypeDict or fld: FieldDict. Full words like field: Field represent ki dataclasses. The parameters of the dataclasses, however, use abbreviations for consistency with Anki map keys.

-
- -Expand source code - -
@beartype
-def notetype(nt: Dict[str, Any]) -> Notetype:
-    """
-    Convert an Anki NotetypeDict into a Notetype dataclass.
-
-    Anki returns objects of type `NotetypeDict` (see pylib/anki/models.py)
-    when you call a method like `col.models.all()`. This is a dictionary
-    mapping strings to various stuff, and we read all its data into a python
-    dataclass here so that we can access it safely. Since we don't expect Anki
-    to ever give us 'invalid' notetypes (since we define 'valid' as being
-    processable by Anki), we return an exception if the parse fails.
-
-    Note on naming convention: Below, abbreviated variable names represent
-    dicts coming from Anki, like `nt: NotetypeDict` or `fld: FieldDict`.
-    Full words like `field: Field` represent ki dataclasses. The parameters
-    of the dataclasses, however, use abbreviations for consistency with Anki
-    map keys.
-    """
-    # If we can't even read the name of the notetype, then we can't print out a
-    # nice error message in the event of a `KeyError`. So we have to print out
-    # a different error message saying that the notetype doesn't have a name
-    # field.
-    try:
-        nt["name"]
-    except KeyError as err:
-        raise UnnamedNotetypeError(nt) from err
-    try:
-        fields: Dict[int, Field] = {fld["ord"]: M.field(fld) for fld in nt["flds"]}
-        if nt["sortf"] not in fields:
-            raise MissingFieldOrdinalError(ord=nt["sortf"], model=nt["name"])
-        return Notetype(
-            id=nt["id"],
-            name=nt["name"],
-            type=nt["type"],
-            flds=list(fields.values()),
-            tmpls=list(map(M.template, nt["tmpls"])),
-            sortf=fields[nt["sortf"]],
-            dict=nt,
-        )
-    except KeyError as err:
-        raise NotetypeKeyError(key=str(err), name=str(nt["name"])) from err
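For illustration, the minimal shape of an `nt` dict that this parser reads; real Anki NotetypeDicts carry many more keys, and the values below are hypothetical:

# Illustrative only: just the keys that notetype(), field(), and template() touch.
nt = {
    "id": 1645010000000,
    "name": "Basic",
    "type": 0,
    "sortf": 0,
    "flds": [{"name": "Front", "ord": 0}, {"name": "Back", "ord": 1}],
    "tmpls": [{"name": "Card 1", "qfmt": "{{Front}}", "afmt": "{{Back}}", "ord": 0}],
}
notetype(nt)  # -> a Notetype dataclass whose sortf is the "Front" field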
-
-
-
-def parser_and_transformer() ‑> tuple[lark.lark.Lark, NoteTransformer] -
-
-

Read grammar.

-
- -Expand source code - -
@beartype
-def parser_and_transformer() -> Tuple[Lark, NoteTransformer]:
-    """Read grammar."""
-    # TODO: Should we assume this always exists? A nice error message should be
-    # printed on initialization if the grammar file is missing. No computation
-    # should be done, and none of the click commands should work.
-    grammar_path = Path(__file__).resolve().parent / "grammar.lark"
-    grammar = grammar_path.read_text(encoding="UTF-8")
-
-    # Instantiate parser.
-    parser = Lark(grammar, start="note", parser="lalr")
-    transformer = NoteTransformer()
-    return parser, transformer
-
-
-
-def repo(root: Dir) ‑> git.repo.base.Repo -
-
-

Read a git repo safely.

-
- -Expand source code - -
@beartype
-def repo(root: Dir) -> git.Repo:
-    """Read a git repo safely."""
-    try:
-        repository = git.Repo(root)
-    except git.InvalidGitRepositoryError as err:
-        # TODO: Make this error more descriptive. It currently sucks. A test
-        # should be written for 'M.kirepo()' in which we return this error.
-        raise err
-    return repository
-
-
-
-def rev(repository: git.repo.base.Repo, sha: str) ‑> Rev -
-
-

Validate a commit SHA against a repository and return a Rev.

-
- -Expand source code - -
@beartype
-def rev(repository: git.Repo, sha: str) -> Rev:
-    """Validate a commit SHA against a repository and return a `Rev`."""
-    if not F.rev_exists(repository, sha):
-        raise GitRefNotFoundError(repository, sha)
-    return Rev(repository, sha)
-
-
-
-def submodule(parent_repo: git.repo.base.Repo, sm: git.objects.submodule.base.Submodule) ‑> Submodule -
-
-

Construct a map that sends submodule relative roots, that is, the relative path of a submodule root directory to the top-level root directory of the ki repository, to git.Repo objects for each submodule.

-
- -Expand source code - -
@curried
-@beartype
-def submodule(parent_repo: git.Repo, sm: git.Submodule) -> Submodule:
-    """
-    Construct a map that sends submodule relative roots, that is, the relative
-    path of a submodule root directory to the top-level root directory of the
-    ki repository, to `git.Repo` objects for each submodule.
-    """
-    sm_repo: git.Repo = sm.module()
-    sm_root: Dir = F.root(sm_repo)
-    sm_rel_root: Path = sm_root.relative_to(F.root(parent_repo))
-    try:
-        branch = sm_repo.active_branch.name
-    except TypeError:
-        h: git.Head = next(iter(sm_repo.branches))
-        branch = h.name
-    return Submodule(sm=sm, sm_repo=sm_repo, rel_root=sm_rel_root, branch=branch)
-
-
-
-def submodules(r: git.repo.base.Repo) ‑> dict[pathlib.Path, Submodule] -
-
-

Map submodule relative roots to Submodules.

-
- -Expand source code - -
@beartype
-def submodules(r: git.Repo) -> Dict[Path, Submodule]:
-    """Map submodule relative roots to `Submodule`s."""
-    sms: Iterable[git.Submodule] = r.submodules
-    sms = filter(lambda sm: sm.exists() and sm.module_exists(), sms)
-    subs: Iterable[Submodule] = map(M.submodule(r), sms)
-    return {s.rel_root: s for s in subs}
-
-
-
-def template(t: dict[str, typing.Any]) ‑> Template -
-
-

Construct a template.

-
- -Expand source code - -
@beartype
-def template(t: Dict[str, Any]) -> Template:
-    """Construct a template."""
-    # pylint: disable=redefined-builtin
-    name, qfmt, afmt, ord = t["name"], t["qfmt"], t["afmt"], t["ord"]
-    return Template(name=name, qfmt=qfmt, afmt=afmt, ord=ord)
-
-
-
-def tree(col: anki.collection.Collection, targetd: Dir, root: anki.decks_pb2.DeckTreeNode) ‑> Union[Root, Deck] -
-
-

Get the deck directory and did for a decknode.

-
- -Expand source code - -
@curried
-@beartype
-def tree(col: Collection, targetd: Dir, root: DeckTreeNode) -> Union[Root, Deck]:
-    """Get the deck directory and did for a decknode."""
-    did = root.deck_id
-    name = col.decks.name(did)
-    children: List[Deck] = list(map(M.tree(col, targetd), root.children))
-    if root.deck_id == 0:
-        deckdir, mediadir = None, None
-        return Root(
-            did=did,
-            node=root,
-            deckd=None,
-            mediad=None,
-            fullname=name,
-            children=children,
-        )
-    deckdir = M.deckd(name, targetd)
-    mediadir: Dir = F.force_mkdir(deckdir / MEDIA)
-    return Deck(
-        did=did,
-        node=root,
-        deckd=deckdir,
-        mediad=mediadir,
-        fullname=name,
-        children=children,
-    )
-
-
-
-def xdir(path: pathlib.Path, info: str = '') ‑> Dir -
-
-

Attempt to instantiate a Dir.

-
- -Expand source code - -
@beartype
-def xdir(path: Path, info: str = "") -> Dir:
-    """
-    Attempt to instantiate a Dir.
-    """
-    # Resolve path.
-    path = path.resolve()
-
-    # Check that path exists and is a directory.
-    if not path.exists():
-        raise MissingDirectoryError(path, info)
-    if path.is_dir():
-        return Dir(path)
-    if path.is_file():
-        raise ExpectedDirectoryButGotFileError(path, info)
-    raise StrangeExtantPathError(path, info)
-
-
-
-def xfile(path: pathlib.Path, info: str = '') ‑> File -
-
-

Attempt to instantiate a File.

-
- -Expand source code - -
@beartype
-def xfile(path: Path, info: str = "") -> File:
-    """
-    Attempt to instantiate a File.
-    """
-    # Resolve path.
-    path = path.resolve()
-
-    # Check that path exists and is a file.
-    if not path.exists():
-        raise MissingFileError(path, info)
-    if path.is_dir():
-        raise ExpectedFileButGotDirectoryError(path, info)
-    if not path.is_file():
-        raise StrangeExtantPathError(path, info)
-
-    # Must be an extant file.
-    return File(path)
-
-
-
-
-
-
-
- -
- - - \ No newline at end of file diff --git a/docs/monadic.html b/docs/monadic.html deleted file mode 100644 index 2295e610..00000000 --- a/docs/monadic.html +++ /dev/null @@ -1,342 +0,0 @@ - - - - - - -ki.monadic API documentation - - - - - - - - - - - - - - -
-
-
-

Module ki.monadic

-
-
-

A function decorator to lift functions up to the category result.Result.

-
- -Expand source code - -
"""A function decorator to lift functions up to the category ``result.Result``."""
-import random
-import functools
-import itertools
-from result import Ok, Err
-from beartype import beartype
-from beartype.typing import Callable, TypeVar, NoReturn
-
-from beartype.roar._roarexc import (
-    BeartypeCallHintReturnViolation,
-    _BeartypeCallHintPepRaiseException,
-)
-from beartype._decor._error._errorsleuth import CauseSleuth
-from beartype._util.text.utiltextlabel import (
-    prefix_callable_decorated_return_value,
-)
-from beartype._util.hint.utilhinttest import die_unless_hint
-from beartype._util.text.utiltextmunge import suffix_unless_suffixed
-
-# pylint: disable=invalid-name
-
-T = TypeVar("T")
-
-PITH_NAME = "return"
-
-
-@beartype
-def raise_if_return_type_exception(
-    func: Callable[[...], T],
-    exception_prefix: str,
-    pith_value: object,
-    hint: object,
-    helper: str,
-) -> None:
-    """
-    Typecheck the return value of a function decorated with ``@monadic``.
-
-    Raise an error if ``pith_value`` doesn't match the type specified by
-    ``hint``. This is a snippet copied from the internal implementation of
-    beartype, and so it should be pinned to a version to avoid breakage when
-    the private API inevitably changes.
-    """
-
-    # If this is *NOT* the PEP 484-compliant "typing.NoReturn" type hint
-    # permitted *ONLY* as a return annotation, this is a standard type hint
-    # generally supported by both parameters and return values. In this case...
-    if hint is not NoReturn:
-        # If type hint is *NOT* a supported type hint, raise an exception.
-        die_unless_hint(hint=hint, exception_prefix=exception_prefix)
-        # Else, this type hint is supported.
-
-    # Human-readable string describing the failure of this pith to satisfy this
-    # hint if this pith fails to satisfy this hint *OR* "None" otherwise (i.e.,
-    # if this pith satisfies this hint).
-    exception_cause = CauseSleuth(
-        func=func,
-        pith=pith_value,
-        hint=hint,
-        cause_indent="",
-        exception_prefix=exception_prefix,
-        random_int=random.getrandbits(32),
-    ).get_cause_or_none()
-
-    # If this pith does *NOT* satisfy this hint...
-    if exception_cause:
-        # This failure suffixed by a period if *NOT* yet suffixed by a period.
-        exception_cause_suffixed = suffix_unless_suffixed(
-            text=exception_cause, suffix="."
-        )
-
-        # Raise an exception of the desired class embedding this cause.
-        raise BeartypeCallHintReturnViolation(  # type: ignore[misc]
-            f"{exception_prefix}violates {helper} type hint {repr(hint)}, as "
-            f"{exception_cause_suffixed}"
-        )
-
-
-@beartype
-def monadic(func: Callable[[...], T]) -> Callable[[...], T]:
-    """A function decorator to chain functions that return a ``result.Result``."""
-
-    @functools.wraps(func)
-    def decorated(*args, **kwargs) -> T:
-        """The decorated version of ``func``."""
-        # If any of the arguments are Err, return early.
-        for arg in itertools.chain(args, kwargs.values()):
-            if isinstance(arg, Err):
-                return arg
-
-        # Unpack arguments of type ``Ok`` passed to ``func()``. We pass the
-        # modified versions of ``args``, ``kwargs`` to ``func()`` because if we
-        # do not do this unpacking step, functions that are @beartype decorated
-        # underneath the @monadic decorator will ALWAYS fail to typecheck, since
-        # they are expected whatever type hint the user provided, but they are
-        # (possibly) receiving ``OkErr`` types.
-        unpack = lambda arg: arg.value if isinstance(arg, Ok) else arg
-        args = tuple(map(unpack, args))
-        kwargs = {key: unpack(arg) for key, arg in kwargs.items()}
-
-        # Call function with the OkErr-unpacked ``args`` and ``kwargs``.
-        result = func(*args, **kwargs)
-
-        # Human-readable label describing the unpacked return value.
-        exception_prefix: str = prefix_callable_decorated_return_value(
-            func=func, return_value=result.value
-        )
-
-        # If the return value is unannotated, raise an exception.
-        if PITH_NAME not in func.__annotations__:
-            raise _BeartypeCallHintPepRaiseException(f"{exception_prefix}unannotated.")
-
-        # Unfold the return type annotation of ``func``, extracting the ``Ok``
-        # return type hint and the ``Err`` return type hint.
-        ok_hint, err_hint = func.__annotations__[PITH_NAME].__args__
-        ok_ret_hint = ok_hint.__args__[0]
-        err_ret_hint = err_hint.__args__[0]
-
-        # Check return type, raising an error if the check fails. These calls
-        # are no-ops otherwise.
-        if isinstance(result, Ok):
-            raise_if_return_type_exception(
-                func, exception_prefix, result.value, ok_ret_hint, "Ok"
-            )
-        elif isinstance(result, Err):
-            raise_if_return_type_exception(
-                func, exception_prefix, result.value, err_ret_hint, "Err"
-            )
-
-        # Return the un-unpacked result, since this is what the user expects
-        # (``func()`` is decorated with @monadic, and so it is intended to return
-        # a value of type ``result.Result``).
-        return result
-
-    return decorated
-
-
-
-
-
-
-
-

Functions

-
-
-def monadic(func: collections.abc.Callable[..., ~T]) ‑> collections.abc.Callable[..., ~T] -
-
-

A function decorator to chain functions that return a result.Result.

-
- -Expand source code - -
@beartype
-def monadic(func: Callable[[...], T]) -> Callable[[...], T]:
-    """A function decorator to chain functions that return a ``result.Result``."""
-
-    @functools.wraps(func)
-    def decorated(*args, **kwargs) -> T:
-        """The decorated version of ``func``."""
-        # If any of the arguments are Err, return early.
-        for arg in itertools.chain(args, kwargs.values()):
-            if isinstance(arg, Err):
-                return arg
-
-        # Unpack arguments of type ``Ok`` passed to ``func()``. We pass the
-        # modified versions of ``args``, ``kwargs`` to ``func()`` because if we
-        # do not do this unpacking step, functions that are @beartype decorated
-        # underneath the @monadic decorator will ALWAYS fail to typecheck, since
-        # they are expected whatever type hint the user provided, but they are
-        # (possibly) receiving ``OkErr`` types.
-        unpack = lambda arg: arg.value if isinstance(arg, Ok) else arg
-        args = tuple(map(unpack, args))
-        kwargs = {key: unpack(arg) for key, arg in kwargs.items()}
-
-        # Call function with the OkErr-unpacked ``args`` and ``kwargs``.
-        result = func(*args, **kwargs)
-
-        # Human-readable label describing the unpacked return value.
-        exception_prefix: str = prefix_callable_decorated_return_value(
-            func=func, return_value=result.value
-        )
-
-        # If the return value is unannotated, raise an exception.
-        if PITH_NAME not in func.__annotations__:
-            raise _BeartypeCallHintPepRaiseException(f"{exception_prefix}unannotated.")
-
-        # Unfold the return type annotation of ``func``, extracting the ``Ok``
-        # return type hint and the ``Err`` return type hint.
-        ok_hint, err_hint = func.__annotations__[PITH_NAME].__args__
-        ok_ret_hint = ok_hint.__args__[0]
-        err_ret_hint = err_hint.__args__[0]
-
-        # Check return type, raising an error if the check fails. These calls
-        # are no-ops otherwise.
-        if isinstance(result, Ok):
-            raise_if_return_type_exception(
-                func, exception_prefix, result.value, ok_ret_hint, "Ok"
-            )
-        elif isinstance(result, Err):
-            raise_if_return_type_exception(
-                func, exception_prefix, result.value, err_ret_hint, "Err"
-            )
-
-        # Return the un-unpacked result, since this is what the user expects
-        # (``func()`` is decorated with @monadic, and so it is intended to return
-        # a value of type ``result.Result``).
-        return result
-
-    return decorated
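For illustration only, a hypothetical pair of chained functions, assuming the Ok/Err/Result types from the `result` package and that `monadic` is in scope:

# Illustrative sketch, not part of the module source.
from result import Result, Ok, Err
from beartype import beartype

@monadic
@beartype
def parse_port(s: str) -> Result[int, ValueError]:
    return Ok(int(s)) if s.isdigit() else Err(ValueError(s))

@monadic
@beartype
def check_port(port: int) -> Result[int, ValueError]:
    return Ok(port) if port < 65536 else Err(ValueError(port))

check_port(parse_port("8080"))  # Ok(8080): the Ok value is unpacked to an int
check_port(parse_port("nope"))  # Err(...): the Err short-circuits check_port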
-
-
-
-def raise_if_return_type_exception(func: collections.abc.Callable[..., ~T], exception_prefix: str, pith_value: object, hint: object, helper: str) ‑> None -
-
-

Typecheck the return value of a function decorated with @monadic.

-

Raise an error if pith_value doesn't match the type specified by hint. This is a snippet copied from the internal implementation of beartype, and so it should be pinned to a version to avoid breakage when the private API inevitably changes.

-
- -Expand source code - -
@beartype
-def raise_if_return_type_exception(
-    func: Callable[[...], T],
-    exception_prefix: str,
-    pith_value: object,
-    hint: object,
-    helper: str,
-) -> None:
-    """
-    Typecheck the return value of a function decorated with ``@monadic``.
-
-    Raise an error if ``pith_value`` doesn't match the type specified by
-    ``hint``. This is a snippet copied from the internal implementation of
-    beartype, and so it should be pinned to a version to avoid breakage when
-    the private API inevitably changes.
-    """
-
-    # If this is *NOT* the PEP 484-compliant "typing.NoReturn" type hint
-    # permitted *ONLY* as a return annotation, this is a standard type hint
-    # generally supported by both parameters and return values. In this case...
-    if hint is not NoReturn:
-        # If type hint is *NOT* a supported type hint, raise an exception.
-        die_unless_hint(hint=hint, exception_prefix=exception_prefix)
-        # Else, this type hint is supported.
-
-    # Human-readable string describing the failure of this pith to satisfy this
-    # hint if this pith fails to satisfy this hint *OR* "None" otherwise (i.e.,
-    # if this pith satisfies this hint).
-    exception_cause = CauseSleuth(
-        func=func,
-        pith=pith_value,
-        hint=hint,
-        cause_indent="",
-        exception_prefix=exception_prefix,
-        random_int=random.getrandbits(32),
-    ).get_cause_or_none()
-
-    # If this pith does *NOT* satisfy this hint...
-    if exception_cause:
-        # This failure suffixed by a period if *NOT* yet suffixed by a period.
-        exception_cause_suffixed = suffix_unless_suffixed(
-            text=exception_cause, suffix="."
-        )
-
-        # Raise an exception of the desired class embedding this cause.
-        raise BeartypeCallHintReturnViolation(  # type: ignore[misc]
-            f"{exception_prefix}violates {helper} type hint {repr(hint)}, as "
-            f"{exception_cause_suffixed}"
-        )
-
-
-
-
-
-
-
- -
- - - \ No newline at end of file diff --git a/docs/note.html b/docs/note.html deleted file mode 100644 index ad83e60b..00000000 --- a/docs/note.html +++ /dev/null @@ -1,452 +0,0 @@ - - - - - - -ki.note API documentation - - - - - - - - - - - - - - -
-
-
-

Module ki.note

-
-
-

A module containing a class for Anki notes.

-
- -Expand source code - -
#!/usr/bin/env python3
-"""A module containing a class for Anki notes."""
-import re
-import subprocess
-
-import bs4
-import anki
-import click
-import markdownify
-
-from beartype import beartype
-from beartype.typing import Dict, List
-
-from apy.anki import Note, Anki
-from apy.convert import markdown_to_html, html_to_markdown, _italize
-
-GENERATED_HTML_SENTINEL = "data-original-markdown"
-
-
-class KiNote(Note):
-    """
-    A subclass of ``apy.Note`` for applying transformations to HTML in note
-    fields. This is distinct from the anki ``Note`` class, which is accessible
-    using ``self.n``.
-
-    Parameters
-    ----------
-    a : apy.anki.Anki
-        Wrapper around Anki collection.
-    note : anki.notes.Note
-        Anki Note instance.
-    """
-
-    @beartype
-    def __init__(self, a: Anki, note: anki.notes.Note):
-
-        super().__init__(a, note)
-
-        # TODO: Remove implicit assumption that all cards are in the same deck.
-        self.deck = self.a.col.decks.name(self.n.cards()[0].did)
-
-        # Populate parsed fields.
-        self.fields: Dict[str, str] = {}
-        for key, field in self.n.items():
-            self.fields[key] = html_to_screen(field)
-
-    @beartype
-    def __repr__(self) -> str:
-        """Convert note to Markdown format"""
-        lines = self.get_header_lines()
-
-        for key, field in self.n.items():
-            lines.append("### " + key)
-            lines.append(html_to_screen(field))
-            lines.append("")
-
-        return "\n".join(lines)
-
-    @beartype
-    def get_header_lines(self) -> List[str]:
-        """Get header of markdown representation of note."""
-        lines = [
-            "## Note",
-            f"nid: {self.n.id}",
-            f"model: {self.model_name}",
-        ]
-
-        lines += [f"deck: {self.get_deck()}"]
-        lines += [f"tags: {self.get_tag_string()}"]
-
-        if not any(GENERATED_HTML_SENTINEL in field for field in self.n.values()):
-            lines += ["markdown: false"]
-
-        lines += [""]
-        return lines
-
-    @beartype
-    def get_deck(self) -> str:
-        """Return which deck the note belongs to."""
-        # TODO: Remove implicit assumption that all cards are in the same deck.
-        return self.deck
-
-    @beartype
-    def set_deck(self, deck: str) -> None:
-        """Move note to deck."""
-        newdid = self.a.col.decks.id(deck)
-        cids = [c.id for c in self.n.cards()]
-
-        if cids:
-            self.a.col.set_deck(cids, newdid)
-            self.a.modified = True
-        # TODO: Remove implicit assumption that all cards are in the same deck.
-        self.deck = self.a.col.decks.name(self.n.cards()[0].did)
-
-
-@beartype
-def html_to_screen(html: str) -> str:
-    """Convert html for printing to screen."""
-    html = re.sub(r"\<style\>.*\<\/style\>", "", html, flags=re.S)
-
-    generated = GENERATED_HTML_SENTINEL in html
-    if generated:
-        plain = html_to_markdown(html)
-        if html != markdown_to_html(plain):
-            html_clean = re.sub(r' data-original-markdown="[^"]*"', "", html)
-            plain += (
-                "\n\n### Current HTML → Markdown\n"
-                f"{markdownify.markdownify(html_clean)}"
-            )
-            plain += f"\n### Current HTML\n{html_clean}"
-    else:
-        plain = html
-
-    # For convenience: Un-escape some common LaTeX constructs
-    plain = plain.replace(r"\\\\", r"\\")
-    plain = plain.replace(r"\\{", r"\{")
-    plain = plain.replace(r"\\}", r"\}")
-    plain = plain.replace(r"\*}", r"*}")
-
-    plain = plain.replace(r"&lt;", "<")
-    plain = plain.replace(r"&gt;", ">")
-    plain = plain.replace(r"&amp;", "&")
-    plain = plain.replace(r"&nbsp;", " ")
-
-    plain = plain.replace("<br>", "\n")
-    plain = plain.replace("<br/>", "\n")
-    plain = plain.replace("<br />", "\n")
-    plain = plain.replace("<div>", "\n")
-    plain = plain.replace("</div>", "")
-
-    # For convenience: Fix mathjax escaping (but only if the html is generated)
-    if generated:
-        plain = plain.replace(r"\[", r"[")
-        plain = plain.replace(r"\]", r"]")
-        plain = plain.replace(r"\(", r"(")
-        plain = plain.replace(r"\)", r")")
-
-    plain = re.sub(r"\<b\>\s*\<\/b\>", "", plain)
-    return plain.strip()
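A quick, illustrative sketch of what this conversion does for an ordinary (non-generated) field, i.e. one that does not contain GENERATED_HTML_SENTINEL, so only the entity, <br>, and <div> rules above apply (the field text is made up):

field = "x &lt; y<br>line two<div>line three</div>"
print(html_to_screen(field))
# x < y
# line two
# line three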
-
-
-
-
-
-
-
-

Functions

-
-
def html_to_screen(html: str) ‑> str
-
-

Convert html for printing to screen.

-
- -Expand source code - -
@beartype
-def html_to_screen(html: str) -> str:
-    """Convert html for printing to screen."""
-    html = re.sub(r"\<style\>.*\<\/style\>", "", html, flags=re.S)
-
-    generated = GENERATED_HTML_SENTINEL in html
-    if generated:
-        plain = html_to_markdown(html)
-        if html != markdown_to_html(plain):
-            html_clean = re.sub(r' data-original-markdown="[^"]*"', "", html)
-            plain += (
-                "\n\n### Current HTML → Markdown\n"
-                f"{markdownify.markdownify(html_clean)}"
-            )
-            plain += f"\n### Current HTML\n{html_clean}"
-    else:
-        plain = html
-
-    # For convenience: Un-escape some common LaTeX constructs
-    plain = plain.replace(r"\\\\", r"\\")
-    plain = plain.replace(r"\\{", r"\{")
-    plain = plain.replace(r"\\}", r"\}")
-    plain = plain.replace(r"\*}", r"*}")
-
-    plain = plain.replace(r"&lt;", "<")
-    plain = plain.replace(r"&gt;", ">")
-    plain = plain.replace(r"&amp;", "&")
-    plain = plain.replace(r"&nbsp;", " ")
-
-    plain = plain.replace("<br>", "\n")
-    plain = plain.replace("<br/>", "\n")
-    plain = plain.replace("<br />", "\n")
-    plain = plain.replace("<div>", "\n")
-    plain = plain.replace("</div>", "")
-
-    # For convenience: Fix mathjax escaping (but only if the html is generated)
-    if generated:
-        plain = plain.replace(r"\[", r"[")
-        plain = plain.replace(r"\]", r"]")
-        plain = plain.replace(r"\(", r"(")
-        plain = plain.replace(r"\)", r")")
-
-    plain = re.sub(r"\<b\>\s*\<\/b\>", "", plain)
-    return plain.strip()
-
-
-
-
-
-

Classes

-
-
class KiNote (a: apy.anki.Anki, note: anki.notes.Note)
-
-

A subclass of apy.Note for applying transformations to HTML in note fields. This is distinct from the anki Note class, which is accessible using self.n.

-

Parameters

-
-
a : apy.anki.Anki
-
Wrapper around Anki collection.
-
note : anki.notes.Note
-
Anki Note instance.
-
-
- -Expand source code - -
class KiNote(Note):
-    """
-    A subclass of ``apy.Note`` for applying transformations to HTML in note
-    fields. This is distinct from the anki ``Note`` class, which is accessible
-    using ``self.n``.
-
-    Parameters
-    ----------
-    a : apy.anki.Anki
-        Wrapper around Anki collection.
-    note : anki.notes.Note
-        Anki Note instance.
-    """
-
-    @beartype
-    def __init__(self, a: Anki, note: anki.notes.Note):
-
-        super().__init__(a, note)
-
-        # TODO: Remove implicit assumption that all cards are in the same deck.
-        self.deck = self.a.col.decks.name(self.n.cards()[0].did)
-
-        # Populate parsed fields.
-        self.fields: Dict[str, str] = {}
-        for key, field in self.n.items():
-            self.fields[key] = html_to_screen(field)
-
-    @beartype
-    def __repr__(self) -> str:
-        """Convert note to Markdown format"""
-        lines = self.get_header_lines()
-
-        for key, field in self.n.items():
-            lines.append("### " + key)
-            lines.append(html_to_screen(field))
-            lines.append("")
-
-        return "\n".join(lines)
-
-    @beartype
-    def get_header_lines(self) -> List[str]:
-        """Get header of markdown representation of note."""
-        lines = [
-            "## Note",
-            f"nid: {self.n.id}",
-            f"model: {self.model_name}",
-        ]
-
-        lines += [f"deck: {self.get_deck()}"]
-        lines += [f"tags: {self.get_tag_string()}"]
-
-        if not any(GENERATED_HTML_SENTINEL in field for field in self.n.values()):
-            lines += ["markdown: false"]
-
-        lines += [""]
-        return lines
-
-    @beartype
-    def get_deck(self) -> str:
-        """Return which deck the note belongs to."""
-        # TODO: Remove implicit assumption that all cards are in the same deck.
-        return self.deck
-
-    @beartype
-    def set_deck(self, deck: str) -> None:
-        """Move note to deck."""
-        newdid = self.a.col.decks.id(deck)
-        cids = [c.id for c in self.n.cards()]
-
-        if cids:
-            self.a.col.set_deck(cids, newdid)
-            self.a.modified = True
-        # TODO: Remove implicit assumption that all cards are in the same deck.
-        self.deck = self.a.col.decks.name(self.n.cards()[0].did)
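For concreteness, repr() of a KiNote serializes the note roughly as below; the nid, model, deck, tags, and field contents are illustrative, and the "markdown: false" line appears only when no field contains GENERATED_HTML_SENTINEL:

## Note
nid: 1645010162168
model: Basic
deck: Default
tags: geography
markdown: false

### Front
What is the capital of France?

### Back
Paris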
-
-

Ancestors

-
    -
  • apy.note.Note
  • -
-

Methods

-
-
def get_deck(self) ‑> str
-
-

Return which deck the note belongs to.

-
- -Expand source code - -
@beartype
-def get_deck(self) -> str:
-    """Return which deck the note belongs to."""
-    # TODO: Remove implicit assumption that all cards are in the same deck.
-    return self.deck
-
-
-
def get_header_lines(self) ‑> list[str]
-
-

Get header of markdown representation of note.

-
- -Expand source code - -
@beartype
-def get_header_lines(self) -> List[str]:
-    """Get header of markdown representation of note."""
-    lines = [
-        "## Note",
-        f"nid: {self.n.id}",
-        f"model: {self.model_name}",
-    ]
-
-    lines += [f"deck: {self.get_deck()}"]
-    lines += [f"tags: {self.get_tag_string()}"]
-
-    if not any(GENERATED_HTML_SENTINEL in field for field in self.n.values()):
-        lines += ["markdown: false"]
-
-    lines += [""]
-    return lines
-
-
-
def set_deck(self, deck: str) ‑> None
-
-

Move note to deck.

-
- -Expand source code - -
@beartype
-def set_deck(self, deck: str) -> None:
-    """Move note to deck."""
-    newdid = self.a.col.decks.id(deck)
-    cids = [c.id for c in self.n.cards()]
-
-    if cids:
-        self.a.col.set_deck(cids, newdid)
-        self.a.modified = True
-    # TODO: Remove implicit assumption that all cards are in the same deck.
-    self.deck = self.a.col.decks.name(self.n.cards()[0].did)
-
-
-
-
-
-
-
- -
\ No newline at end of file
diff --git a/docs/safe.html b/docs/safe.html
deleted file mode 100644
index ecf2943a..00000000
--- a/docs/safe.html
+++ /dev/null
@@ -1,342 +0,0 @@
-ki.safe API documentation
-
-
-

Module ki.safe

-
-
-

A function decorator for use with functions returning result.Result.

-
- -Expand source code - -
"""A function decorator for use with functions returning ``result.Result``."""
-import random
-import functools
-import itertools
-from result import Ok, Err
-from beartype import beartype
-from beartype.typing import Callable, TypeVar, NoReturn
-
-from beartype.roar._roarexc import (
-    BeartypeCallHintReturnViolation,
-    _BeartypeCallHintPepRaiseException,
-)
-from beartype._decor._error._errorsleuth import CauseSleuth
-from beartype._util.text.utiltextlabel import (
-    prefix_callable_decorated_return_value,
-)
-from beartype._util.hint.utilhinttest import die_unless_hint
-from beartype._util.text.utiltextmunge import suffix_unless_suffixed
-
-# pylint: disable=invalid-name
-
-T = TypeVar("T")
-
-PITH_NAME = "return"
-
-
-@beartype
-def raise_if_return_type_exception(
-    func: Callable[[...], T],
-    exception_prefix: str,
-    pith_value: object,
-    hint: object,
-    helper: str,
-) -> None:
-    """
-    Typecheck the return value of a function decorated with ``@safe``.
-
-    Raise an error if ``pith_value`` doesn't match the type specified by
-    ``hint``. This is a snippet copied from the internal implementation of
-    beartype, and so it should be pinned to a version to avoid breakage when
-    the private API inevitably changes.
-    """
-
-    # If this is *NOT* the PEP 484-compliant "typing.NoReturn" type hint
-    # permitted *ONLY* as a return annotation, this is a standard type hint
-    # generally supported by both parameters and return values. In this case...
-    if hint is not NoReturn:
-        # If type hint is *NOT* a supported type hint, raise an exception.
-        die_unless_hint(hint=hint, exception_prefix=exception_prefix)
-        # Else, this type hint is supported.
-
-    # Human-readable string describing the failure of this pith to satisfy this
-    # hint if this pith fails to satisfy this hint *OR* "None" otherwise (i.e.,
-    # if this pith satisfies this hint).
-    exception_cause = CauseSleuth(
-        func=func,
-        pith=pith_value,
-        hint=hint,
-        cause_indent="",
-        exception_prefix=exception_prefix,
-        random_int=random.getrandbits(32),
-    ).get_cause_or_none()
-
-    # If this pith does *NOT* satisfy this hint...
-    if exception_cause:
-        # This failure suffixed by a period if *NOT* yet suffixed by a period.
-        exception_cause_suffixed = suffix_unless_suffixed(
-            text=exception_cause, suffix="."
-        )
-
-        # Raise an exception of the desired class embedding this cause.
-        raise BeartypeCallHintReturnViolation(  # type: ignore[misc]
-            f"{exception_prefix}violates {helper} type hint {repr(hint)}, as "
-            f"{exception_cause_suffixed}"
-        )
-
-
-@beartype
-def safe(func: Callable[[...], T]) -> Callable[[...], T]:
-    """A function decorator to chain functions that return a ``result.Result``."""
-
-    @functools.wraps(func)
-    def decorated(*args, **kwargs) -> T:
-        """The decorated version of ``func``."""
-        # If any of the arguments are Err, return early.
-        for arg in itertools.chain(args, kwargs.values()):
-            if isinstance(arg, Err):
-                return arg
-
-        # Unpack arguments of type ``Ok`` passed to ``func()``. We pass the
-        # modified versions of ``args``, ``kwargs`` to ``func()`` because if we
-        # do not do this unpacking step, functions that are @beartype decorated
-        # underneath the @safe decorator will ALWAYS fail to typecheck, since
-        # they expect whatever type hint the user provided, but they are
-        # (possibly) receiving ``OkErr`` types.
-        unpack = lambda arg: arg.value if isinstance(arg, Ok) else arg
-        args = tuple(map(unpack, args))
-        kwargs = {key: unpack(arg) for key, arg in kwargs.items()}
-
-        # Call function with the OkErr-unpacked ``args`` and ``kwargs``.
-        result = func(*args, **kwargs)
-
-        # Human-readable label describing the unpacked return value.
-        exception_prefix: str = prefix_callable_decorated_return_value(
-            func=func, return_value=result.value
-        )
-
-        # If the return value is unannotated, raise an exception.
-        if PITH_NAME not in func.__annotations__:
-            raise _BeartypeCallHintPepRaiseException(f"{exception_prefix}unannotated.")
-
-        # Unfold the return type annotation of ``func``, extracting the ``Ok``
-        # return type hint and the ``Err`` return type hint.
-        ok_hint, err_hint = func.__annotations__[PITH_NAME].__args__
-        ok_ret_hint = ok_hint.__args__[0]
-        err_ret_hint = err_hint.__args__[0]
-
-        # Check return type, raising an error if the check fails. These calls
-        # are no-ops otherwise.
-        if isinstance(result, Ok):
-            raise_if_return_type_exception(
-                func, exception_prefix, result.value, ok_ret_hint, "Ok"
-            )
-        elif isinstance(result, Err):
-            raise_if_return_type_exception(
-                func, exception_prefix, result.value, err_ret_hint, "Err"
-            )
-
-        # Return the un-unpacked result, since this is what the user expects
-        # (``func()`` is decorated with @safe, and so it is intended to return
-        # a value of type ``result.Result``).
-        return result
-
-    return decorated
-
-
-
-
-
-
-
-

Functions

-
-
def raise_if_return_type_exception(func: collections.abc.Callable[..., ~T], exception_prefix: str, pith_value: object, hint: object, helper: str) ‑> None
-
-

Typecheck the return value of a function decorated with @safe.

-

Raise an error if pith_value doesn't match the type specified by hint. This is a snippet copied from the internal implementation of beartype, and so it should be pinned to a version to avoid breakage when the private API inevitably changes.

-
- -Expand source code - -
@beartype
-def raise_if_return_type_exception(
-    func: Callable[[...], T],
-    exception_prefix: str,
-    pith_value: object,
-    hint: object,
-    helper: str,
-) -> None:
-    """
-    Typecheck the return value of a function decorated with ``@safe``.
-
-    Raise an error if ``pith_value`` doesn't match the type specified by
-    ``hint``. This is a snippet copied from the internal implementation of
-    beartype, and so it should be pinned to a version to avoid breakage when
-    the private API inevitably changes.
-    """
-
-    # If this is *NOT* the PEP 484-compliant "typing.NoReturn" type hint
-    # permitted *ONLY* as a return annotation, this is a standard type hint
-    # generally supported by both parameters and return values. In this case...
-    if hint is not NoReturn:
-        # If type hint is *NOT* a supported type hint, raise an exception.
-        die_unless_hint(hint=hint, exception_prefix=exception_prefix)
-        # Else, this type hint is supported.
-
-    # Human-readable string describing the failure of this pith to satisfy this
-    # hint if this pith fails to satisfy this hint *OR* "None" otherwise (i.e.,
-    # if this pith satisfies this hint).
-    exception_cause = CauseSleuth(
-        func=func,
-        pith=pith_value,
-        hint=hint,
-        cause_indent="",
-        exception_prefix=exception_prefix,
-        random_int=random.getrandbits(32),
-    ).get_cause_or_none()
-
-    # If this pith does *NOT* satisfy this hint...
-    if exception_cause:
-        # This failure suffixed by a period if *NOT* yet suffixed by a period.
-        exception_cause_suffixed = suffix_unless_suffixed(
-            text=exception_cause, suffix="."
-        )
-
-        # Raise an exception of the desired class embedding this cause.
-        raise BeartypeCallHintReturnViolation(  # type: ignore[misc]
-            f"{exception_prefix}violates {helper} type hint {repr(hint)}, as "
-            f"{exception_cause_suffixed}"
-        )
-
-
-
def safe(func: collections.abc.Callable[..., ~T]) ‑> collections.abc.Callable[..., ~T]
-
-

A function decorator to chain functions that return a result.Result.

-
- -Expand source code - -
@beartype
-def safe(func: Callable[[...], T]) -> Callable[[...], T]:
-    """A function decorator to chain functions that return a ``result.Result``."""
-
-    @functools.wraps(func)
-    def decorated(*args, **kwargs) -> T:
-        """The decorated version of ``func``."""
-        # If any of the arguments are Err, return early.
-        for arg in itertools.chain(args, kwargs.values()):
-            if isinstance(arg, Err):
-                return arg
-
-        # Unpack arguments of type ``Ok`` passed to ``func()``. We pass the
-        # modified versions of ``args``, ``kwargs`` to ``func()`` because if we
-        # do not do this unpacking step, functions that are @beartype decorated
-        # underneath the @safe decorator will ALWAYS fail to typecheck, since
-        # they expect whatever type hint the user provided, but they are
-        # (possibly) receiving ``OkErr`` types.
-        unpack = lambda arg: arg.value if isinstance(arg, Ok) else arg
-        args = tuple(map(unpack, args))
-        kwargs = {key: unpack(arg) for key, arg in kwargs.items()}
-
-        # Call function with the OkErr-unpacked ``args`` and ``kwargs``.
-        result = func(*args, **kwargs)
-
-        # Human-readable label describing the unpacked return value.
-        exception_prefix: str = prefix_callable_decorated_return_value(
-            func=func, return_value=result.value
-        )
-
-        # If the return value is unannotated, raise an exception.
-        if PITH_NAME not in func.__annotations__:
-            raise _BeartypeCallHintPepRaiseException(f"{exception_prefix}unannotated.")
-
-        # Unfold the return type annotation of ``func``, extracting the ``Ok``
-        # return type hint and the ``Err`` return type hint.
-        ok_hint, err_hint = func.__annotations__[PITH_NAME].__args__
-        ok_ret_hint = ok_hint.__args__[0]
-        err_ret_hint = err_hint.__args__[0]
-
-        # Check return type, raising an error if the check fails. These calls
-        # are no-ops otherwise.
-        if isinstance(result, Ok):
-            raise_if_return_type_exception(
-                func, exception_prefix, result.value, ok_ret_hint, "Ok"
-            )
-        elif isinstance(result, Err):
-            raise_if_return_type_exception(
-                func, exception_prefix, result.value, err_ret_hint, "Err"
-            )
-
-        # Return the un-unpacked result, since this is what the user expects
-        # (``func()`` is decorated with @safe, and so it is intended to return
-        # a value of type ``result.Result``).
-        return result
-
-    return decorated
-
-
-
-
-
-
-
- -
\ No newline at end of file
diff --git a/docs/transformer.html b/docs/transformer.html
deleted file mode 100644
index 7819251d..00000000
--- a/docs/transformer.html
+++ /dev/null
@@ -1,805 +0,0 @@
-ki.transformer API documentation
-
-
-

Module ki.transformer

-
-
-

A Lark transformer for the ki note grammar.

-
- -Expand source code - -
#!/usr/bin/env python3
-"""A Lark transformer for the ki note grammar."""
-import re
-from dataclasses import dataclass
-
-from lark import Transformer
-from lark.lexer import Token
-
-from beartype import beartype
-from beartype.typing import (
-    List,
-    Dict,
-    Optional,
-    Union,
-)
-
-# pylint: disable=invalid-name, too-few-public-methods
-
-BACKTICKS = "```\n"
-
-
-@beartype
-@dataclass(frozen=True)
-class Field:
-    """Field content pair."""
-
-    title: str
-    content: str
-
-
-@beartype
-@dataclass(frozen=True)
-class Header:
-    """Note metadata."""
-
-    title: str
-    guid: str
-    model: str
-
-
-@beartype
-@dataclass(frozen=True)
-class FlatNote:
-    """Flat (as possible) representation of a note."""
-
-    title: str
-    guid: str
-    model: str
-    tags: List[str]
-    fields: Dict[str, str]
-
-
-class NoteTransformer(Transformer):
-    r"""
-    note
-      header
-        title     Note
-        guid: 123412341234
-
-        notetype: Basic
-
-        tags      None
-
-
-      field
-        fieldheader
-          ###
-          Front
-        r
-
-
-      field
-        fieldheader
-          ###
-          Back
-        s
-    """
-    # pylint: disable=missing-function-docstring
-
-    @beartype
-    def note(self, n: List[Union[Header, List[str], Field]]) -> FlatNote:
-        assert len(n) >= 3
-
-        header = n[0]
-        tags = n[1]
-        fields = n[2:]
-        assert isinstance(header, Header)
-        assert isinstance(fields[0], Field)
-
-        # We drop the first character because it is a newline.
-        fieldmap: Dict[str, str] = {}
-        for field in fields:
-            fieldmap[field.title] = field.content[1:]
-
-        return FlatNote(
-            title=header.title,
-            guid=header.guid,
-            model=header.model,
-            tags=tags,
-            fields=fieldmap,
-        )
-
-    @beartype
-    def header(self, h: List[str]) -> Header:
-        h = filter(lambda s: s != BACKTICKS, h)
-        return Header(*h)
-
-    @beartype
-    def title(self, t: List[str]) -> str:
-        """``title: "##" TITLENAME "\n"+``"""
-        assert len(t) == 1
-        return t[0]
-
-    @beartype
-    def tags(self, tags: List[Optional[str]]) -> List[str]:
-        tags = filter(lambda t: t != BACKTICKS, tags)
-        return [tag for tag in tags if tag is not None]
-
-    @beartype
-    def field(self, f: List[str]) -> Field:
-        assert len(f) >= 1
-        fheader = f[0]
-        lines = f[1:]
-        content = "".join(lines)
-        if content[-2:] != "\n\n":
-            raise RuntimeError(
-                f"Nonterminating fields must have >= 1 trailing empty line:\n{content}"
-            )
-        return Field(fheader, content[:-1])
-
-    @beartype
-    def lastfield(self, f: List[str]) -> Field:
-        assert len(f) >= 1
-        fheader = f[0]
-        lines = f[1:]
-        content = "".join(lines)
-        if len(content) > 0 and content[-1] == "\n":
-            content = content[:-1]
-        return Field(fheader, content)
-
-    @beartype
-    def fieldheader(self, f: List[str]) -> str:
-        """``fieldheader: "##" " "* ANKINAME "\n"+``"""
-        assert len(f) == 1
-        return f[0]
-
-    @beartype
-    def GUID(self, t: Token) -> str:
-        """Possibly empty for new markdown notes."""
-        return re.sub(r"^guid:", "", str(t)).strip()
-
-    @beartype
-    def NOTETYPE(self, t: Token) -> str:
-        model = re.sub(r"^notetype:", "", str(t)).strip()
-        return model
-
-    @beartype
-    def FIELDLINE(self, t: Token) -> str:
-        return str(t)
-
-    @beartype
-    def TITLENAME(self, t: Token) -> str:
-        return str(t)
-
-    @beartype
-    def ANKINAME(self, t: Token) -> str:
-        return str(t)
-
-    @beartype
-    def TAGNAME(self, t: Token) -> str:
-        return str(t)
-
-    @beartype
-    def TRIPLEBACKTICKS(self, t: Token) -> str:
-        return str(t)
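A small hand-driven sketch of what the transformer produces, calling note() directly on already-transformed children (in real use Lark supplies these while visiting a parse tree; the title, guid, tags, and field contents are made up):

t = NoteTransformer()
header = Header(title="Note", guid="123412341234", model="Basic")
front = Field(title="Front", content="\nWhat is 2 + 2?")
back = Field(title="Back", content="\n4")
t.note([header, ["math"], front, back])
# FlatNote(title='Note', guid='123412341234', model='Basic', tags=['math'],
#          fields={'Front': 'What is 2 + 2?', 'Back': '4'})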
-
-
-
-
-
-
-
-
-
-

Classes

-
-
class Field (title: str, content: str)
-
-

Field content pair.

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class Field:
-    """Field content pair."""
-
-    title: str
-    content: str
-
-

Class variables

-
-
var content : str
-
-
-
-
var title : str
-
-
-
-
-
-
class FlatNote (title: str, guid: str, model: str, tags: list[str], fields: dict[str, str])
-
-

Flat (as possible) representation of a note.

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class FlatNote:
-    """Flat (as possible) representation of a note."""
-
-    title: str
-    guid: str
-    model: str
-    tags: List[str]
-    fields: Dict[str, str]
-
-

Class variables

-
-
var fields : dict[str, str]
-
-
-
-
var guid : str
-
-
-
-
var model : str
-
-
-
-
var tags : list[str]
-
-
-
-
var title : str
-
-
-
-
-
-
class Header (title: str, guid: str, model: str)
-
-

Note metadata.

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class Header:
-    """Note metadata."""
-
-    title: str
-    guid: str
-    model: str
-
-

Class variables

-
-
var guid : str
-
-
-
-
var model : str
-
-
-
-
var title : str
-
-
-
-
-
-
class NoteTransformer (visit_tokens: bool = True)
-
-

note
  header
    title     Note
    guid: 123412341234

    notetype: Basic

    tags      None

  field
    fieldheader
      ###
      Front
    r

  field
    fieldheader
      ###
      Back
    s

-
- -Expand source code - -
class NoteTransformer(Transformer):
-    r"""
-    note
-      header
-        title     Note
-        guid: 123412341234
-
-        notetype: Basic
-
-        tags      None
-
-
-      field
-        fieldheader
-          ###
-          Front
-        r
-
-
-      field
-        fieldheader
-          ###
-          Back
-        s
-    """
-    # pylint: disable=missing-function-docstring
-
-    @beartype
-    def note(self, n: List[Union[Header, List[str], Field]]) -> FlatNote:
-        assert len(n) >= 3
-
-        header = n[0]
-        tags = n[1]
-        fields = n[2:]
-        assert isinstance(header, Header)
-        assert isinstance(fields[0], Field)
-
-        # We drop the first character because it is a newline.
-        fieldmap: Dict[str, str] = {}
-        for field in fields:
-            fieldmap[field.title] = field.content[1:]
-
-        return FlatNote(
-            title=header.title,
-            guid=header.guid,
-            model=header.model,
-            tags=tags,
-            fields=fieldmap,
-        )
-
-    @beartype
-    def header(self, h: List[str]) -> Header:
-        h = filter(lambda s: s != BACKTICKS, h)
-        return Header(*h)
-
-    @beartype
-    def title(self, t: List[str]) -> str:
-        """``title: "##" TITLENAME "\n"+``"""
-        assert len(t) == 1
-        return t[0]
-
-    @beartype
-    def tags(self, tags: List[Optional[str]]) -> List[str]:
-        tags = filter(lambda t: t != BACKTICKS, tags)
-        return [tag for tag in tags if tag is not None]
-
-    @beartype
-    def field(self, f: List[str]) -> Field:
-        assert len(f) >= 1
-        fheader = f[0]
-        lines = f[1:]
-        content = "".join(lines)
-        if content[-2:] != "\n\n":
-            raise RuntimeError(
-                f"Nonterminating fields must have >= 1 trailing empty line:\n{content}"
-            )
-        return Field(fheader, content[:-1])
-
-    @beartype
-    def lastfield(self, f: List[str]) -> Field:
-        assert len(f) >= 1
-        fheader = f[0]
-        lines = f[1:]
-        content = "".join(lines)
-        if len(content) > 0 and content[-1] == "\n":
-            content = content[:-1]
-        return Field(fheader, content)
-
-    @beartype
-    def fieldheader(self, f: List[str]) -> str:
-        """``fieldheader: "##" " "* ANKINAME "\n"+``"""
-        assert len(f) == 1
-        return f[0]
-
-    @beartype
-    def GUID(self, t: Token) -> str:
-        """Possibly empty for new markdown notes."""
-        return re.sub(r"^guid:", "", str(t)).strip()
-
-    @beartype
-    def NOTETYPE(self, t: Token) -> str:
-        model = re.sub(r"^notetype:", "", str(t)).strip()
-        return model
-
-    @beartype
-    def FIELDLINE(self, t: Token) -> str:
-        return str(t)
-
-    @beartype
-    def TITLENAME(self, t: Token) -> str:
-        return str(t)
-
-    @beartype
-    def ANKINAME(self, t: Token) -> str:
-        return str(t)
-
-    @beartype
-    def TAGNAME(self, t: Token) -> str:
-        return str(t)
-
-    @beartype
-    def TRIPLEBACKTICKS(self, t: Token) -> str:
-        return str(t)
-
-

Ancestors

-
    -
  • lark.visitors.Transformer
  • -
  • lark.visitors._Decoratable
  • -
  • abc.ABC
  • -
  • typing.Generic
  • -
-

Methods

-
-
def ANKINAME(self, t: lark.lexer.Token) ‑> str
-
-
-
- -Expand source code - -
@beartype
-def ANKINAME(self, t: Token) -> str:
-    return str(t)
-
-
-
def FIELDLINE(self, t: lark.lexer.Token) ‑> str
-
-
-
- -Expand source code - -
@beartype
-def FIELDLINE(self, t: Token) -> str:
-    return str(t)
-
-
-
def GUID(self, t: lark.lexer.Token) ‑> str
-
-

Possibly empty for new markdown notes.

-
- -Expand source code - -
@beartype
-def GUID(self, t: Token) -> str:
-    """Possibly empty for new markdown notes."""
-    return re.sub(r"^guid:", "", str(t)).strip()
-
-
-
def NOTETYPE(self, t: lark.lexer.Token) ‑> str
-
-
-
- -Expand source code - -
@beartype
-def NOTETYPE(self, t: Token) -> str:
-    model = re.sub(r"^notetype:", "", str(t)).strip()
-    return model
-
-
-
def TAGNAME(self, t: lark.lexer.Token) ‑> str
-
-
-
- -Expand source code - -
@beartype
-def TAGNAME(self, t: Token) -> str:
-    return str(t)
-
-
-
def TITLENAME(self, t: lark.lexer.Token) ‑> str
-
-
-
- -Expand source code - -
@beartype
-def TITLENAME(self, t: Token) -> str:
-    return str(t)
-
-
-
def TRIPLEBACKTICKS(self, t: lark.lexer.Token) ‑> str
-
-
-
- -Expand source code - -
@beartype
-def TRIPLEBACKTICKS(self, t: Token) -> str:
-    return str(t)
-
-
-
def field(self, f: list[str]) ‑> Field
-
-
-
- -Expand source code - -
@beartype
-def field(self, f: List[str]) -> Field:
-    assert len(f) >= 1
-    fheader = f[0]
-    lines = f[1:]
-    content = "".join(lines)
-    if content[-2:] != "\n\n":
-        raise RuntimeError(
-            f"Nonterminating fields must have >= 1 trailing empty line:\n{content}"
-        )
-    return Field(fheader, content[:-1])
-
-
-
def fieldheader(self, f: list[str]) ‑> str
-
-

fieldheader: "##" " "* ANKINAME " -"+

-
- -Expand source code - -
@beartype
-def fieldheader(self, f: List[str]) -> str:
-    """``fieldheader: "##" " "* ANKINAME "\n"+``"""
-    assert len(f) == 1
-    return f[0]
-
-
-
def header(self, h: list[str]) ‑> Header
-
-
-
- -Expand source code - -
@beartype
-def header(self, h: List[str]) -> Header:
-    h = filter(lambda s: s != BACKTICKS, h)
-    return Header(*h)
-
-
-
def lastfield(self, f: list[str]) ‑> Field
-
-
-
- -Expand source code - -
@beartype
-def lastfield(self, f: List[str]) -> Field:
-    assert len(f) >= 1
-    fheader = f[0]
-    lines = f[1:]
-    content = "".join(lines)
-    if len(content) > 0 and content[-1] == "\n":
-        content = content[:-1]
-    return Field(fheader, content)
-
-
-
def note(self, n: list[typing.Union[Header, list[str], Field]]) ‑> FlatNote
-
-
-
- -Expand source code - -
@beartype
-def note(self, n: List[Union[Header, List[str], Field]]) -> FlatNote:
-    assert len(n) >= 3
-
-    header = n[0]
-    tags = n[1]
-    fields = n[2:]
-    assert isinstance(header, Header)
-    assert isinstance(fields[0], Field)
-
-    # We drop the first character because it is a newline.
-    fieldmap: Dict[str, str] = {}
-    for field in fields:
-        fieldmap[field.title] = field.content[1:]
-
-    return FlatNote(
-        title=header.title,
-        guid=header.guid,
-        model=header.model,
-        tags=tags,
-        fields=fieldmap,
-    )
-
-
-
def tags(self, tags: list[typing.Optional[str]]) ‑> list[str]
-
-
-
- -Expand source code - -
@beartype
-def tags(self, tags: List[Optional[str]]) -> List[str]:
-    tags = filter(lambda t: t != BACKTICKS, tags)
-    return [tag for tag in tags if tag is not None]
-
-
-
def title(self, t: list[str]) ‑> str
-
-

title: "##" TITLENAME " -"+

-
- -Expand source code - -
@beartype
-def title(self, t: List[str]) -> str:
-    """``title: "##" TITLENAME "\n"+``"""
-    assert len(t) == 1
-    return t[0]
-
-
-
-
-
-
-
- -
\ No newline at end of file
diff --git a/docs/types.html b/docs/types.html
deleted file mode 100644
index 1d21ed83..00000000
--- a/docs/types.html
+++ /dev/null
@@ -1,3428 +0,0 @@
-ki.types API documentation
-
-
-

Module ki.types

-
-
-

Types for ki.

-
- -Expand source code - -
#!/usr/bin/env python3
-"""Types for ki."""
-import json
-import sqlite3
-import textwrap
-import dataclasses
-from enum import Enum
-from pathlib import Path
-from dataclasses import dataclass
-
-import git
-import whatthepatch
-from anki.decks import DeckTreeNode
-from anki.collection import Note, Card
-
-from beartype import beartype
-from beartype.typing import List, Dict, Any, Optional, Union
-
-# pylint: disable=too-many-lines, missing-class-docstring, too-many-instance-attributes
-
-NotetypeDict = Dict[str, Any]
-MODELS_FILE = "models.json"
-HINT = (
-    "hint: Updates were rejected because the tip of your current branch is behind\n"
-    + "hint: the Anki remote collection. Integrate the remote changes (e.g.\n"
-    + "hint: 'ki pull ...') before pushing again."
-)
-ERROR_MESSAGE_WIDTH = 69
-DATABASE_LOCKED_MSG = "database is locked"
-DeckId = int
-
-
-# TYPES
-
-
-class File(type(Path())):
-    """UNSAFE: Indicates that file *was* extant when it was resolved."""
-
-
-class Dir(type(Path())):
-    """UNSAFE: Indicates that dir *was* extant when it was resolved."""
-
-
-class EmptyDir(Dir):
-    """UNSAFE: Indicates that dir *was* empty (and extant) when it was resolved."""
-
-
-class NoPath(type(Path())):
-    """UNSAFE: Indicates that path *was not* extant when it was resolved."""
-
-
-class Singleton(type(Path())):
-    """UNSAFE: A path consisting of a single component (e.g. `file`, not `dir/file`)."""
-
-
-class PseudoFile(type(Path())):
-    """
-    UNSAFE: Indicates that path was extant but weird (e.g. a device or socket)
-    when it was resolved.
-    """
-
-
-class Link(type(Path())):
-    """UNSAFE: Indicates that this path was a symlink when tested."""
-
-
-class NoFile(NoPath):
-    """A nonexistent file in an extant directory."""
-
-    @property
-    def parent(self):
-        return Dir(super().parent)
-
-
-# ENUMS
-
-
-class GitChangeType(Enum):
-    """Enum for git file change types."""
-
-    ADDED = "A"
-    DELETED = "D"
-    RENAMED = "R"
-    MODIFIED = "M"
-    TYPECHANGED = "T"
-
-
-class PushResult(Enum):
-    """Enum for `push()` return codes."""
-
-    NONTRIVIAL = "NONTRIVIAL"
-    UP_TO_DATE = "UP_TO_DATE"
-
-
-# DATACLASSES
-
-
-@beartype
-@dataclass(frozen=True)
-class Patch:
-    """Relative paths and a Diff object."""
-
-    a: Path
-    b: Path
-    diff: whatthepatch.patch.diffobj
-
-
-@beartype
-@dataclass(frozen=True)
-class DeckNote:
-    """Flat (as possible) representation of a note, but with deck."""
-
-    title: str
-    guid: str
-    deck: str
-    model: str
-    tags: List[str]
-    fields: Dict[str, str]
-
-
-@beartype
-@dataclass(frozen=True)
-class NoteMetadata:
-    """The nid, mod, and mid of a note."""
-
-    nid: int
-    mod: int
-    mid: int
-
-
-@beartype
-@dataclass(frozen=True)
-class Delta:
-    """
-    The git delta for a single file.
-
-    We don't instead store a root and a relative path, because we need the
-    `File` object to avoid making unnecessary syscalls to check that stuff
-    exists.
-    """
-
-    status: GitChangeType
-    path: File
-    relpath: Path
-
-
-@beartype
-@dataclass(frozen=True)
-class KiRepo:
-    """
-    UNSAFE: A ki repository, including:
-    - .ki/hashes
-    - .ki/config
-
-    Existence of collection path is guaranteed.
-    """
-
-    # pylint: disable=invalid-name
-
-    repo: git.Repo
-    root: Dir
-    ki: Dir
-    col_file: File
-    backups_dir: Dir
-    config_file: File
-    hashes_file: File
-    models_file: File
-
-
-@beartype
-@dataclass(frozen=True)
-class Field:
-    """A typechecked version of `anki.models.FieldDict` for use within ki."""
-
-    name: str
-    ord: Optional[int]
-
-
-@beartype
-@dataclass(frozen=True)
-class Template:
-    """A typechecked version of `anki.models.TemplateDict` for use within ki."""
-
-    name: str
-    qfmt: str
-    afmt: str
-    ord: Optional[int]
-
-
-@beartype
-@dataclass(frozen=True)
-class Notetype:
-    """A typechecked version of `anki.models.NotetypeDict` for use within ki."""
-
-    # pylint: disable=invalid-name
-
-    id: int
-    name: str
-    type: int
-    flds: List[Field]
-    tmpls: List[Template]
-    sortf: Field
-
-    # A copy of the `NotetypeDict` object as it was returned from the Anki
-    # database. We keep this around to preserve extra keys that may not always
-    # exist, but the ones above should be required for Anki to function.
-    dict: Dict[str, Any]
-
-
-@beartype
-@dataclass(frozen=True)
-class ColNote:
-    """A note that exists in the Anki DB."""
-
-    n: Note
-    new: bool
-    deck: str
-    title: str
-    markdown: bool
-    notetype: Notetype
-    sfld: str
-
-
-@beartype
-@dataclass(frozen=True)
-class KiRev:
-    """
-    UNSAFE: A repo-commit pair, where `sha` is guaranteed to be an extant
-    commit hash of `repo`.
-    """
-
-    kirepo: KiRepo
-    sha: str
-
-
-@beartype
-@dataclass(frozen=True)
-class Rev:
-    """
-    UNSAFE: A repo-commit pair, where `sha` is guaranteed to be an extant
-    commit hash of `repo`.
-    """
-
-    repo: git.Repo
-    sha: str
-
-
-@beartype
-@dataclass(frozen=True)
-class CardFile:
-    """A card written to disk, either as a link or a file."""
-
-    card: Card
-    file: File
-
-
-@beartype
-@dataclass(frozen=True)
-class Deck:
-    did: DeckId
-    node: DeckTreeNode
-    deckd: Dir
-    mediad: Dir
-    children: List["Deck"]
-    fullname: str
-
-
-@beartype
-@dataclass(frozen=True)
-class Root:
-    did: DeckId
-    node: DeckTreeNode
-    deckd: None
-    mediad: None
-    children: List[Deck]
-    fullname: str
-
-
-@beartype
-@dataclass(frozen=True)
-class PlannedLink:
-    """A not-yet-created symlink path and its extant target."""
-
-    link: NoFile
-    tgt: Union[File, Link]
-
-
-@beartype
-@dataclass(frozen=True)
-class DotKi:
-    config: File
-    backups: EmptyDir
-
-
-@beartype
-@dataclass(frozen=True)
-class Submodule:
-    sm: git.Submodule
-    sm_repo: git.Repo
-    rel_root: Path
-    branch: str
-
-
-@beartype
-@dataclass(frozen=True)
-class MediaBytes:
-    """A media file, its old bytes (from collection) and new bytes (from file)."""
-
-    file: File
-    old: bytes
-    new: bytes
-
-
-@beartype
-@dataclass(frozen=True)
-class AddedMedia:
-    """An added media file and its (possibly changed) filename."""
-
-    file: File
-    new_name: str
-
-
-@beartype
-@dataclass(frozen=True)
-class NoteDBRow:
-    nid: int
-    guid: str
-    mid: int
-    mod: int
-    usn: int
-    tags: str
-    flds: str
-    sfld: Union[str, int]
-    csum: int
-    flags: int
-    data: str
-
-
-@beartype
-def notetype_json(notetype: Notetype) -> str:
-    """Return the JSON for a notetype as a string."""
-    dictionary: Dict[str, Any] = dataclasses.asdict(notetype)
-    dictionary.pop("id")
-    inner = dictionary["dict"]
-    inner.pop("id")
-    inner.pop("mod")
-    dictionary["dict"] = inner
-    return json.dumps(dictionary, sort_keys=True, indent=4)
-
-
-@beartype
-def nt_str(notetype: Notetype) -> str:
-    """Display a notetype and its JSON."""
-    # pylint: disable=invalid-name
-    s = notetype_json(notetype)
-    return f"JSON for '{notetype.id}':\n{s}"
-
-
-# EXCEPTIONS
-
-
-@beartype
-def errwrap(msg: str) -> str:
-    """Wrap an error message to a fixed width."""
-    out: str = textwrap.fill(textwrap.dedent(msg), width=ERROR_MESSAGE_WIDTH)
-    out = out.lstrip()
-    out = out.rstrip()
-    return out
-
-
-class MissingFileError(FileNotFoundError):
-    @beartype
-    def __init__(self, path: Path, info: str = ""):
-        header = f"File not found: '{path}'"
-        msg = f"{info.rstrip()}"
-        super().__init__(f"{header}\n\n{errwrap(msg)}")
-
-
-class MissingDirectoryError(RuntimeError):
-    @beartype
-    def __init__(self, path: Path, info: str = ""):
-        msg = f"Directory not found: '{path}'{info.rstrip()}"
-        super().__init__(errwrap(msg))
-
-
-class ExpectedFileButGotDirectoryError(FileNotFoundError):
-    @beartype
-    def __init__(self, path: Path, info: str = ""):
-        msg = "A file was expected at this location, but got a directory: "
-        msg += f"'{path}'{info.rstrip()}"
-        super().__init__(errwrap(msg))
-
-
-class ExpectedDirectoryButGotFileError(RuntimeError):
-    @beartype
-    def __init__(self, path: Path, info: str = ""):
-        msg = "A directory was expected at this location, but got a file: "
-        msg += f"'{path}'{info.rstrip()}"
-        super().__init__(errwrap(msg))
-
-
-class ExpectedEmptyDirectoryButGotNonEmptyDirectoryError(RuntimeError):
-    @beartype
-    def __init__(self, path: Path, info: str = ""):
-        msg = "An empty directory was expected at this location, but it is nonempty: "
-        msg += f"'{path}'{info.rstrip()}"
-        super().__init__(errwrap(msg))
-
-
-class StrangeExtantPathError(RuntimeError):
-    @beartype
-    def __init__(self, path: Path, info: str = ""):
-        msg = "A normal file or directory was expected, but got a weird pseudofile "
-        msg += "(e.g. a socket, or a device): "
-        msg += f"'{path}'{info.rstrip()}"
-        super().__init__(errwrap(msg))
-
-
-class ExpectedNonexistentPathError(FileExistsError):
-    @beartype
-    def __init__(self, path: Path, info: str = ""):
-        top = f"""
-        Expected this path not to exist, but it does: '{path}'{info.rstrip()}
-        """
-        msg = """
-        If the path is to the `.ki/` metadata directory, this error may have
-        been caused by a `.gitignore` file that does not include `.ki/` (this
-        metadata should not be tracked by git). Check if this pattern is
-        included in the `.gitignore` file, and if it is not included, try
-        adding it.
-        """
-        super().__init__(f"{top}\n\n{errwrap(msg)}")
-
-
-class NotKiRepoError(RuntimeError):
-    @beartype
-    def __init__(self):
-        msg = "fatal: not a ki repository (or any parent up to mount point /)\n"
-        msg += "Stopping at filesystem boundary."
-        super().__init__(errwrap(msg))
-
-
-class UpdatesRejectedError(RuntimeError):
-    @beartype
-    def __init__(self, col_file: File):
-        msg = f"Failed to push some commits to '{col_file}'\n{HINT}"
-        super().__init__(errwrap(msg))
-
-
-class TargetExistsError(RuntimeError):
-    @beartype
-    def __init__(self, target: Path):
-        msg = f"fatal: destination path '{target}' already exists and is "
-        msg += "not an empty directory."
-        super().__init__(errwrap(msg))
-
-
-class GitRefNotFoundError(RuntimeError):
-    @beartype
-    def __init__(self, repo: git.Repo, sha: str):
-        msg = f"Repo at '{repo.working_dir}' doesn't contain rev '{sha}'"
-        super().__init__(errwrap(msg))
-
-
-class GitHeadRefNotFoundError(RuntimeError):
-    @beartype
-    def __init__(self, repo: git.Repo, error: Exception):
-        msg = f"""
-        ValueError raised while trying to get rev 'HEAD' from repo at
-        '{repo.working_dir}': '{error}'. This may have occurred because there
-        are no commits in the current repository. However, this should never be
-        the case, because ki repositories must be instantiated with a 'ki clone
-        <collection>' command, and this command creates an initial commit.
-        """
-        super().__init__(errwrap(msg))
-
-
-class CollectionChecksumError(RuntimeError):
-    @beartype
-    def __init__(self, col_file: File):
-        msg = f"Checksum mismatch on {col_file}. Was file changed?"
-        super().__init__(errwrap(msg))
-
-
-class MissingNotetypeError(RuntimeError):
-    @beartype
-    def __init__(self, model: str):
-        msg = f"""
-        Notetype '{model}' doesn't exist. Create it in Anki before adding notes
-        via ki. This may be caused by a corrupted '{MODELS_FILE}' file. The
-        models file must contain definitions for all models that appear in all
-        note files.
-        """
-        super().__init__(errwrap(msg))
-
-
-# TODO: We should also print which field ordinals *are* valid.
-class MissingFieldOrdinalError(RuntimeError):
-    # pylint: disable=redefined-builtin
-
-    @beartype
-    def __init__(self, ord: int, model: str):
-        msg = f"Field with ordinal {ord} missing from notetype '{model}'."
-        super().__init__(errwrap(msg))
-
-
-class MissingNoteIdError(RuntimeError):
-    @beartype
-    def __init__(self, nid: int):
-        msg = f"Failed to locate note with nid '{nid}' in Anki database."
-        super().__init__(errwrap(msg))
-
-
-class NotetypeMismatchError(RuntimeError):
-    @beartype
-    def __init__(self, decknote: DeckNote, new_notetype: Notetype):
-        msg = f"Notetype '{decknote.model}' "
-        msg += f"specified in DeckNote with GUID '{decknote.guid}' "
-        msg += f"does not match passed notetype '{new_notetype}'. "
-        msg += "This should NEVER happen, "
-        msg += "and indicates a bug in the caller to 'update_note()'."
-        super().__init__(errwrap(msg))
-
-
-class NotetypeKeyError(RuntimeError):
-    @beartype
-    def __init__(self, key: str, name: str):
-        msg = f"""
-        Expected key {key} not found in notetype '{name}' parsed from a
-        '{MODELS_FILE}' file in the current repository (may be contained in a
-        subdirectory).
-        """
-        super().__init__(errwrap(msg))
-
-
-class NoteFieldKeyError(RuntimeError):
-    @beartype
-    def __init__(self, key: str, nid: int):
-        msg = f"""
-        Expected field {key} not found in note '{nid}'. This should *never*
-        happen, and indicates a serious failure, since we only ever index
-        `anki.notes.Note` objects on names pulled from their own notetype
-        dictionary.
-        """
-        super().__init__(errwrap(msg))
-
-
-class UnnamedNotetypeError(RuntimeError):
-    @beartype
-    def __init__(self, nt: NotetypeDict):
-        msg = f"""
-        Failed to find 'name' field for a notetype while parsing
-        a '{MODELS_FILE}' file in the current repository (may be
-        contained in a subdirectory):
-        """
-        super().__init__(errwrap(msg) + "\n" + str(nt))
-
-
-class SQLiteLockError(RuntimeError):
-    @beartype
-    def __init__(self, col_file: File, err: sqlite3.DatabaseError):
-        if str(err) == DATABASE_LOCKED_MSG:
-            header = f"fatal: {DATABASE_LOCKED_MSG} (Anki must not be running)."
-            super().__init__(header)
-            return
-        header = "Unexpected SQLite3 error while attempting to acquire lock on file: "
-        header += f"'{col_file}':"
-        msg = f"""
-        A 'sqlite3.DatabaseError' was raised with error message: '{str(err)}'.
-        This may indicate that either the database file at the location
-        specified above is corrupted, or the config file at '.ki/config' is
-        pointing to the wrong location. (The latter may occur in the unlikely
-        event that the collection file in the Anki data directory has been
-        accidentally overwritten.)
-        """
-        super().__init__(f"{header}\n{errwrap(msg)}")
-
-
-class MissingMediaDirectoryError(RuntimeError):
-    @beartype
-    def __init__(self, col_path: str, media_dir: Path):
-        top = f"Missing or bad Anki collection media directory '{media_dir}' "
-        top += f"while processing collection '{col_path}':"
-        msg = """
-        This should *never* happen, as Anki generates a media directory at the
-        relevant location whenever a `Collection` object is instantiated.  It
-        is possible that the collection's containing directory was manually
-        tampered with, or an old version of Anki incompatible with ki is
-        installed.
-        """
-        super().__init__(f"{top}\n{errwrap(msg)}")
-
-
-class AnkiAlreadyOpenError(RuntimeError):
-    @beartype
-    def __init__(self, msg: str):
-        super().__init__(f"fatal: {msg}")
-
-
-class MissingTidyExecutableError(FileNotFoundError):
-    @beartype
-    def __init__(self, err: FileNotFoundError):
-        top = "Command not found: 'tidy' (Is 'html5-tidy' installed?)"
-        msg = f"Original exception: {err}"
-        super().__init__(f"{top}\n{errwrap(msg)}")
-
-
-class AnkiDBNoteMissingFieldsError(RuntimeError):
-    @beartype
-    def __init__(self, decknote: DeckNote, nid: int, key: str):
-        top = f"fatal: Note with GUID '{decknote.guid}' missing DB field '{key}'"
-        msg = f"""
-        This is strange, should only happen if the `add_db_note()` call fails
-        or behaves strangely. This may indicate a bug in ki. Please report this
-        on GitHub at https://github.com/langfield/ki/issues. Note ID: '{nid}'.
-        """
-        super().__init__(f"{top}\n\n{errwrap(msg)}")
-
-
-class GitFileModeParseError(RuntimeError):
-    @beartype
-    def __init__(self, file: Path, out: str):
-        top = f"fatal: Failed to parse git file mode for media file '{file}'"
-        msg = """
-        A 'git ls-files' call is used to figure out the git file mode for
-        cloned media files. This is done in order to detect symlinks on
-        Windows, and follow them manually. This error is raised when we are
-        unable to parse the output of 'git ls-files' for some reason or
-        another, which for a symlink called 'filename', should look like this:
-        """
-        example = "120000 a35bd1f49b7b9225a76d052e9a35fb711a8646a6 0       filename"
-        msg2 = f"Actual unparsed git command output:\n{out}"
-        super().__init__(f"{top}\n\n{errwrap(msg)}\n\n{example}\n\n{msg2}")
-
-
-class NonEmptyWorkingTreeError(RuntimeError):
-    @beartype
-    def __init__(self, repo: git.Repo):
-        top = "fatal: Non-empty working tree in freshly cloned repo at "
-        top += f"'{repo.working_dir}'"
-
-        msg = """
-        The working tree in a fresh clone should always be empty, and so if it
-        isn't, this means that some files were either errantly generated during
-        the clone process, or were not committed when they should have been.
-        This may indicate a bug in ki. Please report this on GitHub at
-        https://github.com/langfield/ki/issues.
-        """
-        details = "\nUntracked files:\n"
-        for untracked in repo.untracked_files:
-            details += f"  * {untracked}\n"
-        details += "\nChanged files:\n"
-        for item in repo.index.diff(None):
-            details += f"  * {item.b_path}\n"
-        super().__init__(f"{top}\n\n{errwrap(msg)}\n{details}")
-
-
-# WARNINGS
-
-
-class NoteFieldValidationWarning(Warning):
-    @beartype
-    def __init__(self, nid: int, field: str, notetype: Notetype):
-        top = f"Warning: Bad field '{field}' for notetype '{notetype}' in note '{nid}'"
-        msg = "Try correcting the field name or changing the notetype."
-        msg += f"The fields for the notetype '{notetype}' are:"
-        fields: List[str] = [field.name for field in notetype.flds]
-        listing: str = "  " + "\n  ".join(fields)
-        super().__init__(f"{top}\n{errwrap(msg)}\n{listing}")
-
-
-class WrongFieldCountWarning(Warning):
-    @beartype
-    def __init__(self, decknote: DeckNote, names: List[str]):
-        top = f"Warning: Wrong number of fields for model '{decknote.model}'"
-        msg = f"""
-        The notetype '{decknote.model}' takes '{len(names)}' fields, but got
-        '{len(decknote.fields.keys())}' for note with GUID '{decknote.guid}'.
-        """
-        super().__init__(f"{top}\n{errwrap(msg)}")
-
-
-class InconsistentFieldNamesWarning(Warning):
-    @beartype
-    def __init__(self, x: str, y: str, decknote: DeckNote):
-        top = f"Warning: Inconsistent field names ('{x}' != '{y}')"
-        msg = f"""
-        Expected a field '{x}' for notetype '{decknote.model}', but got a field
-        '{y}' in note with GUID '{decknote.guid}'.
-        """
-        super().__init__(f"{top}\n{errwrap(msg)}")
-
-
-class DeletedFileNotFoundWarning(Warning):
-    @beartype
-    def __init__(self, path: Path):
-        top = f"Deleted file not found in source commit: '{path}'"
-        msg = """
-        Unexpected: this may indicate a bug in ki. The source commit is what we
-        are diffing against, and so we expect all files whose change type is
-        'DELETED' to appear in a checkout of that reference. However, we return
-        a 'Warning' instead of an 'Exception' in order to avoid interrupting
-        the execution of a 'push()' call where it is not strictly necessary.
-        """
-        super().__init__(f"{top}\n{errwrap(msg)}")
-
-
-class DiffTargetFileNotFoundWarning(Warning):
-    @beartype
-    def __init__(self, path: Path):
-        top = f"Diff target file not found: '{path}'"
-        msg1 = """
-        Unexpected: this sometimes happens when a git repository is copied into
-        a subdirectory of a ki repository, and then added with 'git add'
-        instead of being added as a git submodule with 'git submodule add'. If
-        git displayed a warning on a recent 'git add' command, refer to the
-        hints within that warning.
-        """
-        msg2 = """
-        Otherwise, this may indicate a bug in ki.  The caller prevents this
-        warning from being instantiated unless the git change type is one of
-        'ADDED', 'MODIFIED', or 'RENAMED'. In all cases, the file being diffed
-        should be extant in the target commit of the repository.  However, we
-        return a 'Warning' instead of an 'Exception' in order to avoid
-        interrupting the execution of a 'push()' call where it is not strictly
-        necessary.
-        """
-        super().__init__(f"{top}\n\n{errwrap(msg1)}\n\n{errwrap(msg2)}")
-
-
-class RenamedMediaFileWarning(Warning):
-    @beartype
-    def __init__(self, src: str, dst: str):
-        top = f"Media file '{src}' renamed to '{dst}'"
-        msg = """
-        This happens when we push a media file to a collection that already
-        contains another media file with the same name. In this case, Anki does
-        some deduplication by renaming the new one.
-        """
-        super().__init__(f"{top}\n{errwrap(msg)}")
-
-
-class NotetypeCollisionWarning(Warning):
-    @beartype
-    def __init__(self, model: Notetype, existing: Notetype):
-        msg = """
-        Collision: new notetype '{model.name}' has same name as existing
-        notetype with mid '{existing.id}', but hashes differ.
-        """
-        super().__init__(f"{errwrap(msg)}\n\n{nt_str(model)}\n\n{nt_str(existing)}")
-
-
-class EmptyNoteWarning(Warning):
-    @beartype
-    def __init__(self, note: Note, health: int):
-        top = f"Found empty note with nid '{note.id}'"
-        msg = f"""
-        Anki fields health check code: '{health}'
-        """
-        super().__init__(f"{top}\n{errwrap(msg)}")
-
-
-class DuplicateNoteWarning(Warning):
-    @beartype
-    def __init__(self, note: Note, health: int, rep: str):
-        top = "Failed to add duplicate note to collection"
-        msg = f"""
-        Notetype/fields of note with nid '{note.id}' are duplicate of existing note.
-        """
-        field = f"First field\n-----------\n{rep}"
-        code = f"Anki fields health check code: {health}"
-        super().__init__(f"{top}\n{errwrap(msg)}\n\n{field}\n\n{code}")
-
-
-class UnhealthyNoteWarning(Warning):
-    @beartype
-    def __init__(self, note: Note, health: int):
-        top = f"Note with nid '{note.id}' failed fields check with unknown error code"
-        msg = f"""
-        Anki fields health check code: '{health}'
-        """
-        super().__init__(f"{top}\n{errwrap(msg)}")
-
-
-class MediaDirectoryDeckNameCollisionWarning(Warning):
-    @beartype
-    def __init__(self):
-        top = "Decks with name '_media' skipped as name is reserved"
-        super().__init__(f"{top}")
-
-
-
-
-
-
-
-

Functions

-
-
def errwrap(msg: str) ‑> str
-
-

Wrap an error message to a fixed width.

-
- -Expand source code - -
@beartype
-def errwrap(msg: str) -> str:
-    """Wrap an error message to a fixed width."""
-    out: str = textwrap.fill(textwrap.dedent(msg), width=ERROR_MESSAGE_WIDTH)
-    out = out.lstrip()
-    out = out.rstrip()
-    return out
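For example (the message text is illustrative), errwrap() dedents, strips, and re-wraps a triple-quoted message to ERROR_MESSAGE_WIDTH columns:

msg = """
    Failed to push some commits because the tip of the current branch
    is behind the Anki remote collection.
"""
print(errwrap(msg))  # One paragraph, re-wrapped to at most 69 columns.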
-
-
-
def notetype_json(notetype: Notetype) ‑> str
-
-

Return the JSON for a notetype as a string.

-
- -Expand source code - -
@beartype
-def notetype_json(notetype: Notetype) -> str:
-    """Return the JSON for a notetype as a string."""
-    dictionary: Dict[str, Any] = dataclasses.asdict(notetype)
-    dictionary.pop("id")
-    inner = dictionary["dict"]
-    inner.pop("id")
-    inner.pop("mod")
-    dictionary["dict"] = inner
-    return json.dumps(dictionary, sort_keys=True, indent=4)
-
-
-
def nt_str(notetype: Notetype) ‑> str
-
-

Display a notetype and its JSON.

-
- -Expand source code - -
@beartype
-def nt_str(notetype: Notetype) -> str:
-    """Display a notetype and its JSON."""
-    # pylint: disable=invalid-name
-    s = notetype_json(notetype)
-    return f"JSON for '{notetype.id}':\n{s}"
-
-
-
-
-
-

Classes

-
-
class AddedMedia (file: File, new_name: str)
-
-

An added media file and its (possibly changed) filename.

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class AddedMedia:
-    """An added media file and its (possibly changed) filename."""
-
-    file: File
-    new_name: str
-
-

Class variables

-
-
var file : File
-
-
-
-
var new_name : str
-
-
-
-
-
-
class AnkiAlreadyOpenError (msg: str)
-
-

Unspecified run-time error.

-
- -Expand source code - -
class AnkiAlreadyOpenError(RuntimeError):
-    @beartype
-    def __init__(self, msg: str):
-        super().__init__(f"fatal: {msg}")
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
class AnkiDBNoteMissingFieldsError (decknote: DeckNote, nid: int, key: str)
-
-

Unspecified run-time error.

-
- -Expand source code - -
class AnkiDBNoteMissingFieldsError(RuntimeError):
-    @beartype
-    def __init__(self, decknote: DeckNote, nid: int, key: str):
-        top = f"fatal: Note with GUID '{decknote.guid}' missing DB field '{key}'"
-        msg = f"""
-        This is strange, should only happen if the `add_db_note()` call fails
-        or behaves strangely. This may indicate a bug in ki. Please report this
-        on GitHub at https://github.com/langfield/ki/issues. Note ID: '{nid}'.
-        """
-        super().__init__(f"{top}\n\n{errwrap(msg)}")
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
class CardFile (card: anki.cards.Card, file: File)
-
-

A card written to disk, either as a link or a file.

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class CardFile:
-    """A card written to disk, either as a link or a file."""
-
-    card: Card
-    file: File
-
-

Class variables

-
-
var card : anki.cards.Card
-
-
-
-
var fileFile
-
-
-
-
-
-
-class ColNote -(n: anki.notes.Note, new: bool, deck: str, title: str, markdown: bool, notetype: Notetype, sfld: str) -
-
-

A note that exists in the Anki DB.

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class ColNote:
-    """A note that exists in the Anki DB."""
-
-    n: Note
-    new: bool
-    deck: str
-    title: str
-    markdown: bool
-    notetype: Notetype
-    sfld: str
-
-

Class variables

-
-
var deck : str
-
-
-
-
var markdown : bool
-
-
-
-
var n : anki.notes.Note
-
-
-
-
var new : bool
-
-
-
-
var notetypeNotetype
-
-
-
-
var sfld : str
-
-
-
-
var title : str
-
-
-
-
-
-
-class CollectionChecksumError -(col_file: File) -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class CollectionChecksumError(RuntimeError):
-    @beartype
-    def __init__(self, col_file: File):
-        msg = f"Checksum mismatch on {col_file}. Was file changed?"
-        super().__init__(errwrap(msg))
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class Deck -(did: int, node: anki.decks_pb2.DeckTreeNode, deckd: Dir, mediad: Dir, children: list['Deck'], fullname: str) -
-
-

Deck(did: int, node: anki.decks_pb2.DeckTreeNode, deckd: ki.types.Dir, mediad: ki.types.Dir, children: list['Deck'], fullname: str)

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class Deck:
-    did: DeckId
-    node: DeckTreeNode
-    deckd: Dir
-    mediad: Dir
-    children: List["Deck"]
-    fullname: str
-
-

Class variables

-
-
var children : list[Deck]
-
-
-
-
var deckdDir
-
-
-
-
var did : int
-
-
-
-
var fullname : str
-
-
-
-
var mediadDir
-
-
-
-
var node : anki.decks_pb2.DeckTreeNode
-
-
-
-
-
-
-class DeckNote -(title: str, guid: str, deck: str, model: str, tags: list[str], fields: dict[str, str]) -
-
-

Flat (as possible) representation of a note, but with deck.

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class DeckNote:
-    """Flat (as possible) representation of a note, but with deck."""
-
-    title: str
-    guid: str
-    deck: str
-    model: str
-    tags: List[str]
-    fields: Dict[str, str]
-
-

Class variables

-
-
var deck : str
-
-
-
-
var fields : dict[str, str]
-
-
-
-
var guid : str
-
-
-
-
var model : str
-
-
-
-
var tags : list[str]
-
-
-
-
var title : str
-
-
-
-
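A sketch of constructing the DeckNote documented above by hand. In practice these are parsed from note files in the repository, and the GUID here is only a placeholder:

    from ki.types import DeckNote  # assumed import path

    note = DeckNote(
        title="Basic example",
        guid="placeholder-guid",   # normally read from the note file on disk
        deck="Default",
        model="Basic",
        tags=["example"],
        fields={"Front": "2 + 2 = ?", "Back": "4"},
    )
    assert note.fields["Back"] == "4"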
-
-
-class DeletedFileNotFoundWarning -(path: pathlib.Path) -
-
-

Base class for warning categories.

-
- -Expand source code - -
class DeletedFileNotFoundWarning(Warning):
-    @beartype
-    def __init__(self, path: Path):
-        top = f"Deleted file not found in source commit: '{path}'"
-        msg = """
-        Unexpected: this may indicate a bug in ki. The source commit is what we
-        are diffing against, and so we expect all files whose change type is
-        'DELETED' to appear in a checkout of that reference. However, we return
-        a 'Warning' instead of an 'Exception' in order to avoid interrupting
-        the execution of a 'push()' call where it is not strictly necessary.
-        """
-        super().__init__(f"{top}\n{errwrap(msg)}")
-
-

Ancestors

-
    -
  • builtins.Warning
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class Delta -(status: GitChangeType, path: File, relpath: pathlib.Path) -
-
-

The git delta for a single file.

-

We don't instead store a root and a relative path, because we need the File object to avoid making unnecessary syscalls to check that stuff exists.

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class Delta:
-    """
-    The git delta for a single file.
-
-    We don't instead store a root and a relative path, because we need the
-    `File` object to avoid making unnecessary syscalls to check that stuff
-    exists.
-    """
-
-    status: GitChangeType
-    path: File
-    relpath: Path
-
-

Class variables

-
-
var pathFile
-
-
-
-
var relpath : pathlib.Path
-
-
-
-
var statusGitChangeType
-
-
-
-
-
-
-class DiffTargetFileNotFoundWarning -(path: pathlib.Path) -
-
-

Base class for warning categories.

-
- -Expand source code - -
class DiffTargetFileNotFoundWarning(Warning):
-    @beartype
-    def __init__(self, path: Path):
-        top = f"Diff target file not found: '{path}'"
-        msg1 = """
-        Unexpected: this sometimes happens when a git repository is copied into
-        a subdirectory of a ki repository, and then added with 'git add'
-        instead of being added as a git submodule with 'git submodule add'. If
-        git displayed a warning on a recent 'git add' command, refer to the
-        hints within that warning.
-        """
-        msg2 = """
-        Otherwise, this may indicate a bug in ki.  The caller prevents this
-        warning from being instantiated unless the git change type is one of
-        'ADDED', 'MODIFIED', or 'RENAMED'. In all cases, the file being diffed
-        should be extant in the target commit of the repository.  However, we
-        return a 'Warning' instead of an 'Exception' in order to avoid
-        interrupting the execution of a 'push()' call where it is not strictly
-        necessary.
-        """
-        super().__init__(f"{top}\n\n{errwrap(msg1)}\n\n{errwrap(msg2)}")
-
-

Ancestors

-
    -
  • builtins.Warning
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class Dir -(*args, **kwargs) -
-
-

UNSAFE: Indicates that dir was extant when it was resolved.

-
- -Expand source code - -
class Dir(type(Path())):
-    """UNSAFE: Indicates that dir *was* extant when it was resolved."""
-
-

Ancestors

-
    -
  • pathlib.PosixPath
  • -
  • pathlib.Path
  • -
  • pathlib.PurePosixPath
  • -
  • pathlib.PurePath
  • -
-

Subclasses

- -
-
-class DotKi -(config: File, backups: EmptyDir) -
-
-

DotKi(config: ki.types.File, backups: ki.types.EmptyDir)

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class DotKi:
-    config: File
-    backups: EmptyDir
-
-

Class variables

-
-
var backupsEmptyDir
-
-
-
-
var configFile
-
-
-
-
-
-
-class DuplicateNoteWarning -(note: anki.notes.Note, health: int, rep: str) -
-
-

Base class for warning categories.

-
- -Expand source code - -
class DuplicateNoteWarning(Warning):
-    @beartype
-    def __init__(self, note: Note, health: int, rep: str):
-        top = "Failed to add duplicate note to collection"
-        msg = f"""
-        Notetype/fields of note with nid '{note.id}' are duplicate of existing note.
-        """
-        field = f"First field\n-----------\n{rep}"
-        code = f"Anki fields health check code: {health}"
-        super().__init__(f"{top}\n{errwrap(msg)}\n\n{field}\n\n{code}")
-
-

Ancestors

-
    -
  • builtins.Warning
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class EmptyDir -(*args, **kwargs) -
-
-

UNSAFE: Indicates that dir was empty (and extant) when it was resolved.

-
- -Expand source code - -
class EmptyDir(Dir):
-    """UNSAFE: Indicates that dir *was* empty (and extant) when it was resolved."""
-
-

Ancestors

-
    -
  • Dir
  • -
  • pathlib.PosixPath
  • -
  • pathlib.Path
  • -
  • pathlib.PurePosixPath
  • -
  • pathlib.PurePath
  • -
-
-
-class EmptyNoteWarning -(note: anki.notes.Note, health: int) -
-
-

Base class for warning categories.

-
- -Expand source code - -
class EmptyNoteWarning(Warning):
-    @beartype
-    def __init__(self, note: Note, health: int):
-        top = f"Found empty note with nid '{note.id}'"
-        msg = f"""
-        Anki fields health check code: '{health}'
-        """
-        super().__init__(f"{top}\n{errwrap(msg)}")
-
-

Ancestors

-
    -
  • builtins.Warning
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class ExpectedDirectoryButGotFileError -(path: pathlib.Path, info: str = '') -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class ExpectedDirectoryButGotFileError(RuntimeError):
-    @beartype
-    def __init__(self, path: Path, info: str = ""):
-        msg = "A directory was expected at this location, but got a file: "
-        msg += f"'{path}'{info.rstrip()}"
-        super().__init__(errwrap(msg))
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class ExpectedEmptyDirectoryButGotNonEmptyDirectoryError -(path: pathlib.Path, info: str = '') -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class ExpectedEmptyDirectoryButGotNonEmptyDirectoryError(RuntimeError):
-    @beartype
-    def __init__(self, path: Path, info: str = ""):
-        msg = "An empty directory was expected at this location, but it is nonempty: "
-        msg += f"'{path}'{info.rstrip()}"
-        super().__init__(errwrap(msg))
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class ExpectedFileButGotDirectoryError -(path: pathlib.Path, info: str = '') -
-
-

File not found.

-
- -Expand source code - -
class ExpectedFileButGotDirectoryError(FileNotFoundError):
-    @beartype
-    def __init__(self, path: Path, info: str = ""):
-        msg = "A file was expected at this location, but got a directory: "
-        msg += f"'{path}'{info.rstrip()}"
-        super().__init__(errwrap(msg))
-
-

Ancestors

-
    -
  • builtins.FileNotFoundError
  • -
  • builtins.OSError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class ExpectedNonexistentPathError -(path: pathlib.Path, info: str = '') -
-
-

File already exists.

-
- -Expand source code - -
class ExpectedNonexistentPathError(FileExistsError):
-    @beartype
-    def __init__(self, path: Path, info: str = ""):
-        top = f"""
-        Expected this path not to exist, but it does: '{path}'{info.rstrip()}
-        """
-        msg = """
-        If the path is to the `.ki/` metadata directory, this error may have
-        been caused by a `.gitignore` file that does not include `.ki/` (this
-        metadata should not be tracked by git). Check if this pattern is
-        included in the `.gitignore` file, and if it is not included, try
-        adding it.
-        """
-        super().__init__(f"{top}\n\n{errwrap(msg)}")
-
-

Ancestors

-
    -
  • builtins.FileExistsError
  • -
  • builtins.OSError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class Field -(name: str, ord: Optional[int]) -
-
-

A typechecked version of anki.models.FieldDict for use within ki.

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class Field:
-    """A typechecked version of `anki.models.FieldDict` for use within ki."""
-
-    name: str
-    ord: Optional[int]
-
-

Class variables

-
-
var name : str
-
-
-
-
var ord : Optional[int]
-
-
-
-
-
-
-class File -(*args, **kwargs) -
-
-

UNSAFE: Indicates that file was extant when it was resolved.

-
- -Expand source code - -
class File(type(Path())):
-    """UNSAFE: Indicates that file *was* extant when it was resolved."""
-
-

Ancestors

-
    -
  • pathlib.PosixPath
  • -
  • pathlib.Path
  • -
  • pathlib.PurePosixPath
  • -
  • pathlib.PurePath
  • -
-
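The File and Dir subclasses above (and their relatives below) carry no extra state; they only record what was observed about a path at the moment it was resolved, which is why their docstrings are marked UNSAFE. A small sketch of that discipline, using a made-up path:

    from pathlib import Path

    from ki.types import Dir, File  # assumed import path

    p = Path("collection.anki2").resolve()  # made-up path, purely illustrative
    if p.is_file():
        observed = File(p)   # p *was* an extant file when we checked
    elif p.is_dir():
        observed = Dir(p)    # p *was* an extant directory when we checked
    else:
        observed = p         # no claim is made about p at all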
-
-class GitChangeType -(*args, **kwds) -
-
-

Enum for git file change types.

-
- -Expand source code - -
class GitChangeType(Enum):
-    """Enum for git file change types."""
-
-    ADDED = "A"
-    DELETED = "D"
-    RENAMED = "R"
-    MODIFIED = "M"
-    TYPECHANGED = "T"
-
-

Ancestors

-
    -
  • enum.Enum
  • -
-

Class variables

-
-
var ADDED
-
-
-
-
var DELETED
-
-
-
-
var MODIFIED
-
-
-
-
var RENAMED
-
-
-
-
var TYPECHANGED
-
-
-
-
-
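Because the enum values above are the one-letter change codes git reports, a status letter from a diff can be mapped straight onto the enum by value. A minimal sketch, assuming the import path:

    from ki.types import GitChangeType  # assumed import path

    # Look the member up by value, i.e. by the one-letter git status code.
    change = GitChangeType("R")
    assert change is GitChangeType.RENAMED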
-
-class GitFileModeParseError -(file: pathlib.Path, out: str) -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class GitFileModeParseError(RuntimeError):
-    @beartype
-    def __init__(self, file: Path, out: str):
-        top = f"fatal: Failed to parse git file mode for media file '{file}'"
-        msg = """
-        A 'git ls-files' call is used to figure out the git file mode for
-        cloned media files. This is done in order to detect symlinks on
-        Windows, and follow them manually. This error is raised when we are
-        unable to parse the output of 'git ls-files' for some reason or
-        another, which for a symlink called 'filename', should look like this:
-        """
-        example = "120000 a35bd1f49b7b9225a76d052e9a35fb711a8646a6 0       filename"
-        msg2 = f"Actual unparsed git command output:\n{out}"
-        super().__init__(f"{top}\n\n{errwrap(msg)}\n\n{example}\n\n{msg2}")
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class GitHeadRefNotFoundError -(repo: git.repo.base.Repo, error: Exception) -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class GitHeadRefNotFoundError(RuntimeError):
-    @beartype
-    def __init__(self, repo: git.Repo, error: Exception):
-        msg = f"""
-        ValueError raised while trying to get rev 'HEAD' from repo at
-        '{repo.working_dir}': '{error}'. This may have occurred because there
-        are no commits in the current repository. However, this should never be
-        the case, because ki repositories must be instantiated with a 'ki clone
-        <collection>' command, and this command creates an initial commit.
-        """
-        super().__init__(errwrap(msg))
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class GitRefNotFoundError -(repo: git.repo.base.Repo, sha: str) -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class GitRefNotFoundError(RuntimeError):
-    @beartype
-    def __init__(self, repo: git.Repo, sha: str):
-        msg = f"Repo at '{repo.working_dir}' doesn't contain rev '{sha}'"
-        super().__init__(errwrap(msg))
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class InconsistentFieldNamesWarning -(x: str, y: str, decknote: DeckNote) -
-
-

Base class for warning categories.

-
- -Expand source code - -
class InconsistentFieldNamesWarning(Warning):
-    @beartype
-    def __init__(self, x: str, y: str, decknote: DeckNote):
-        top = f"Warning: Inconsistent field names ('{x}' != '{y}')"
-        msg = f"""
-        Expected a field '{x}' for notetype '{decknote.model}', but got a field
-        '{y}' in note with GUID '{decknote.guid}'.
-        """
-        super().__init__(f"{top}\n{errwrap(msg)}")
-
-

Ancestors

-
    -
  • builtins.Warning
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class KiRepo -(repo: git.repo.base.Repo, root: Dir, ki: Dir, col_file: File, backups_dir: Dir, config_file: File, hashes_file: File, models_file: File) -
-
-

UNSAFE: A ki repository, including:
  - .ki/hashes
  - .ki/config

-

Existence of collection path is guaranteed.

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class KiRepo:
-    """
-    UNSAFE: A ki repository, including:
-    - .ki/hashes
-    - .ki/config
-
-    Existence of collection path is guaranteed.
-    """
-
-    # pylint: disable=invalid-name
-
-    repo: git.Repo
-    root: Dir
-    ki: Dir
-    col_file: File
-    backups_dir: Dir
-    config_file: File
-    hashes_file: File
-    models_file: File
-
-

Class variables

-
-
var backups_dirDir
-
-
-
-
var col_fileFile
-
-
-
-
var config_fileFile
-
-
-
-
var hashes_fileFile
-
-
-
-
var kiDir
-
-
-
-
var models_fileFile
-
-
-
-
var repo : git.repo.base.Repo
-
-
-
-
var rootDir
-
-
-
-
-
-
-class KiRev -(kirepo: KiRepo, sha: str) -
-
-

UNSAFE: A repo-commit pair, where sha is guaranteed to be an extant commit hash of repo.

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class KiRev:
-    """
-    UNSAFE: A repo-commit pair, where `sha` is guaranteed to be an extant
-    commit hash of `repo`.
-    """
-
-    kirepo: KiRepo
-    sha: str
-
-

Class variables

-
-
var kirepoKiRepo
-
-
-
-
var sha : str
-
-
-
-
-
-class Link -(*args, **kwargs) -
-

UNSAFE: Indicates that this path was a symlink when tested.

-
- -Expand source code - -
class Link(type(Path())):
-    """UNSAFE: Indicates that this path was a symlink when tested."""
-
-

Ancestors

-
    -
  • pathlib.PosixPath
  • -
  • pathlib.Path
  • -
  • pathlib.PurePosixPath
  • -
  • pathlib.PurePath
  • -
-
-
-class MediaBytes -(file: File, old: bytes, new: bytes) -
-
-

A media file, its old bytes (from collection) and new bytes (from file).

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class MediaBytes:
-    """A media file, its old bytes (from collection) and new bytes (from file)."""
-
-    file: File
-    old: bytes
-    new: bytes
-
-

Class variables

-
-
var fileFile
-
-
-
-
var new : bytes
-
-
-
-
var old : bytes
-
-
-
-
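A sketch of how MediaBytes might be populated when comparing a media file on disk against the copy stored in the collection. The path is made up, and the empty old bytes stand in for whatever the collection currently holds:

    from pathlib import Path

    from ki.types import File, MediaBytes  # assumed import path

    path = Path("collection.media/example.png").resolve()  # made-up path
    if path.is_file():
        media = File(path)
        # 'old' would normally be read from the collection; b"" is a stand-in.
        mb = MediaBytes(file=media, old=b"", new=media.read_bytes())
        if mb.old != mb.new:
            pass  # a differing file would be written into the collection here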
-
-
-class MediaDirectoryDeckNameCollisionWarning -
-
-

Base class for warning categories.

-
- -Expand source code - -
class MediaDirectoryDeckNameCollisionWarning(Warning):
-    @beartype
-    def __init__(self):
-        top = "Decks with name '_media' skipped as name is reserved"
-        super().__init__(f"{top}")
-
-

Ancestors

-
    -
  • builtins.Warning
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class MissingDirectoryError -(path: pathlib.Path, info: str = '') -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class MissingDirectoryError(RuntimeError):
-    @beartype
-    def __init__(self, path: Path, info: str = ""):
-        msg = f"Directory not found: '{path}'{info.rstrip()}"
-        super().__init__(errwrap(msg))
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class MissingFieldOrdinalError -(ord: int, model: str) -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class MissingFieldOrdinalError(RuntimeError):
-    # pylint: disable=redefined-builtin
-
-    @beartype
-    def __init__(self, ord: int, model: str):
-        msg = f"Field with ordinal {ord} missing from notetype '{model}'."
-        super().__init__(errwrap(msg))
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class MissingFileError -(path: pathlib.Path, info: str = '') -
-
-

File not found.

-
- -Expand source code - -
class MissingFileError(FileNotFoundError):
-    @beartype
-    def __init__(self, path: Path, info: str = ""):
-        header = f"File not found: '{path}'"
-        msg = f"{info.rstrip()}"
-        super().__init__(f"{header}\n\n{errwrap(msg)}")
-
-

Ancestors

-
    -
  • builtins.FileNotFoundError
  • -
  • builtins.OSError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class MissingMediaDirectoryError -(col_path: str, media_dir: pathlib.Path) -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class MissingMediaDirectoryError(RuntimeError):
-    @beartype
-    def __init__(self, col_path: str, media_dir: Path):
-        top = f"Missing or bad Anki collection media directory '{media_dir}' "
-        top += f"while processing collection '{col_path}':"
-        msg = """
-        This should *never* happen, as Anki generates a media directory at the
-        relevant location whenever a `Collection` object is instantiated.  It
-        is possible that the collection's containing directory was manually
-        tampered with, or an old version of Anki incompatible with ki is
-        installed.
-        """
-        super().__init__(f"{top}\n{errwrap(msg)}")
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class MissingNoteIdError -(nid: int) -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class MissingNoteIdError(RuntimeError):
-    @beartype
-    def __init__(self, nid: int):
-        msg = f"Failed to locate note with nid '{nid}' in Anki database."
-        super().__init__(errwrap(msg))
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class MissingNotetypeError -(model: str) -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class MissingNotetypeError(RuntimeError):
-    @beartype
-    def __init__(self, model: str):
-        msg = f"""
-        Notetype '{model}' doesn't exist. Create it in Anki before adding notes
-        via ki. This may be caused by a corrupted '{MODELS_FILE}' file. The
-        models file must contain definitions for all models that appear in all
-        note files.
-        """
-        super().__init__(errwrap(msg))
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class MissingTidyExecutableError -(err: FileNotFoundError) -
-
-

File not found.

-
- -Expand source code - -
class MissingTidyExecutableError(FileNotFoundError):
-    @beartype
-    def __init__(self, err: FileNotFoundError):
-        top = "Command not found: 'tidy' (Is 'html5-tidy' installed?)"
-        msg = f"Original exception: {err}"
-        super().__init__(f"{top}\n{errwrap(msg)}")
-
-

Ancestors

-
    -
  • builtins.FileNotFoundError
  • -
  • builtins.OSError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class NoFile -(*args, **kwargs) -
-
-

A nonexistent file in an extant directory.

-
- -Expand source code - -
class NoFile(NoPath):
-    """A nonexistent file in an extant directory."""
-
-    @property
-    def parent(self):
-        return Dir(super().parent)
-
-

Ancestors

-
    -
  • NoPath
  • -
  • pathlib.PosixPath
  • -
  • pathlib.Path
  • -
  • pathlib.PurePosixPath
  • -
  • pathlib.PurePath
  • -
-

Instance variables

-
-
var parent
-
-

The logical parent of the path.

-
- -Expand source code - -
@property
-def parent(self):
-    return Dir(super().parent)
-
-
-
-
-
-class NoPath -(*args, **kwargs) -
-
-

UNSAFE: Indicates that path was not extant when it was resolved.

-
- -Expand source code - -
class NoPath(type(Path())):
-    """UNSAFE: Indicates that path *was not* extant when it was resolved."""
-
-

Ancestors

-
    -
  • pathlib.PosixPath
  • -
  • pathlib.Path
  • -
  • pathlib.PurePosixPath
  • -
  • pathlib.PurePath
  • -
-

Subclasses

- -
-
-class NonEmptyWorkingTreeError -(repo: git.repo.base.Repo) -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class NonEmptyWorkingTreeError(RuntimeError):
-    @beartype
-    def __init__(self, repo: git.Repo):
-        top = "fatal: Non-empty working tree in freshly cloned repo at "
-        top += f"'{repo.working_dir}'"
-
-        msg = """
-        The working tree in a fresh clone should always be empty, and so if it
-        isn't, this means that some files were either errantly generated during
-        the clone process, or were not committed when they should have been.
-        This may indicate a bug in ki. Please report this on GitHub at
-        https://github.com/langfield/ki/issues.
-        """
-        details = "\nUntracked files:\n"
-        for untracked in repo.untracked_files:
-            details += f"  * {untracked}\n"
-        details += "\nChanged files:\n"
-        for item in repo.index.diff(None):
-            details += f"  * {item.b_path}\n"
-        super().__init__(f"{top}\n\n{errwrap(msg)}\n{details}")
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class NotKiRepoError -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class NotKiRepoError(RuntimeError):
-    @beartype
-    def __init__(self):
-        msg = "fatal: not a ki repository (or any parent up to mount point /)\n"
-        msg += "Stopping at filesystem boundary."
-        super().__init__(errwrap(msg))
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class NoteDBRow -(nid: int, guid: str, mid: int, mod: int, usn: int, tags: str, flds: str, sfld: Union[str, int], csum: int, flags: int, data: str) -
-
-

NoteDBRow(nid: int, guid: str, mid: int, mod: int, usn: int, tags: str, flds: str, sfld: Union[str, int], csum: int, flags: int, data: str)

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class NoteDBRow:
-    nid: int
-    guid: str
-    mid: int
-    mod: int
-    usn: int
-    tags: str
-    flds: str
-    sfld: Union[str, int]
-    csum: int
-    flags: int
-    data: str
-
-

Class variables

-
-
var csum : int
-
-
-
-
var data : str
-
-
-
-
var flags : int
-
-
-
-
var flds : str
-
-
-
-
var guid : str
-
-
-
-
var mid : int
-
-
-
-
var mod : int
-
-
-
-
var nid : int
-
-
-
-
var sfld : Union[str, int]
-
-
-
-
var tags : str
-
-
-
-
var usn : int
-
-
-
-
-
-
-class NoteFieldKeyError -(key: str, nid: int) -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class NoteFieldKeyError(RuntimeError):
-    @beartype
-    def __init__(self, key: str, nid: int):
-        msg = f"""
-        Expected field {key} not found in note '{nid}'. This should *never*
-        happen, and indicates a serious failure, since we only ever index
-        `anki.notes.Note` objects on names pulled from their own notetype
-        dictionary.
-        """
-        super().__init__(errwrap(msg))
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class NoteFieldValidationWarning -(nid: int, field: str, notetype: Notetype) -
-
-

Base class for warning categories.

-
- -Expand source code - -
class NoteFieldValidationWarning(Warning):
-    @beartype
-    def __init__(self, nid: int, field: str, notetype: Notetype):
-        top = f"Warning: Bad field '{field}' for notetype '{notetype}' in note '{nid}'"
-        msg = "Try correcting the field name or changing the notetype."
-        msg += f" The fields for the notetype '{notetype}' are:"
-        fields: List[str] = [field.name for field in notetype.flds]
-        listing: str = "  " + "\n  ".join(fields)
-        super().__init__(f"{top}\n{errwrap(msg)}\n{listing}")
-
-

Ancestors

-
    -
  • builtins.Warning
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class NoteMetadata -(nid: int, mod: int, mid: int) -
-
-

The nid, mod, and mid of a note.

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class NoteMetadata:
-    """The nid, mod, and mid of a note."""
-
-    nid: int
-    mod: int
-    mid: int
-
-

Class variables

-
-
var mid : int
-
-
-
-
var mod : int
-
-
-
-
var nid : int
-
-
-
-
-
-
-class Notetype -(id: int, name: str, type: int, flds: list[Field], tmpls: list[Template], sortf: Field, dict: dict[str, typing.Any]) -
-
-

A typechecked version of anki.models.NotetypeDict for use within ki.

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class Notetype:
-    """A typechecked version of `anki.models.NotetypeDict` for use within ki."""
-
-    # pylint: disable=invalid-name
-
-    id: int
-    name: str
-    type: int
-    flds: List[Field]
-    tmpls: List[Template]
-    sortf: Field
-
-    # A copy of the `NotetypeDict` object as it was returned from the Anki
-    # database. We keep this around to preserve extra keys that may not always
-    # exist, but the ones above should be required for Anki to function.
-    dict: Dict[str, Any]
-
-

Class variables

-
-
var dict : dict[str, typing.Any]
-
-
-
-
var flds : list[Field]
-
-
-
-
var id : int
-
-
-
-
var name : str
-
-
-
-
var sortfField
-
-
-
-
var tmpls : list[Template]
-
-
-
-
var type : int
-
-
-
-
-
-
-class NotetypeCollisionWarning -(model: Notetype, existing: Notetype) -
-
-

Base class for warning categories.

-
- -Expand source code - -
class NotetypeCollisionWarning(Warning):
-    @beartype
-    def __init__(self, model: Notetype, existing: Notetype):
-        msg = f"""
-        Collision: new notetype '{model.name}' has same name as existing
-        notetype with mid '{existing.id}', but hashes differ.
-        """
-        super().__init__(f"{errwrap(msg)}\n\n{nt_str(model)}\n\n{nt_str(existing)}")
-
-

Ancestors

-
    -
  • builtins.Warning
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class NotetypeKeyError -(key: str, name: str) -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class NotetypeKeyError(RuntimeError):
-    @beartype
-    def __init__(self, key: str, name: str):
-        msg = f"""
-        Expected key {key} not found in notetype '{name}' parsed from a
-        '{MODELS_FILE}' file in the current repository (may be contained in a
-        subdirectory).
-        """
-        super().__init__(errwrap(msg))
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class NotetypeMismatchError -(decknote: DeckNote, new_notetype: Notetype) -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class NotetypeMismatchError(RuntimeError):
-    @beartype
-    def __init__(self, decknote: DeckNote, new_notetype: Notetype):
-        msg = f"Notetype '{decknote.model}' "
-        msg += f"specified in DeckNote with GUID '{decknote.guid}' "
-        msg += f"does not match passed notetype '{new_notetype}'. "
-        msg += "This should NEVER happen, "
-        msg += "and indicates a bug in the caller to 'update_note()'."
-        super().__init__(errwrap(msg))
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class Patch -(a: pathlib.Path, b: pathlib.Path, diff: whatthepatch.patch.diff) -
-
-

Relative paths and a Diff object.

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class Patch:
-    """Relative paths and a Diff object."""
-
-    a: Path
-    b: Path
-    diff: whatthepatch.patch.diffobj
-
-

Class variables

-
-
var a : pathlib.Path
-
-
-
-
var b : pathlib.Path
-
-
-
-
var diff : whatthepatch.patch.diff
-
-
-
-
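A sketch of building a Patch from whatthepatch output. The file names are invented, and the diff text would normally come from git rather than from a file on disk:

    from pathlib import Path

    import whatthepatch

    from ki.types import Patch  # assumed import path

    text = Path("changes.patch").read_text(encoding="UTF-8")  # made-up diff source
    diff = next(iter(whatthepatch.parse_patch(text)))  # a whatthepatch.patch.diffobj
    patch = Patch(a=Path("a/note.md"), b=Path("b/note.md"), diff=diff)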
-
-class PlannedLink -(link: NoFile, tgt: Union[File, Link]) -
-

A not-yet-created symlink path and its extant target.

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class PlannedLink:
-    """A not-yet-created symlink path and its extant target."""
-
-    link: NoFile
-    tgt: Union[File, Link]
-
-

Class variables

-
-
var link : NoFile
-
-
var tgt : Union[File, Link]
-
-
-
-
-
-
-class PseudoFile -(*args, **kwargs) -
-
-

UNSAFE: Indicates that path was extant but weird (e.g. a device or socket) when it was resolved.

-
- -Expand source code - -
class PseudoFile(type(Path())):
-    """
-    UNSAFE: Indicates that path was extant but weird (e.g. a device or socket)
-    when it was resolved.
-    """
-
-

Ancestors

-
    -
  • pathlib.PosixPath
  • -
  • pathlib.Path
  • -
  • pathlib.PurePosixPath
  • -
  • pathlib.PurePath
  • -
-
-
-class PushResult -(*args, **kwds) -
-
-

Enum for push() return codes.

-
- -Expand source code - -
class PushResult(Enum):
-    """Enum for `push()` return codes."""
-
-    NONTRIVIAL = "NONTRIVIAL"
-    UP_TO_DATE = "UP_TO_DATE"
-
-

Ancestors

-
    -
  • enum.Enum
  • -
-

Class variables

-
-
var NONTRIVIAL
-
-
-
-
var UP_TO_DATE
-
-
-
-
-
-
-class RenamedMediaFileWarning -(src: str, dst: str) -
-
-

Base class for warning categories.

-
- -Expand source code - -
class RenamedMediaFileWarning(Warning):
-    @beartype
-    def __init__(self, src: str, dst: str):
-        top = f"Media file '{src}' renamed to '{dst}'"
-        msg = """
-        This happens when we push a media file to a collection that already
-        contains another media file with the same name. In this case, Anki does
-        some deduplication by renaming the new one.
-        """
-        super().__init__(f"{top}\n{errwrap(msg)}")
-
-

Ancestors

-
    -
  • builtins.Warning
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class Rev -(repo: git.repo.base.Repo, sha: str) -
-
-

UNSAFE: A repo-commit pair, where sha is guaranteed to be an extant commit hash of repo.

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class Rev:
-    """
-    UNSAFE: A repo-commit pair, where `sha` is guaranteed to be an extant
-    commit hash of `repo`.
-    """
-
-    repo: git.Repo
-    sha: str
-
-

Class variables

-
-
var repo : git.repo.base.Repo
-
-
-
-
var sha : str
-
-
-
-
-
-
-class Root -(did: int, node: anki.decks_pb2.DeckTreeNode, deckd: None, mediad: None, children: list[Deck], fullname: str) -
-
-

Root(did: int, node: anki.decks_pb2.DeckTreeNode, deckd: None, mediad: None, children: list[ki.types.Deck], fullname: str)

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class Root:
-    did: DeckId
-    node: DeckTreeNode
-    deckd: None
-    mediad: None
-    children: List[Deck]
-    fullname: str
-
-

Class variables

-
-
var children : list[Deck]
-
-
-
-
var deckd : None
-
-
-
-
var did : int
-
-
-
-
var fullname : str
-
-
-
-
var mediad : None
-
-
-
-
var node : anki.decks_pb2.DeckTreeNode
-
-
-
-
-
-
-class SQLiteLockError -(col_file: File, err: sqlite3.DatabaseError) -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class SQLiteLockError(RuntimeError):
-    @beartype
-    def __init__(self, col_file: File, err: sqlite3.DatabaseError):
-        if str(err) == DATABASE_LOCKED_MSG:
-            header = f"fatal: {DATABASE_LOCKED_MSG} (Anki must not be running)."
-            super().__init__(header)
-            return
-        header = "Unexpected SQLite3 error while attempting to acquire lock on file: "
-        header += f"'{col_file}':"
-        msg = f"""
-        A 'sqlite3.DatabaseError' was raised with error message: '{str(err)}'.
-        This may indicate that either the database file at the location
-        specified above is corrupted, or the config file at '.ki/config' is
-        pointing to the wrong location. (The latter may occur in the unlikely
-        event that the collection file in the Anki data directory has been
-        accidentally overwritten.)
-        """
-        super().__init__(f"{header}\n{errwrap(msg)}")
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class Singleton -(*args, **kwargs) -
-
-

UNSAFE: A path consisting of a single component (e.g. file, not dir/file).

-
- -Expand source code - -
class Singleton(type(Path())):
-    """UNSAFE: A path consisting of a single component (e.g. `file`, not `dir/file`)."""
-
-

Ancestors

-
    -
  • pathlib.PosixPath
  • -
  • pathlib.Path
  • -
  • pathlib.PurePosixPath
  • -
  • pathlib.PurePath
  • -
-
-
-class StrangeExtantPathError -(path: pathlib.Path, info: str = '') -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class StrangeExtantPathError(RuntimeError):
-    @beartype
-    def __init__(self, path: Path, info: str = ""):
-        msg = "A normal file or directory was expected, but got a weird pseudofile "
-        msg += "(e.g. a socket, or a device): "
-        msg += f"'{path}'{info.rstrip()}"
-        super().__init__(errwrap(msg))
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class Submodule -(sm: git.objects.submodule.base.Submodule, sm_repo: git.repo.base.Repo, rel_root: pathlib.Path, branch: str) -
-
-

Submodule(sm: git.objects.submodule.base.Submodule, sm_repo: git.repo.base.Repo, rel_root: pathlib.Path, branch: str)

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class Submodule:
-    sm: git.Submodule
-    sm_repo: git.Repo
-    rel_root: Path
-    branch: str
-
-

Class variables

-
-
var branch : str
-
-
-
-
var rel_root : pathlib.Path
-
-
-
-
var sm : git.objects.submodule.base.Submodule
-
-
-
-
var sm_repo : git.repo.base.Repo
-
-
-
-
-
-
-class TargetExistsError -(target: pathlib.Path) -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class TargetExistsError(RuntimeError):
-    @beartype
-    def __init__(self, target: Path):
-        msg = f"fatal: destination path '{target}' already exists and is "
-        msg += "not an empty directory."
-        super().__init__(errwrap(msg))
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class Template -(name: str, qfmt: str, afmt: str, ord: Optional[int]) -
-
-

A typechecked version of anki.models.TemplateDict for use within ki.

-
- -Expand source code - -
@beartype
-@dataclass(frozen=True)
-class Template:
-    """A typechecked version of `anki.models.TemplateDict` for use within ki."""
-
-    name: str
-    qfmt: str
-    afmt: str
-    ord: Optional[int]
-
-

Class variables

-
-
var afmt : str
-
-
-
-
var name : str
-
-
-
-
var ord : Optional[int]
-
-
-
-
var qfmt : str
-
-
-
-
-
-
-class UnhealthyNoteWarning -(note: anki.notes.Note, health: int) -
-
-

Base class for warning categories.

-
- -Expand source code - -
class UnhealthyNoteWarning(Warning):
-    @beartype
-    def __init__(self, note: Note, health: int):
-        top = f"Note with nid '{note.id}' failed fields check with unknown error code"
-        msg = f"""
-        Anki fields health check code: '{health}'
-        """
-        super().__init__(f"{top}\n{errwrap(msg)}")
-
-

Ancestors

-
    -
  • builtins.Warning
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class UnnamedNotetypeError -(nt: dict[str, typing.Any]) -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class UnnamedNotetypeError(RuntimeError):
-    @beartype
-    def __init__(self, nt: NotetypeDict):
-        msg = f"""
-        Failed to find 'name' field for a notetype while parsing
-        a '{MODELS_FILE}' file in the current repository (may be
-        contained in a subdirectory):
-        """
-        super().__init__(errwrap(msg) + "\n" + str(nt))
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class UpdatesRejectedError -(col_file: File) -
-
-

Unspecified run-time error.

-
- -Expand source code - -
class UpdatesRejectedError(RuntimeError):
-    @beartype
-    def __init__(self, col_file: File):
-        msg = f"Failed to push some commits to '{col_file}'\n{HINT}"
-        super().__init__(errwrap(msg))
-
-

Ancestors

-
    -
  • builtins.RuntimeError
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-class WrongFieldCountWarning -(decknote: DeckNote, names: list[str]) -
-
-

Base class for warning categories.

-
- -Expand source code - -
class WrongFieldCountWarning(Warning):
-    @beartype
-    def __init__(self, decknote: DeckNote, names: List[str]):
-        top = f"Warning: Wrong number of fields for model '{decknote.model}'"
-        msg = f"""
-        The notetype '{decknote.model}' takes '{len(names)}' fields, but got
-        '{len(decknote.fields.keys())}' for note with GUID '{decknote.guid}'.
-        """
-        super().__init__(f"{top}\n{errwrap(msg)}")
-
-

Ancestors

-
    -
  • builtins.Warning
  • -
  • builtins.Exception
  • -
  • builtins.BaseException
  • -
-
-
-
-
-
\ No newline at end of file