diff --git a/nixos/doc/manual/default.nix b/nixos/doc/manual/default.nix
index 714b3efca20aa..2e07edd61c2a7 100644
--- a/nixos/doc/manual/default.nix
+++ b/nixos/doc/manual/default.nix
@@ -135,28 +135,32 @@ let
}
'';
+ prepareManualFromMD = ''
+ cp -r --no-preserve=all $inputs/* .
+
+ substituteInPlace ./manual.md \
+ --replace '@NIXOS_VERSION@' "${version}"
+ substituteInPlace ./configuration/configuration.md \
+ --replace \
+ '@MODULE_CHAPTERS@' \
+ ${lib.escapeShellArg (lib.concatMapStringsSep "\n" (p: "${p.value}") config.meta.doc)}
+ substituteInPlace ./nixos-options.md \
+ --replace \
+ '@NIXOS_OPTIONS_JSON@' \
+ ${optionsDoc.optionsJSON}/share/doc/nixos/options.json
+ substituteInPlace ./development/writing-nixos-tests.section.md \
+ --replace \
+ '@NIXOS_TEST_OPTIONS_JSON@' \
+ ${testOptionsDoc.optionsJSON}/share/doc/nixos/options.json
+ '';
+
manual-combined = runCommand "nixos-manual-combined"
{ inputs = lib.sourceFilesBySuffices ./. [ ".xml" ".md" ];
nativeBuildInputs = [ pkgs.nixos-render-docs pkgs.libxml2.bin pkgs.libxslt.bin ];
meta.description = "The NixOS manual as plain docbook XML";
}
''
- cp -r --no-preserve=all $inputs/* .
-
- substituteInPlace ./manual.md \
- --replace '@NIXOS_VERSION@' "${version}"
- substituteInPlace ./configuration/configuration.md \
- --replace \
- '@MODULE_CHAPTERS@' \
- ${lib.escapeShellArg (lib.concatMapStringsSep "\n" (p: "${p.value}") config.meta.doc)}
- substituteInPlace ./nixos-options.md \
- --replace \
- '@NIXOS_OPTIONS_JSON@' \
- ${optionsDoc.optionsJSON}/share/doc/nixos/options.json
- substituteInPlace ./development/writing-nixos-tests.section.md \
- --replace \
- '@NIXOS_TEST_OPTIONS_JSON@' \
- ${testOptionsDoc.optionsJSON}/share/doc/nixos/options.json
+ ${prepareManualFromMD}
nixos-render-docs -j $NIX_BUILD_CORES manual docbook \
--manpage-urls ${manpageUrls} \
@@ -193,7 +197,14 @@ in rec {
# Generate the NixOS manual.
manualHTML = runCommand "nixos-manual-html"
- { nativeBuildInputs = [ buildPackages.libxml2.bin buildPackages.libxslt.bin ];
+ { nativeBuildInputs =
+ if allowDocBook then [
+ buildPackages.libxml2.bin
+ buildPackages.libxslt.bin
+ ] else [
+ buildPackages.nixos-render-docs
+ ];
+ inputs = lib.optionals (! allowDocBook) (lib.sourceFilesBySuffices ./. [ ".md" ]);
meta.description = "The NixOS manual in HTML format";
allowedReferences = ["out"];
}
@@ -201,23 +212,44 @@ in rec {
# Generate the HTML manual.
dst=$out/share/doc/nixos
mkdir -p $dst
- xsltproc \
- ${manualXsltprocOptions} \
- --stringparam id.warnings "1" \
- --nonet --output $dst/ \
- ${docbook_xsl_ns}/xml/xsl/docbook/xhtml/chunktoc.xsl \
- ${manual-combined}/manual-combined.xml \
- |& tee xsltproc.out
- grep "^ID recommended on" xsltproc.out &>/dev/null && echo "error: some IDs are missing" && false
- rm xsltproc.out
-
- mkdir -p $dst/images/callouts
- cp ${docbook_xsl_ns}/xml/xsl/docbook/images/callouts/*.svg $dst/images/callouts/
cp ${../../../doc/style.css} $dst/style.css
cp ${../../../doc/overrides.css} $dst/overrides.css
cp -r ${pkgs.documentation-highlighter} $dst/highlightjs
+ ${if allowDocBook then ''
+ xsltproc \
+ ${manualXsltprocOptions} \
+ --stringparam id.warnings "1" \
+ --nonet --output $dst/ \
+ ${docbook_xsl_ns}/xml/xsl/docbook/xhtml/chunktoc.xsl \
+ ${manual-combined}/manual-combined.xml \
+ |& tee xsltproc.out
+ grep "^ID recommended on" xsltproc.out &>/dev/null && echo "error: some IDs are missing" && false
+ rm xsltproc.out
+
+ mkdir -p $dst/images/callouts
+ cp ${docbook_xsl_ns}/xml/xsl/docbook/images/callouts/*.svg $dst/images/callouts/
+ '' else ''
+ ${prepareManualFromMD}
+
+ # TODO generator is set like this because the docbook/md manual compare workflow will
+ # trigger if it's different
+ nixos-render-docs -j $NIX_BUILD_CORES manual html \
+ --manpage-urls ${manpageUrls} \
+ --revision ${lib.escapeShellArg revision} \
+ --generator "DocBook XSL Stylesheets V${docbook_xsl_ns.version}" \
+ --stylesheet style.css \
+ --stylesheet overrides.css \
+ --stylesheet highlightjs/mono-blue.css \
+ --script ./highlightjs/highlight.pack.js \
+ --script ./highlightjs/loader.js \
+ --toc-depth 1 \
+ --chunk-toc-depth 1 \
+ ./manual.md \
+ $dst/index.html
+ ''}
+
mkdir -p $out/nix-support
echo "nix-build out $out" >> $out/nix-support/hydra-build-products
echo "doc manual $dst" >> $out/nix-support/hydra-build-products
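The TODO above pins the generator string to the DocBook XSL value so that the docbook/md manual comparison workflow does not flag a difference that is purely cosmetic. A rough sketch of how such a comparison could normalize and diff two HTML renderings (the helper names and the normalization rule are assumptions, not part of this patch):

```python
# Hypothetical comparison helper: diff two HTML renderings while ignoring
# whitespace-only differences. Illustrative only.
import difflib
import re

def normalize(html: str) -> list[str]:
    # collapse runs of whitespace so formatting-only differences disappear
    return [re.sub(r"\s+", " ", line).strip() for line in html.splitlines() if line.strip()]

def html_diff(docbook_out: str, md_out: str) -> str:
    return "\n".join(difflib.unified_diff(
        normalize(docbook_out), normalize(md_out), "docbook", "md", lineterm=""))

if __name__ == "__main__":
    print(html_diff("<p>hello   world</p>", "<p>hello world</p>") or "outputs match")
```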
diff --git a/nixos/doc/manual/manual.md b/nixos/doc/manual/manual.md
index 1972eaeda8728..8cb766eeccf64 100644
--- a/nixos/doc/manual/manual.md
+++ b/nixos/doc/manual/manual.md
@@ -47,7 +47,10 @@ development/development.md
contributing-to-this-manual.chapter.md
```
-```{=include=} appendix
+```{=include=} appendix html:into-file=//options.html
nixos-options.md
+```
+
+```{=include=} appendix html:into-file=//release-notes.html
release-notes/release-notes.md
```
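The new `html:into-file=//options.html` and `html:into-file=//release-notes.html` attributes ask the HTML renderer to chunk each appendix into its own output file instead of inlining it. A small sketch of how such an attribute could be split out of an include-block info string (the parsing mechanics here are a guess for illustration, not the tool's actual parser):

```python
# Hypothetical parser for an include-block info string such as
# "{=include=} appendix html:into-file=//options.html".
def parse_include_info(info: str) -> tuple[str, dict[str, str]]:
    parts = info.split()
    kind = parts[1] if len(parts) > 1 else ""
    attrs: dict[str, str] = {}
    for part in parts[2:]:
        key, _, value = part.partition("=")
        attrs[key] = value
    return kind, attrs

kind, attrs = parse_include_info("{=include=} appendix html:into-file=//options.html")
assert kind == "appendix"
assert attrs["html:into-file"] == "//options.html"
```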
diff --git a/nixos/modules/services/web-apps/akkoma.md b/nixos/modules/services/web-apps/akkoma.md
index 5419940a68d63..83dd1a8b35f23 100644
--- a/nixos/modules/services/web-apps/akkoma.md
+++ b/nixos/modules/services/web-apps/akkoma.md
@@ -318,8 +318,8 @@ to make packages available in the chroot.
{option}`services.systemd.akkoma.serviceConfig.BindPaths` and
{option}`services.systemd.akkoma.serviceConfig.BindReadOnlyPaths` permit access to outside paths
through bind mounts. Refer to
-[{manpage}`systemd.exec(5)`](https://www.freedesktop.org/software/systemd/man/systemd.exec.html#BindPaths=)
-for details.
+[`BindPaths=`](https://www.freedesktop.org/software/systemd/man/systemd.exec.html#BindPaths=)
+of {manpage}`systemd.exec(5)` for details.
### Distributed deployment {#modules-services-akkoma-distributed-deployment}
diff --git a/nixos/modules/system/boot/networkd.nix b/nixos/modules/system/boot/networkd.nix
index 188f2f64dc848..d1ce3d13ee855 100644
--- a/nixos/modules/system/boot/networkd.nix
+++ b/nixos/modules/system/boot/networkd.nix
@@ -1948,7 +1948,7 @@ in
Extra command-line arguments to pass to systemd-networkd-wait-online.
These also affect per-interface `systemd-network-wait-online@` services.
- See [{manpage}`systemd-networkd-wait-online.service(8)`](https://www.freedesktop.org/software/systemd/man/systemd-networkd-wait-online.service.html) for all available options.
+ See {manpage}`systemd-networkd-wait-online.service(8)` for all available options.
'';
type = with types; listOf str;
default = [];
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/asciidoc.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/asciidoc.py
index 637185227e83f..7fc14c1631ef0 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/asciidoc.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/asciidoc.py
@@ -1,13 +1,11 @@
-from collections.abc import Mapping, MutableMapping, Sequence
+from collections.abc import Mapping, Sequence
from dataclasses import dataclass
from typing import Any, cast, Optional
from urllib.parse import quote
from .md import Renderer
-import markdown_it
from markdown_it.token import Token
-from markdown_it.utils import OptionsDict
_asciidoc_escapes = {
# escape all dots, just in case one is pasted at SOL
@@ -59,8 +57,8 @@ class AsciiDocRenderer(Renderer):
_list_stack: list[List]
_attrspans: list[str]
- def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None):
- super().__init__(manpage_urls, parser)
+ def __init__(self, manpage_urls: Mapping[str, str]):
+ super().__init__(manpage_urls)
self._parstack = [ Par("\n\n", "====") ]
self._list_stack = []
self._attrspans = []
@@ -96,142 +94,103 @@ def _list_close(self) -> str:
self._list_stack.pop()
return ""
- def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return asciidoc_escape(token.content)
- def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._break()
- def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return " +\n"
- def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f" "
- def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return f"``{asciidoc_escape(token.content)}``"
- def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
- return self.fence(token, tokens, i, options, env)
- def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return self.fence(token, tokens, i)
+ def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return f"link:{quote(cast(str, token.attrs['href']), safe='/:')}["
- def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "]"
- def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._enter_block(True)
# allow the next token to be a block or an inline.
return f'\n{self._list_stack[-1].head} {{empty}}'
- def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return "\n"
- def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._list_open(token, '*')
- def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._list_close()
- def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "__"
- def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "__"
- def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "**"
- def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "**"
- def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
attrs = f"[source,{token.info}]\n" if token.info else ""
code = token.content
if code.endswith('\n'):
code = code[:-1]
return f"{self._break(True)}{attrs}----\n{code}\n----"
- def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
pbreak = self._break(True)
self._enter_block(False)
return f"{pbreak}[quote]\n{self._parstack[-2].block_delim}\n"
- def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return f"\n{self._parstack[-1].block_delim}"
- def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("NOTE")
- def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
- def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("CAUTION")
- def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
- def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("IMPORTANT")
- def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
- def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("TIP")
- def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
- def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("WARNING")
- def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
- def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"{self._break()}[]"
- def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._break()
- def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._enter_block(True)
return ":: {empty}"
- def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return "\n"
- def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
content = asciidoc_escape(token.content)
if token.meta['name'] == 'manpage' and (url := self._manpage_urls.get(token.content)):
return f"link:{quote(url, safe='/:')}[{content}]"
return f"[.{token.meta['name']}]``{asciidoc_escape(token.content)}``"
- def inline_anchor(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def inline_anchor(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return f"[[{token.attrs['id']}]]"
- def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
(id_part, class_part) = ("", "")
if id := token.attrs.get('id'):
@@ -241,22 +200,17 @@ def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options
class_part = "kbd:["
self._attrspans.append("]")
else:
- return super().attr_span_begin(token, tokens, i, options, env)
+ return super().attr_span_begin(token, tokens, i)
else:
self._attrspans.append("")
return id_part + class_part
- def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._attrspans.pop()
- def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return token.markup.replace("#", "=") + " "
- def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "\n"
- def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._list_open(token, '.')
- def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._list_close()
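The change to the renderer classes is mechanical: the unused `options` and `env` parameters (and the optional `parser` constructor argument) are dropped, so every token hook now takes only the token, the surrounding token sequence, and the index. A minimal sketch of a hook written against the new shape (the class is illustrative; only the `(token, tokens, i)` signature is taken from this patch):

```python
# Illustrative only: the three-argument hook shape used after this refactor.
from collections.abc import Sequence
from markdown_it.token import Token

class ShoutingText:
    def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        # hooks no longer receive markdown-it's options/env objects
        return token.content.upper()
```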
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py
index 4a708b1f92c66..9649eb653d444 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py
@@ -1,12 +1,10 @@
-from collections.abc import Mapping, MutableMapping, Sequence
+from collections.abc import Mapping, Sequence
from dataclasses import dataclass
from typing import Any, cast, Optional
from .md import md_escape, md_make_code, Renderer
-import markdown_it
from markdown_it.token import Token
-from markdown_it.utils import OptionsDict
@dataclass(kw_only=True)
class List:
@@ -26,8 +24,8 @@ class CommonMarkRenderer(Renderer):
_link_stack: list[str]
_list_stack: list[List]
- def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None):
- super().__init__(manpage_urls, parser)
+ def __init__(self, manpage_urls: Mapping[str, str]):
+ super().__init__(manpage_urls)
self._parstack = [ Par("") ]
self._link_stack = []
self._list_stack = []
@@ -58,39 +56,29 @@ def _indent_raw(self, s: str) -> str:
return s
return f"\n{self._parstack[-1].indent}".join(s.splitlines())
- def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return self._indent_raw(md_escape(token.content))
- def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._maybe_parbreak()
- def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f" {self._break()}"
- def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._break()
- def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return md_make_code(token.content)
- def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
- return self.fence(token, tokens, i, options, env)
- def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return self.fence(token, tokens, i)
+ def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
self._link_stack.append(cast(str, token.attrs['href']))
return "["
- def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"]({md_escape(self._link_stack.pop())})"
- def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
lst = self._list_stack[-1]
lbreak = "" if not lst.first_item_seen else self._break() * (1 if lst.compact else 2)
lst.first_item_seen = True
@@ -100,132 +88,99 @@ def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options:
lst.next_idx += 1
self._enter_block(" " * (len(head) + 1))
return f'{lbreak}{head} '
- def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return ""
- def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.append(List(compact=bool(token.meta['compact'])))
return self._maybe_parbreak()
- def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.pop()
return ""
- def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "*"
- def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "*"
- def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "**"
- def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "**"
- def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
code = token.content
if code.endswith('\n'):
code = code[:-1]
pbreak = self._maybe_parbreak()
return pbreak + self._indent_raw(md_make_code(code, info=token.info, multiline=True))
- def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
pbreak = self._maybe_parbreak()
self._enter_block("> ")
return pbreak + "> "
- def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return ""
- def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("Note")
- def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
- def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("Caution")
- def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
- def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("Important")
- def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
- def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("Tip")
- def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
- def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("Warning")
- def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
- def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.append(List(compact=False))
return ""
- def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.pop()
return ""
- def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
pbreak = self._maybe_parbreak()
self._enter_block(" ")
# add an opening zero-width non-joiner to separate *our* emphasis from possible
# emphasis in the provided term
return f'{pbreak} - *{chr(0x200C)}'
- def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"{chr(0x200C)}*"
- def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return ""
- def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return ""
- def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
content = md_make_code(token.content)
if token.meta['name'] == 'manpage' and (url := self._manpage_urls.get(token.content)):
return f"[{content}]({url})"
return content # no roles in regular commonmark
- def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# there's no way we can emit attrspans correctly in all cases. we could use inline
# html for ids, but that would not round-trip. same holds for classes. since this
# renderer is only used for approximate options export and all of these things are
# not allowed in options we can ignore them for now.
return ""
- def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return token.markup + " "
- def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "\n"
- def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.append(
List(next_idx = cast(int, token.attrs.get('start', 1)),
compact = bool(token.meta['compact'])))
return self._maybe_parbreak()
- def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.pop()
return ""
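`dt_open` and `dt_close` wrap each definition term in `*…*` and place a zero-width non-joiner (U+200C) on the inside of the markers, so emphasis that is already present in the term cannot fuse with the wrapper and change its meaning. A self-contained illustration of the problem and the fix (the example term is made up):

```python
# Why the zero-width non-joiner matters: a term that itself starts or ends
# with '*' would otherwise merge with the wrapping emphasis markers.
ZWNJ = chr(0x200C)

term = "*already emphasized*"
naive = f"*{term}*"                 # "**already emphasized**": parses as strong text
guarded = f"*{ZWNJ}{term}{ZWNJ}*"   # the markers stay separate delimiters

print(repr(naive))
print(repr(guarded))
```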
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py
index e6a761dcf13fd..4c90606ff4558 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/docbook.py
@@ -1,9 +1,8 @@
-from collections.abc import Mapping, MutableMapping, Sequence
+from collections.abc import Mapping, Sequence
from typing import Any, cast, Optional, NamedTuple
import markdown_it
from markdown_it.token import Token
-from markdown_it.utils import OptionsDict
from xml.sax.saxutils import escape, quoteattr
from .md import Renderer
@@ -32,26 +31,23 @@ class Heading(NamedTuple):
partintro_closed: bool = False
class DocBookRenderer(Renderer):
- __output__ = "docbook"
_link_tags: list[str]
_deflists: list[Deflist]
_headings: list[Heading]
_attrspans: list[str]
- def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None):
- super().__init__(manpage_urls, parser)
+ def __init__(self, manpage_urls: Mapping[str, str]):
+ super().__init__(manpage_urls)
self._link_tags = []
self._deflists = []
self._headings = []
self._attrspans = []
- def render(self, tokens: Sequence[Token], options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
- result = super().render(tokens, options, env)
- result += self._close_headings(None, env)
+ def render(self, tokens: Sequence[Token]) -> str:
+ result = super().render(tokens)
+ result += self._close_headings(None)
return result
- def renderInline(self, tokens: Sequence[Token], options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def renderInline(self, tokens: Sequence[Token]) -> str:
# HACK to support docbook links and xrefs. link handling is only necessary because the docbook
# manpage stylesheet converts - in urls to a mathematical minus, which may be somewhat incorrect.
for i, token in enumerate(tokens):
@@ -65,135 +61,98 @@ def renderInline(self, tokens: Sequence[Token], options: OptionsDict,
if tokens[i + 1].type == 'text' and tokens[i + 1].content == token.attrs['href']:
tokens[i + 1].content = ''
- return super().renderInline(tokens, options, env)
+ return super().renderInline(tokens)
- def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return escape(token.content)
- def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "\n"
- def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# should check options.breaks() and emit hard break if so
return "\n"
- def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"<literal>{escape(token.content)}</literal>"
- def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"<programlisting>{escape(token.content)}</programlisting>"
- def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._link_tags.append(token.tag)
href = cast(str, token.attrs['href'])
(attr, start) = ('linkend', 1) if href[0] == '#' else ('xlink:href', 0)
return f"<{token.tag} {attr}={quoteattr(href[start:])}>"
- def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"</{self._link_tags.pop()}>"
- def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<listitem>"
- def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</listitem>\n"
# HACK open and close para for docbook change size. remove soon.
- def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
spacing = ' spacing="compact"' if token.meta.get('compact', False) else ''
return f"<para><itemizedlist{spacing}>\n"
- def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</itemizedlist></para>\n"
- def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<emphasis>"
- def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</emphasis>"
- def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return '<emphasis role="strong">'
- def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</emphasis>"
- def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
info = f" language={quoteattr(token.info)}" if token.info != "" else ""
return f"<programlisting{info}>{escape(token.content)}</programlisting>"
- def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "
"
- def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
# markdown-it emits tokens based on the html syntax tree, but docbook is
# slightly different. html has <dl>{<dt>term</dt><dd>def</dd>}</dl>,
# docbook has <variablelist>{<varlistentry><term>term</term><listitem>def</listitem></varlistentry>}</variablelist>.
# we have to reject multiple definitions for the same term for time being.
- def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._deflists.append(Deflist())
return "<variablelist>"
- def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._deflists.pop()
return "</variablelist>"
- def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._deflists[-1].has_dd = False
return "<varlistentry><term>"
- def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</term>"
- def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
if self._deflists[-1].has_dd:
raise Exception("multiple definitions per term not supported")
self._deflists[-1].has_dd = True
return "<listitem>"
- def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</listitem></varlistentry>"
- def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
if token.meta['name'] == 'command':
return f"<command>{escape(token.content)}</command>"
if token.meta['name'] == 'file':
@@ -216,8 +175,7 @@ def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: Opti
else:
return ref
raise NotImplementedError("md node not supported yet", token)
- def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# we currently support *only* inline anchors and the special .keycap class to produce
# docbook elements.
(id_part, class_part) = ("", "")
@@ -228,31 +186,26 @@ def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options
class_part = ""
self._attrspans.append("")
else:
- return super().attr_span_begin(token, tokens, i, options, env)
+ return super().attr_span_begin(token, tokens, i)
else:
self._attrspans.append("")
return id_part + class_part
- def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._attrspans.pop()
- def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
start = f' startingnumber="{token.attrs["start"]}"' if 'start' in token.attrs else ""
spacing = ' spacing="compact"' if token.meta.get('compact', False) else ''
return f""
- def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f""
- def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
hlevel = int(token.tag[1:])
- result = self._close_headings(hlevel, env)
- (tag, attrs) = self._heading_tag(token, tokens, i, options, env)
+ result = self._close_headings(hlevel)
+ (tag, attrs) = self._heading_tag(token, tokens, i)
self._headings.append(Heading(tag, hlevel))
attrs_str = "".join([ f" {k}={quoteattr(v)}" for k, v in attrs.items() ])
return result + f'<{tag}{attrs_str}>\n'
- def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
heading = self._headings[-1]
result = ''
if heading.container_tag == 'part':
@@ -264,16 +217,14 @@ def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options:
maybe_id = " xml:id=" + quoteattr(id + "-intro")
result += f""
return result
- def example_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
if id := token.attrs.get('id'):
return f""
return ""
- def example_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def example_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def _close_headings(self, level: Optional[int], env: MutableMapping[str, Any]) -> str:
+ def _close_headings(self, level: Optional[int]) -> str:
# we rely on markdown-it producing h{1..6} tags in token.tag for this to work
result = []
while len(self._headings):
@@ -286,8 +237,7 @@ def _close_headings(self, level: Optional[int], env: MutableMapping[str, Any]) -
break
return "\n".join(result)
- def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> tuple[str, dict[str, str]]:
+ def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> tuple[str, dict[str, str]]:
attrs = {}
if id := token.attrs.get('id'):
attrs['xml:id'] = cast(str, id)
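`_close_headings` keeps a stack of open container elements; when a heading of level N opens (or rendering finishes, with `level=None`) it closes every container whose level is greater than or equal to N. A stripped-down model of that stack discipline (tag names below are placeholders, not the renderer's actual output):

```python
# Simplified model of the heading/container stack used by the renderer.
from typing import Optional

class HeadingStack:
    def __init__(self) -> None:
        self._open: list[tuple[str, int]] = []  # (tag, level)

    def open(self, tag: str, level: int) -> str:
        closed = self.close_to(level)   # close siblings and deeper sections first
        self._open.append((tag, level))
        return closed + f"<{tag}>"

    def close_to(self, level: Optional[int]) -> str:
        out = []
        while self._open and (level is None or self._open[-1][1] >= level):
            out.append(f"</{self._open.pop()[0]}>")
        return "".join(out)

s = HeadingStack()
print(s.open("chapter", 1) + s.open("section", 2) + s.open("section", 2) + s.close_to(None))
# -> <chapter><section></section><section></section></chapter>
```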
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py
new file mode 100644
index 0000000000000..39d2da6adf8c0
--- /dev/null
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py
@@ -0,0 +1,245 @@
+from collections.abc import Mapping, Sequence
+from typing import cast, Optional, NamedTuple
+
+from html import escape
+from markdown_it.token import Token
+
+from .manual_structure import XrefTarget
+from .md import Renderer
+
+class UnresolvedXrefError(Exception):
+ pass
+
+class Heading(NamedTuple):
+ container_tag: str
+ level: int
+ html_tag: str
+ # special handling for part content: whether partinfo div was already closed from
+ # elsewhere or still needs closing.
+ partintro_closed: bool
+ # tocs are generated when the heading opens, but have to be emitted into the file
+ # after the heading titlepage (and maybe partinfo) has been closed.
+ toc_fragment: str
+
+_bullet_list_styles = [ 'disc', 'circle', 'square' ]
+_ordered_list_styles = [ '1', 'a', 'i', 'A', 'I' ]
+
+class HTMLRenderer(Renderer):
+ _xref_targets: Mapping[str, XrefTarget]
+
+ _headings: list[Heading]
+ _attrspans: list[str]
+ _hlevel_offset: int = 0
+ _bullet_list_nesting: int = 0
+ _ordered_list_nesting: int = 0
+
+ def __init__(self, manpage_urls: Mapping[str, str], xref_targets: Mapping[str, XrefTarget]):
+ super().__init__(manpage_urls)
+ self._headings = []
+ self._attrspans = []
+ self._xref_targets = xref_targets
+
+ def render(self, tokens: Sequence[Token]) -> str:
+ result = super().render(tokens)
+ result += self._close_headings(None)
+ return result
+
+ def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return escape(token.content)
+ def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return "<p>"
+ def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return "</p>"
+ def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return "<br />"
+ def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return "\n"
+ def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return f'<code class="literal">{escape(token.content)}</code>'
+ def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return self.fence(token, tokens, i)
+ def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ href = escape(cast(str, token.attrs['href']), True)
+ tag, title, target, text = "link", "", 'target="_top"', ""
+ if href.startswith('#'):
+ if not (xref := self._xref_targets.get(href[1:])):
+ raise UnresolvedXrefError(f"bad local reference, id {href} not known")
+ if tokens[i + 1].type == 'link_close':
+ tag, text = "xref", xref.title_html
+ if xref.title:
+ title = f'title="{escape(xref.title, True)}"'
+ target, href = "", xref.href()
+ return f'<a class="{tag}" {title} {target} href="{href}">{text}</a>'
+ def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return "</a>"
+ def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return ''
+ def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return ""
+ def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ extra = 'compact' if token.meta.get('compact', False) else ''
+ style = _bullet_list_styles[self._bullet_list_nesting % len(_bullet_list_styles)]
+ self._bullet_list_nesting += 1
+ return f'"
+ def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return ''
+ def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return ""
+ def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return ''
+ def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return ""
+ def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ # TODO use token.info. docbook doesn't so we can't yet.
+ return f'<pre class="programlisting">\n{escape(token.content)}</pre>'
+ def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return ''
+ def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return "
"
+ def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return 'Note
'
+ def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return ""
+ def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return 'Caution
'
+ def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return ""
+ def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return 'Important
'
+ def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return ""
+ def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return 'Tip
'
+ def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return ""
+ def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return 'Warning
'
+ def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return ""
+ def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return ''
+ def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return "
"
+ def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return ''
+ def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return ""
+ def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return ""
+ def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return ""
+ def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ if token.meta['name'] == 'command':
+ return f'{escape(token.content)}'
+ if token.meta['name'] == 'file':
+ return f'<code class="filename">{escape(token.content)}</code>'
+ if token.meta['name'] == 'var':
+ return f'<code class="varname">{escape(token.content)}</code>'
+ if token.meta['name'] == 'env':
+ return f'<code class="envar">{escape(token.content)}</code>'
+ if token.meta['name'] == 'option':
+ return f'<code class="option">{escape(token.content)}</code>'
+ if token.meta['name'] == 'manpage':
+ [page, section] = [ s.strip() for s in token.content.rsplit('(', 1) ]
+ section = section[:-1]
+ man = f"{page}({section})"
+ title = f'{escape(page)}'
+ vol = f"({escape(section)})"
+ ref = f'{title}{vol}'
+ if man in self._manpage_urls:
+ return f'{ref}'
+ else:
+ return ref
+ return super().myst_role(token, tokens, i)
+ def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ # we currently support *only* inline anchors and the special .keycap class to produce
+ # keycap-styled spans.
+ (id_part, class_part) = ("", "")
+ if s := token.attrs.get('id'):
+ id_part = f''
+ if s := token.attrs.get('class'):
+ if s == 'keycap':
+ class_part = ''
+ self._attrspans.append("")
+ else:
+ return super().attr_span_begin(token, tokens, i)
+ else:
+ self._attrspans.append("")
+ return id_part + class_part
+ def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return self._attrspans.pop()
+ def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ hlevel = int(token.tag[1:])
+ htag, hstyle = self._make_hN(hlevel)
+ if hstyle:
+ hstyle = f'style="{escape(hstyle, True)}"'
+ if anchor := cast(str, token.attrs.get('id', '')):
+ anchor = f''
+ result = self._close_headings(hlevel)
+ tag = self._heading_tag(token, tokens, i)
+ toc_fragment = self._build_toc(tokens, i)
+ self._headings.append(Heading(tag, hlevel, htag, tag != 'part', toc_fragment))
+ return (
+ f'{result}'
+ f''
+ f'
'
+ f'
'
+ f'
'
+ f' <{htag} class="title" {hstyle}>'
+ f' {anchor}'
+ )
+ def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ heading = self._headings[-1]
+ result = (
+ f' {heading.html_tag}>'
+ f'
'
+ f'
'
+ f'
'
+ )
+ if heading.container_tag == 'part':
+ result += '
'
+ else:
+ result += heading.toc_fragment
+ return result
+ def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ extra = 'compact' if token.meta.get('compact', False) else ''
+ start = f'start="{token.attrs["start"]}"' if 'start' in token.attrs else ""
+ style = _ordered_list_styles[self._ordered_list_nesting % len(_ordered_list_styles)]
+ self._ordered_list_nesting += 1
+ return f'
"
+ def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ if id := token.attrs.get('id'):
+ return f'
'
+ return ""
+ def example_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return ""
+
+ def _make_hN(self, level: int) -> tuple[str, str]:
+ return f"h{min(6, max(1, level + self._hlevel_offset))}", ""
+
+    def _maybe_close_partintro(self) -> str:
+        if self._headings:
+            heading = self._headings[-1]
+            if heading.container_tag == 'part' and not heading.partintro_closed:
+                self._headings[-1] = heading._replace(partintro_closed=True)
+                return heading.toc_fragment + "</div>"
+        return ""
+
+    def _close_headings(self, level: Optional[int]) -> str:
+        result = []
+        while len(self._headings) and (level is None or self._headings[-1].level >= level):
+            result.append(self._maybe_close_partintro())
+            result.append("</div>")
+            self._headings.pop()
+        return "\n".join(result)
+
+ def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return "section"
+ def _build_toc(self, tokens: Sequence[Token], i: int) -> str:
+ return ""
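The HTML renderer above follows the same pattern as the DocBook and manpage renderers: markdown-it produces a flat token stream, and one method per token type maps each token to an output fragment, with small stacks (`_headings`, `_attrspans`, `_ordered_list_nesting`) carrying whatever state has to survive between an `*_open` token and its matching `*_close`. A minimal, self-contained sketch of that dispatch pattern (the HTML emitted here is simplified and the function is not part of the tool's actual API):

```python
from html import escape

from markdown_it import MarkdownIt
from markdown_it.token import Token


def render_tokens(tokens: list[Token]) -> str:
    # dispatch on token.type, emitting one output fragment per token
    out: list[str] = []
    for tok in tokens:
        if tok.type == 'heading_open':
            out.append(f'<{tok.tag} class="title">')
        elif tok.type == 'heading_close':
            out.append(f'</{tok.tag}>')
        elif tok.type == 'paragraph_open':
            out.append('<p>')
        elif tok.type == 'paragraph_close':
            out.append('</p>')
        elif tok.type == 'fence':
            out.append(f'<pre class="programlisting">\n{escape(tok.content)}</pre>')
        elif tok.type == 'inline' and tok.children is not None:
            # keep only plain text children; a real renderer recurses here
            out.append("".join(escape(c.content) for c in tok.children if c.type == 'text'))
    return "".join(out)


print(render_tokens(MarkdownIt().parse("# Hello\n\nSome *text*.\n")))
# -> <h1 class="title">Hello</h1><p>Some text.</p>
```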
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manpage.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manpage.py
index 1b796d9f04861..a01aa1b4634b4 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manpage.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manpage.py
@@ -1,4 +1,4 @@
-from collections.abc import Mapping, MutableMapping, Sequence
+from collections.abc import Mapping, Sequence
from dataclasses import dataclass
from typing import Any, cast, Iterable, Optional
@@ -6,7 +6,6 @@
import markdown_it
from markdown_it.token import Token
-from markdown_it.utils import OptionsDict
from .md import Renderer
@@ -75,8 +74,6 @@ class List:
# horizontal motion in a line) we do attempt to copy the style of mdoc(7) semantic requests
# as appropriate for each markup element.
class ManpageRenderer(Renderer):
- __output__ = "man"
-
# whether to emit mdoc .Ql equivalents for inline code or just the contents. this is
# mainly used by the options manpage converter to not emit extra quotes in defaults
# and examples where it's already clear from context that the following text is code.
@@ -90,9 +87,8 @@ class ManpageRenderer(Renderer):
_list_stack: list[List]
_font_stack: list[str]
- def __init__(self, manpage_urls: Mapping[str, str], href_targets: dict[str, str],
- parser: Optional[markdown_it.MarkdownIt] = None):
- super().__init__(manpage_urls, parser)
+ def __init__(self, manpage_urls: Mapping[str, str], href_targets: dict[str, str]):
+ super().__init__(manpage_urls)
self._href_targets = href_targets
self._link_stack = []
self._do_parbreak_stack = []
@@ -126,36 +122,27 @@ def _admonition_close(self) -> str:
self._leave_block()
return ".RE"
- def render(self, tokens: Sequence[Token], options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def render(self, tokens: Sequence[Token]) -> str:
self._do_parbreak_stack = [ False ]
self._font_stack = [ "\\fR" ]
- return super().render(tokens, options, env)
+ return super().render(tokens)
- def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return man_escape(token.content)
- def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._maybe_parbreak()
- def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ".br"
- def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return " "
- def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
s = _protect_spaces(man_escape(token.content))
return f"\\fR\\(oq{s}\\(cq\\fP" if self.inline_code_is_quoted else s
- def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
- return self.fence(token, tokens, i, options, env)
- def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return self.fence(token, tokens, i)
+ def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
href = cast(str, token.attrs['href'])
self._link_stack.append(href)
text = ""
@@ -164,8 +151,7 @@ def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: Opti
text = self._href_targets[href]
self._font_stack.append("\\fB")
return f"\\fB{text}\0 <"
- def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
href = self._link_stack.pop()
text = ""
if self.link_footnotes is not None:
@@ -177,8 +163,7 @@ def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: Opt
text = "\\fR" + man_escape(f"[{idx}]")
self._font_stack.pop()
return f">\0 {text}{self._font_stack[-1]}"
- def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._enter_block()
lst = self._list_stack[-1]
maybe_space = '' if lst.compact or not lst.first_item_seen else '.sp\n'
@@ -192,36 +177,28 @@ def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options:
f'.RS {lst.width}\n'
f"\\h'-{len(head) + 1}'\\fB{man_escape(head)}\\fP\\h'1'\\c"
)
- def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return ".RE"
- def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.append(List(width=4, compact=bool(token.meta['compact'])))
return self._maybe_parbreak()
- def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.pop()
return ""
- def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._font_stack.append("\\fI")
return "\\fI"
- def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._font_stack.pop()
return self._font_stack[-1]
- def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._font_stack.append("\\fB")
return "\\fB"
- def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._font_stack.pop()
return self._font_stack[-1]
- def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
s = man_escape(token.content).rstrip('\n')
return (
'.sp\n'
@@ -231,8 +208,7 @@ def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsD
'.fi\n'
'.RE'
)
- def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
maybe_par = self._maybe_parbreak("\n")
self._enter_block()
return (
@@ -240,62 +216,44 @@ def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options
".RS 4\n"
f"\\h'-3'\\fI\\(lq\\(rq\\fP\\h'1'\\c"
)
- def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return ".RE"
- def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("Note")
- def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
- def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open( "Caution")
- def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
- def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open( "Important")
- def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
- def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open( "Tip")
- def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
- def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open( "Warning")
- def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
- def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ".RS 4"
- def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ".RE"
- def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ".PP"
- def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._enter_block()
return ".RS 4"
- def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return ".RE"
- def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
if token.meta['name'] in [ 'command', 'env', 'option' ]:
return f'\\fB{man_escape(token.content)}\\fP'
elif token.meta['name'] in [ 'file', 'var' ]:
@@ -306,23 +264,18 @@ def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: Opti
return f'\\fB{man_escape(page)}\\fP\\fR({man_escape(section)})\\fP'
else:
raise NotImplementedError("md node not supported yet", token)
- def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# mdoc knows no anchors so we can drop those, but classes must be rejected.
if 'class' in token.attrs:
- return super().attr_span_begin(token, tokens, i, options, env)
+ return super().attr_span_begin(token, tokens, i)
return ""
- def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
- def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported in manpages", token)
- def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported in manpages", token)
- def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# max item head width for a number, a dot, and one leading space and one trailing space
width = 3 + len(str(cast(int, token.meta['end'])))
self._list_stack.append(
@@ -330,7 +283,6 @@ def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, optio
next_idx = cast(int, token.attrs.get('start', 1)),
compact = bool(token.meta['compact'])))
return self._maybe_parbreak()
- def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
+ def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.pop()
return ""
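The `em_*`/`strong_*` handlers above keep a font stack: an opener pushes its groff font escape (`\fB`, `\fI`) and a closer pops and re-emits whatever font was active around it, so nested bold/italic unwinds correctly instead of unconditionally resetting to roman. A standalone sketch of that idea (illustrative names only, not the renderer's real interface):

```python
def emit_with_fonts(events: list[tuple[str, str]]) -> str:
    # ("open", escape) pushes a font, ("close", "") pops and restores the
    # enclosing font, ("text", s) emits s unchanged.
    font_stack = ["\\fR"]  # roman is the outermost font
    out = []
    for kind, payload in events:
        if kind == "open":
            font_stack.append(payload)
            out.append(payload)
        elif kind == "close":
            font_stack.pop()
            out.append(font_stack[-1])
        else:
            out.append(payload)
    return "".join(out)


# bold text containing italic text: \fBbold \fIitalic\fB back to bold\fR
print(emit_with_fonts([
    ("open", "\\fB"), ("text", "bold "),
    ("open", "\\fI"), ("text", "italic"),
    ("close", ""), ("text", " back to bold"),
    ("close", ""),
]))
```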
diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py
index efc8b02e8d6b2..40dea3c7d1d85 100644
--- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py
+++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py
@@ -1,160 +1,85 @@
import argparse
+import html
import json
+import re
+import xml.sax.saxutils as xml
from abc import abstractmethod
-from collections.abc import Mapping, MutableMapping, Sequence
+from collections.abc import Mapping, Sequence
from pathlib import Path
-from typing import Any, cast, NamedTuple, Optional, Union
-from xml.sax.saxutils import escape, quoteattr
+from typing import Any, cast, ClassVar, Generic, get_args, NamedTuple, Optional, Union
import markdown_it
from markdown_it.token import Token
-from markdown_it.utils import OptionsDict
-from . import options
-from .docbook import DocBookRenderer, Heading
-from .md import Converter
+from . import md, options
+from .docbook import DocBookRenderer, Heading, make_xml_id
+from .html import HTMLRenderer, UnresolvedXrefError
+from .manual_structure import check_structure, FragmentType, is_include, TocEntry, TocEntryType, XrefTarget
+from .md import Converter, Renderer
+from .utils import Freezeable
-class ManualDocBookRenderer(DocBookRenderer):
- _toplevel_tag: str
-
- def __init__(self, toplevel_tag: str, manpage_urls: Mapping[str, str],
- parser: Optional[markdown_it.MarkdownIt] = None):
- super().__init__(manpage_urls, parser)
- self._toplevel_tag = toplevel_tag
- self.rules |= {
- 'included_sections': lambda *args: self._included_thing("section", *args),
- 'included_chapters': lambda *args: self._included_thing("chapter", *args),
- 'included_preface': lambda *args: self._included_thing("preface", *args),
- 'included_parts': lambda *args: self._included_thing("part", *args),
- 'included_appendix': lambda *args: self._included_thing("appendix", *args),
- 'included_options': self.included_options,
- }
-
- def render(self, tokens: Sequence[Token], options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
- wanted = { 'h1': 'title' }
- wanted |= { 'h2': 'subtitle' } if self._toplevel_tag == 'book' else {}
- for (i, (tag, kind)) in enumerate(wanted.items()):
- if len(tokens) < 3 * (i + 1):
- raise RuntimeError(f"missing {kind} ({tag}) heading")
- token = tokens[3 * i]
- if token.type != 'heading_open' or token.tag != tag:
- assert token.map
- raise RuntimeError(f"expected {kind} ({tag}) heading in line {token.map[0] + 1}", token)
- for t in tokens[3 * len(wanted):]:
- if t.type != 'heading_open' or (info := wanted.get(t.tag)) is None:
- continue
- assert t.map
- raise RuntimeError(
- f"only one {info[0]} heading ({t.markup} [text...]) allowed per "
- f"{self._toplevel_tag}, but found a second in lines [{t.map[0] + 1}..{t.map[1]}]. "
- "please remove all such headings except the first or demote the subsequent headings.",
- t)
-
- # books get special handling because they have *two* title tags. doing this with
- # generic code is more complicated than it's worth. the checks above have verified
- # that both titles actually exist.
- if self._toplevel_tag == 'book':
- assert tokens[1].children
- assert tokens[4].children
- if (maybe_id := cast(str, tokens[0].attrs.get('id', ""))):
- maybe_id = "xml:id=" + quoteattr(maybe_id)
-            return (f'<book xmlns="http://docbook.org/ns/docbook" xmlns:xlink="http://www.w3.org/1999/xlink" {maybe_id}>'
-                    f' <title>{self.renderInline(tokens[1].children, options, env)}</title>'
-                    f' <subtitle>{self.renderInline(tokens[4].children, options, env)}</subtitle>'
-                    f' {super().render(tokens[6:], options, env)}'
-                    f'</book>')
-
- return super().render(tokens, options, env)
-
- def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> tuple[str, dict[str, str]]:
- (tag, attrs) = super()._heading_tag(token, tokens, i, options, env)
- # render() has already verified that we don't have supernumerary headings and since the
- # book tag is handled specially we can leave the check this simple
- if token.tag != 'h1':
- return (tag, attrs)
- return (self._toplevel_tag, attrs | {
- 'xmlns': "http://docbook.org/ns/docbook",
- 'xmlns:xlink': "http://www.w3.org/1999/xlink",
- })
-
- def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int,
- options: OptionsDict, env: MutableMapping[str, Any]) -> str:
- result = []
- # close existing partintro. the generic render doesn't really need this because
- # it doesn't have a concept of structure in the way the manual does.
- if self._headings and self._headings[-1] == Heading('part', 1):
- result.append("")
- self._headings[-1] = self._headings[-1]._replace(partintro_closed=True)
- # must nest properly for structural includes. this requires saving at least
- # the headings stack, but creating new renderers is cheap and much easier.
- r = ManualDocBookRenderer(tag, self._manpage_urls, None)
- for (included, path) in token.meta['included']:
- try:
- result.append(r.render(included, options, env))
- except Exception as e:
- raise RuntimeError(f"rendering {path}") from e
- return "".join(result)
- def included_options(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
- return cast(str, token.meta['rendered-options'])
-
- # TODO minimize docbook diffs with existing conversions. remove soon.
- def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
- return super().paragraph_open(token, tokens, i, options, env) + "\n "
- def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
- return "\n" + super().paragraph_close(token, tokens, i, options, env)
- def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
- return f"\n{escape(token.content)}"
- def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
- env: MutableMapping[str, Any]) -> str:
- info = f" language={quoteattr(token.info)}" if token.info != "" else ""
- return f"\n{escape(token.content)}"
-
-class DocBookConverter(Converter):
- def __renderer__(self, manpage_urls: Mapping[str, str],
- parser: Optional[markdown_it.MarkdownIt]) -> ManualDocBookRenderer:
- return ManualDocBookRenderer('book', manpage_urls, parser)
+class BaseConverter(Converter[md.TR], Generic[md.TR]):
+ # per-converter configuration for ns:arg=value arguments to include blocks, following
+ # the include type. html converters need something like this to support chunking, or
+ # another external method like the chunktocs docbook uses (but block options seem like
+ # a much nicer way of doing this).
+ INCLUDE_ARGS_NS: ClassVar[str]
+ INCLUDE_FRAGMENT_ALLOWED_ARGS: ClassVar[set[str]] = set()
+ INCLUDE_OPTIONS_ALLOWED_ARGS: ClassVar[set[str]] = set()
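+ #
+ # as an illustration only (the argument name here is hypothetical, not a
+ # promise that this exact argument exists), an include block carrying such
+ # an ns:arg=value argument would look like:
+ #
+ #   ```{=include=} chapters html:into-file=//installation.html
+ #   installation/installation.md
+ #   ```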
_base_paths: list[Path]
- _revision: str
-
- def __init__(self, manpage_urls: Mapping[str, str], revision: str):
- super().__init__(manpage_urls)
- self._revision = revision
+ _current_type: list[TocEntryType]
- def convert(self, file: Path) -> str:
- self._base_paths = [ file ]
+ def convert(self, infile: Path, outfile: Path) -> None:
+ self._base_paths = [ infile ]
+ self._current_type = ['book']
try:
- with open(file, 'r') as f:
- return self._render(f.read())
+ tokens = self._parse(infile.read_text())
+ self._postprocess(infile, outfile, tokens)
+ converted = self._renderer.render(tokens)
+ outfile.write_text(converted)
except Exception as e:
- raise RuntimeError(f"failed to render manual {file}") from e
+ raise RuntimeError(f"failed to render manual {infile}") from e
+
+ def _postprocess(self, infile: Path, outfile: Path, tokens: Sequence[Token]) -> None:
+ pass
- def _parse(self, src: str, env: Optional[MutableMapping[str, Any]] = None) -> list[Token]:
- tokens = super()._parse(src, env)
+ def _parse(self, src: str) -> list[Token]:
+ tokens = super()._parse(src)
+ check_structure(self._current_type[-1], tokens)
for token in tokens:
- if token.type != "fence" or not token.info.startswith("{=include=} "):
+ if not is_include(token):
+ continue
+ directive = token.info[12:].split()
+ if not directive:
continue
- typ = token.info[12:].strip()
+ args = { k: v for k, _sep, v in map(lambda s: s.partition('='), directive[1:]) }
+ typ = directive[0]
if typ == 'options':
token.type = 'included_options'
- self._parse_options(token)
- elif typ in [ 'sections', 'chapters', 'preface', 'parts', 'appendix' ]:
- token.type = 'included_' + typ
- self._parse_included_blocks(token, env)
+ self._process_include_args(token, args, self.INCLUDE_OPTIONS_ALLOWED_ARGS)
+ self._parse_options(token, args)
else:
- raise RuntimeError(f"unsupported structural include type '{typ}'")
+ fragment_type = typ.removesuffix('s')
+ if fragment_type not in get_args(FragmentType):
+ raise RuntimeError(f"unsupported structural include type '{typ}'")
+ self._current_type.append(cast(FragmentType, fragment_type))
+ token.type = 'included_' + typ
+ self._process_include_args(token, args, self.INCLUDE_FRAGMENT_ALLOWED_ARGS)
+ self._parse_included_blocks(token, args)
+ self._current_type.pop()
return tokens
- def _parse_included_blocks(self, token: Token, env: Optional[MutableMapping[str, Any]]) -> None:
+ def _process_include_args(self, token: Token, args: dict[str, str], allowed: set[str]) -> None:
+ ns = self.INCLUDE_ARGS_NS + ":"
+ args = { k[len(ns):]: v for k, v in args.items() if k.startswith(ns) }
+ if unknown := set(args.keys()) - allowed:
+ assert token.map
+ raise RuntimeError(f"unrecognized include argument in line {token.map[0] + 1}", unknown)
+ token.meta['include-args'] = args
+
+ def _parse_included_blocks(self, token: Token, block_args: dict[str, str]) -> None:
assert token.map
included = token.meta['included'] = []
for (lnum, line) in enumerate(token.content.splitlines(), token.map[0] + 2):
@@ -165,13 +90,13 @@ def _parse_included_blocks(self, token: Token, env: Optional[MutableMapping[str,
try:
self._base_paths.append(path)
with open(path, 'r') as f:
- tokens = self._parse(f.read(), env)
+ tokens = self._parse(f.read())
included.append((tokens, path))
self._base_paths.pop()
except Exception as e:
raise RuntimeError(f"processing included file {path} from line {lnum}") from e
- def _parse_options(self, token: Token) -> None:
+ def _parse_options(self, token: Token, block_args: dict[str, str]) -> None:
assert token.map
items = {}
@@ -194,14 +119,479 @@ def _parse_options(self, token: Token) -> None:
" ".join(items.keys()))
try:
- conv = options.DocBookConverter(
- self._manpage_urls, self._revision, False, 'fragment', varlist_id, id_prefix)
with open(self._base_paths[-1].parent / source, 'r') as f:
- conv.add_options(json.load(f))
- token.meta['rendered-options'] = conv.finalize(fragment=True)
+ token.meta['id-prefix'] = id_prefix
+ token.meta['list-id'] = varlist_id
+ token.meta['source'] = json.load(f)
except Exception as e:
raise RuntimeError(f"processing options block in line {token.map[0] + 1}") from e
+class RendererMixin(Renderer):
+ _toplevel_tag: str
+ _revision: str
+
+ def __init__(self, toplevel_tag: str, revision: str, *args: Any, **kwargs: Any):
+ super().__init__(*args, **kwargs)
+ self._toplevel_tag = toplevel_tag
+ self._revision = revision
+ self.rules |= {
+ 'included_sections': lambda *args: self._included_thing("section", *args),
+ 'included_chapters': lambda *args: self._included_thing("chapter", *args),
+ 'included_preface': lambda *args: self._included_thing("preface", *args),
+ 'included_parts': lambda *args: self._included_thing("part", *args),
+ 'included_appendix': lambda *args: self._included_thing("appendix", *args),
+ 'included_options': self.included_options,
+ }
+
+ def render(self, tokens: Sequence[Token]) -> str:
+ # books get special handling because they have *two* title tags. doing this with
+ # generic code is more complicated than it's worth. the checks above have verified
+ # that both titles actually exist.
+ if self._toplevel_tag == 'book':
+ return self._render_book(tokens)
+
+ return super().render(tokens)
+
+ @abstractmethod
+ def _render_book(self, tokens: Sequence[Token]) -> str:
+ raise NotImplementedError()
+
+ @abstractmethod
+ def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int) -> str:
+ raise NotImplementedError()
+
+ @abstractmethod
+ def included_options(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ raise NotImplementedError()
+
+class ManualDocBookRenderer(RendererMixin, DocBookRenderer):
+ def __init__(self, toplevel_tag: str, revision: str, manpage_urls: Mapping[str, str]):
+ super().__init__(toplevel_tag, revision, manpage_urls)
+
+ def _render_book(self, tokens: Sequence[Token]) -> str:
+ assert tokens[1].children
+ assert tokens[4].children
+ if (maybe_id := cast(str, tokens[0].attrs.get('id', ""))):
+ maybe_id = "xml:id=" + xml.quoteattr(maybe_id)
+        return (f'<book xmlns="http://docbook.org/ns/docbook" xmlns:xlink="http://www.w3.org/1999/xlink" {maybe_id}>'
+                f' <title>{self.renderInline(tokens[1].children)}</title>'
+                f' <subtitle>{self.renderInline(tokens[4].children)}</subtitle>'
+                f' {super(DocBookRenderer, self).render(tokens[6:])}'
+                f'</book>')
+
+ def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> tuple[str, dict[str, str]]:
+ (tag, attrs) = super()._heading_tag(token, tokens, i)
+ # render() has already verified that we don't have supernumerary headings and since the
+ # book tag is handled specially we can leave the check this simple
+ if token.tag != 'h1':
+ return (tag, attrs)
+ return (self._toplevel_tag, attrs | {
+ 'xmlns': "http://docbook.org/ns/docbook",
+ 'xmlns:xlink': "http://www.w3.org/1999/xlink",
+ })
+
+ def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int) -> str:
+ result = []
+ # close existing partintro. the generic render doesn't really need this because
+ # it doesn't have a concept of structure in the way the manual does.
+ if self._headings and self._headings[-1] == Heading('part', 1):
+ result.append("")
+ self._headings[-1] = self._headings[-1]._replace(partintro_closed=True)
+ # must nest properly for structural includes. this requires saving at least
+ # the headings stack, but creating new renderers is cheap and much easier.
+ r = ManualDocBookRenderer(tag, self._revision, self._manpage_urls)
+ for (included, path) in token.meta['included']:
+ try:
+ result.append(r.render(included))
+ except Exception as e:
+ raise RuntimeError(f"rendering {path}") from e
+ return "".join(result)
+ def included_options(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ conv = options.DocBookConverter(self._manpage_urls, self._revision, False, 'fragment',
+ token.meta['list-id'], token.meta['id-prefix'])
+ conv.add_options(token.meta['source'])
+ return conv.finalize(fragment=True)
+
+ # TODO minimize docbook diffs with existing conversions. remove soon.
+ def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return super().paragraph_open(token, tokens, i) + "\n "
+ def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return "\n" + super().paragraph_close(token, tokens, i)
+ def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ return f"\n{xml.escape(token.content)}"
+ def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
+ info = f" language={xml.quoteattr(token.info)}" if token.info != "" else ""
+ return f"\n{xml.escape(token.content)}"
+
+class DocBookConverter(BaseConverter[ManualDocBookRenderer]):
+ INCLUDE_ARGS_NS = "docbook"
+
+ def __init__(self, manpage_urls: Mapping[str, str], revision: str):
+ super().__init__()
+ self._renderer = ManualDocBookRenderer('book', revision, manpage_urls)
+
+
+class HTMLParameters(NamedTuple):
+ generator: str
+ stylesheets: Sequence[str]
+ scripts: Sequence[str]
+ toc_depth: int
+ chunk_toc_depth: int
+
+class ManualHTMLRenderer(RendererMixin, HTMLRenderer):
+ _base_path: Path
+ _html_params: HTMLParameters
+
+ def __init__(self, toplevel_tag: str, revision: str, html_params: HTMLParameters,
+ manpage_urls: Mapping[str, str], xref_targets: dict[str, XrefTarget],
+ base_path: Path):
+ super().__init__(toplevel_tag, revision, manpage_urls, xref_targets)
+ self._base_path, self._html_params = base_path, html_params
+
+ def _push(self, tag: str, hlevel_offset: int) -> Any:
+ result = (self._toplevel_tag, self._headings, self._attrspans, self._hlevel_offset)
+ self._hlevel_offset += hlevel_offset
+ self._toplevel_tag, self._headings, self._attrspans = tag, [], []
+ return result
+
+ def _pop(self, state: Any) -> None:
+ (self._toplevel_tag, self._headings, self._attrspans, self._hlevel_offset) = state
+
+ def _render_book(self, tokens: Sequence[Token]) -> str:
+ assert tokens[4].children
+ title_id = cast(str, tokens[0].attrs.get('id', ""))
+ title = self._xref_targets[title_id].title
+ # subtitles don't have IDs, so we can't use xrefs to get them
+ subtitle = self.renderInline(tokens[4].children)
+
+ toc = TocEntry.of(tokens[0])
+ return "\n".join([
+ self._file_header(toc),
+ ' ',
+ '
',
+ '
',
+ f'
',
+ f'
{subtitle}
',
+ '
',
+ "
",
+ '
',
+ self._build_toc(tokens, 0),
+ super(HTMLRenderer, self).render(tokens[6:]),
+ '
',
+ self._file_footer(toc),
+ ])
+
+    def _file_header(self, toc: TocEntry) -> str:
+        prev_link, up_link, next_link = "", "", ""
+        prev_a, next_a, parent_title = "", "", "&nbsp;"
+        home = toc.root
+        if toc.prev:
+            prev_link = f'<link rel="prev" href="{toc.prev.target.href()}" title="{toc.prev.target.title}" />'
+            prev_a = f'<a accesskey="p" href="{toc.prev.target.href()}">Prev</a>'
+        if toc.parent:
+            up_link = (
+                f'<link rel="up" href="{toc.parent.target.href()}" '
+                f'title="{toc.parent.target.title}" />'
+            )
+        if (part := toc.parent) and part.kind != 'book':
+            assert part.target.title
+            parent_title = part.target.title
+        if toc.next:
+            next_link = f'<link rel="next" href="{toc.next.target.href()}" title="{toc.next.target.title}" />'
+            next_a = f'<a accesskey="n" href="{toc.next.target.href()}">Next</a>'
+        return "\n".join([
+            '<?xml version="1.0" encoding="utf-8" standalone="no"?>',
+            '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"'
+            ' "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">',
+            '<html xmlns="http://www.w3.org/1999/xhtml">',
+            ' <head>',
+            f'  <title>{toc.target.title}</title>',
+            "".join((f'  <link rel="stylesheet" type="text/css" href="{html.escape(style, True)}" />'
+                     for style in self._html_params.stylesheets)),
+            "".join((f'  <script src="{html.escape(script, True)}" type="text/javascript"></script>'
+                     for script in self._html_params.scripts)),
+            f'  <meta name="generator" content="{html.escape(self._html_params.generator, True)}" />',
+            f'  <link rel="home" href="{home.target.href()}" title="{home.target.title}" />',
+            f'  {up_link}{prev_link}{next_link}',
+            ' </head>',
+            ' <body>',
+            '  <div class="navheader">',
+            '   <table width="100%" summary="Navigation header">',
+            '    <tr>',
+            f'     <th colspan="3" align="center">{toc.target.title}</th>',
+            '    </tr>',
+            '    <tr>',
+            f'     <td width="20%" align="left">{prev_a}&nbsp;</td>',
+            f'     <th width="60%" align="center">{parent_title}</th>',
+            f'     <td width="20%" align="right">&nbsp;{next_a}</td>',
+            '    </tr>',
+            '   </table>',
+            '  </div>',
+        ])
+
+ def _file_footer(self, toc: TocEntry) -> str:
+ # prev, next = self._get_prev_and_next()
+        prev_a, up_a, home_a, next_a = "", "&nbsp;", "&nbsp;", ""
+ prev_text, up_text, next_text = "", "", ""
+ home = toc.root
+ if toc.prev:
+            prev_a = f'<a accesskey="p" href="{toc.prev.target.href()}">Prev</a>'
+ assert toc.prev.target.title
+ prev_text = toc.prev.target.title
+ if toc.parent:
+            home_a = f'<a accesskey="h" href="{home.target.href()}">Home</a>'
+ if toc.parent != home:
+                up_a = f'<a accesskey="u" href="{toc.parent.target.href()}">Up</a>'
+ if toc.next:
+            next_a = f'<a accesskey="n" href="{toc.next.target.href()}">Next</a>'
+ assert toc.next.target.title
+ next_text = toc.next.target.title
+ return "\n".join([
+ ' ',
+ ' ',
+ '