diff --git a/doc/.gitignore b/doc/.gitignore deleted file mode 100644 index b08285995f66f..0000000000000 --- a/doc/.gitignore +++ /dev/null @@ -1,11 +0,0 @@ -*.chapter.xml -*.section.xml -.version -functions/library/generated -functions/library/locations.xml -highlightjs -manual-full.xml -out -result -result-* -media diff --git a/doc/Makefile b/doc/Makefile deleted file mode 100644 index d5c0eeef69e27..0000000000000 --- a/doc/Makefile +++ /dev/null @@ -1,114 +0,0 @@ -MD_TARGETS=$(addsuffix .xml, $(basename $(shell find . -type f -regex '.*\.md$$' -not -name README.md))) - -PANDOC ?= pandoc - -pandoc_media_dir = media -# NOTE: Keep in sync with conversion script (/maintainers/scripts/db-to-md.sh). -# TODO: Remove raw-attribute when we can get rid of DocBook altogether. -pandoc_commonmark_enabled_extensions = +attributes+fenced_divs+footnotes+bracketed_spans+definition_lists+pipe_tables+raw_attribute -# Not needed: -# - docbook-reader/citerefentry-to-rst-role.lua (only relevant for DocBook → MarkDown/rST/MyST) -pandoc_flags = --extract-media=$(pandoc_media_dir) \ - --lua-filter=$(PANDOC_LUA_FILTERS_DIR)/diagram-generator.lua \ - --lua-filter=build-aux/pandoc-filters/myst-reader/roles.lua \ - --lua-filter=$(PANDOC_LINK_MANPAGES_FILTER) \ - --lua-filter=build-aux/pandoc-filters/docbook-writer/rst-roles.lua \ - --lua-filter=build-aux/pandoc-filters/docbook-writer/labelless-link-is-xref.lua \ - -f commonmark$(pandoc_commonmark_enabled_extensions)+smart - -.PHONY: all -all: validate format out/html/index.html out/epub/manual.epub - -.PHONY: render-md -render-md: ${MD_TARGETS} - -.PHONY: debug -debug: - nix-shell --run "xmloscopy --docbook5 ./manual.xml ./manual-full.xml" - -.PHONY: format -format: doc-support/result - find . -iname '*.xml' -type f | while read f; do \ - echo $$f ;\ - xmlformat --config-file "doc-support/result/xmlformat.conf" -i $$f ;\ - done - -.PHONY: fix-misc-xml -fix-misc-xml: - find . 
-iname '*.xml' -type f \ - -exec ../nixos/doc/varlistentry-fixer.rb {} ';' - -.PHONY: clean -clean: - rm -f ${MD_TARGETS} doc-support/result .version manual-full.xml functions/library/locations.xml functions/library/generated - rm -rf ./out/ ./highlightjs ./media - -.PHONY: validate -validate: manual-full.xml doc-support/result - jing doc-support/result/docbook.rng manual-full.xml - -out/html/index.html: doc-support/result manual-full.xml style.css highlightjs - mkdir -p out/html - xsltproc \ - --nonet --xinclude \ - --output $@ \ - doc-support/result/xhtml.xsl \ - ./manual-full.xml - - mkdir -p out/html/highlightjs/ - cp -r highlightjs out/html/ - - cp -r $(pandoc_media_dir) out/html/ - cp ./overrides.css out/html/ - cp ./style.css out/html/style.css - - mkdir -p out/html/images/callouts - cp doc-support/result/xsl/docbook/images/callouts/*.svg out/html/images/callouts/ - chmod u+w -R out/html/ - -out/epub/manual.epub: epub.xml - mkdir -p out/epub/scratch - xsltproc --nonet \ - --output out/epub/scratch/ \ - doc-support/result/epub.xsl \ - ./epub.xml - - echo "application/epub+zip" > mimetype - zip -0Xq "out/epub/manual.epub" mimetype - rm mimetype - cd "out/epub/scratch/" && zip -Xr9D "../manual.epub" * - rm -rf "out/epub/scratch/" - -highlightjs: doc-support/result - mkdir -p highlightjs - cp -r doc-support/result/highlightjs/highlight.pack.js highlightjs/ - cp -r doc-support/result/highlightjs/LICENSE highlightjs/ - cp -r doc-support/result/highlightjs/mono-blue.css highlightjs/ - cp -r doc-support/result/highlightjs/loader.js highlightjs/ - - -manual-full.xml: ${MD_TARGETS} .version functions/library/locations.xml functions/library/generated *.xml **/*.xml **/**/*.xml - xmllint --nonet --xinclude --noxincludenode manual.xml --output manual-full.xml - -.version: doc-support/result - ln -rfs ./doc-support/result/version .version - -doc-support/result: doc-support/default.nix - (cd doc-support; nix-build) - -functions/library/locations.xml: doc-support/result - ln -rfs ./doc-support/result/function-locations.xml functions/library/locations.xml - -functions/library/generated: doc-support/result - ln -rfs ./doc-support/result/function-docs functions/library/generated - -%.section.xml: %.section.md - $(PANDOC) $^ -t docbook \ - $(pandoc_flags) \ - -o $@ - -%.chapter.xml: %.chapter.md - $(PANDOC) $^ -t docbook \ - --top-level-division=chapter \ - $(pandoc_flags) \ - -o $@ diff --git a/doc/build-aux/pandoc-filters/docbook-reader/citerefentry-to-rst-role.lua b/doc/build-aux/pandoc-filters/docbook-reader/citerefentry-to-rst-role.lua deleted file mode 100644 index 281e85af27178..0000000000000 --- a/doc/build-aux/pandoc-filters/docbook-reader/citerefentry-to-rst-role.lua +++ /dev/null @@ -1,23 +0,0 @@ ---[[ -Converts Code AST nodes produced by pandoc’s DocBook reader -from citerefentry elements into AST for corresponding role -for reStructuredText. - -We use subset of MyST syntax (CommonMark with features from rST) -so let’s use the rST AST for rST features. 
- -Reference: https://www.sphinx-doc.org/en/master/usage/restructuredtext/roles.html#role-manpage -]] - -function Code(elem) - elem.classes = elem.classes:map(function (x) - if x == 'citerefentry' then - elem.attributes['role'] = 'manpage' - return 'interpreted-text' - else - return x - end - end) - - return elem -end diff --git a/doc/build-aux/pandoc-filters/docbook-writer/labelless-link-is-xref.lua b/doc/build-aux/pandoc-filters/docbook-writer/labelless-link-is-xref.lua deleted file mode 100644 index fa97729a28bcf..0000000000000 --- a/doc/build-aux/pandoc-filters/docbook-writer/labelless-link-is-xref.lua +++ /dev/null @@ -1,34 +0,0 @@ ---[[ -Converts Link AST nodes with empty label to DocBook xref elements. - -This is a temporary script to be able use cross-references conveniently -using syntax taken from MyST, while we still use docbook-xsl -for generating the documentation. - -Reference: https://myst-parser.readthedocs.io/en/latest/using/syntax.html#targets-and-cross-referencing -]] - -local function starts_with(start, str) - return str:sub(1, #start) == start -end - -local function escape_xml_arg(arg) - amps = arg:gsub('&', '&') - amps_quotes = amps:gsub('"', '"') - amps_quotes_lt = amps_quotes:gsub('<', '<') - - return amps_quotes_lt -end - -function Link(elem) - has_no_content = #elem.content == 0 - targets_anchor = starts_with('#', elem.target) - has_no_attributes = elem.title == '' and elem.identifier == '' and #elem.classes == 0 and #elem.attributes == 0 - - if has_no_content and targets_anchor and has_no_attributes then - -- xref expects idref without the pound-sign - target_without_hash = elem.target:sub(2, #elem.target) - - return pandoc.RawInline('docbook', '') - end -end diff --git a/doc/build-aux/pandoc-filters/docbook-writer/rst-roles.lua b/doc/build-aux/pandoc-filters/docbook-writer/rst-roles.lua deleted file mode 100644 index 5c1b034d0792d..0000000000000 --- a/doc/build-aux/pandoc-filters/docbook-writer/rst-roles.lua +++ /dev/null @@ -1,44 +0,0 @@ ---[[ -Converts AST for reStructuredText roles into corresponding -DocBook elements. - -Currently, only a subset of roles is supported. - -Reference: - List of roles: - https://www.sphinx-doc.org/en/master/usage/restructuredtext/roles.html - manpage: - https://tdg.docbook.org/tdg/5.1/citerefentry.html - file: - https://tdg.docbook.org/tdg/5.1/filename.html -]] - -function Code(elem) - if elem.classes:includes('interpreted-text') then - local tag = nil - local content = elem.text - if elem.attributes['role'] == 'manpage' then - tag = 'citerefentry' - local title, volnum = content:match('^(.+)%((%w+)%)$') - if title == nil then - -- No volnum in parentheses. - title = content - end - content = '' .. title .. '' .. (volnum ~= nil and ('' .. volnum .. '') or '') - elseif elem.attributes['role'] == 'file' then - tag = 'filename' - elseif elem.attributes['role'] == 'command' then - tag = 'command' - elseif elem.attributes['role'] == 'option' then - tag = 'option' - elseif elem.attributes['role'] == 'var' then - tag = 'varname' - elseif elem.attributes['role'] == 'env' then - tag = 'envar' - end - - if tag ~= nil then - return pandoc.RawInline('docbook', '<' .. tag .. '>' .. content .. '') - end - end -end diff --git a/doc/build-aux/pandoc-filters/link-manpages.nix b/doc/build-aux/pandoc-filters/link-manpages.nix deleted file mode 100644 index 2589a7c34251f..0000000000000 --- a/doc/build-aux/pandoc-filters/link-manpages.nix +++ /dev/null @@ -1,28 +0,0 @@ -{ pkgs ? import ../../.. 
{} }: -let - inherit (pkgs) lib; - manpageURLs = lib.importJSON (pkgs.path + "/doc/manpage-urls.json"); -in pkgs.writeText "link-manpages.lua" '' - --[[ - Adds links to known man pages that aren't already in a link. - ]] - - local manpage_urls = { - ${lib.concatStringsSep "\n" (lib.mapAttrsToList (man: url: - " [${builtins.toJSON man}] = ${builtins.toJSON url},") manpageURLs)} - } - - traverse = 'topdown' - - -- Returning false as the second value aborts processing of child elements. - function Link(elem) - return elem, false - end - - function Code(elem) - local is_man_role = elem.classes:includes('interpreted-text') and elem.attributes['role'] == 'manpage' - if is_man_role and manpage_urls[elem.text] ~= nil then - return pandoc.Link(elem, manpage_urls[elem.text]), false - end - end -'' diff --git a/doc/build-aux/pandoc-filters/myst-reader/roles.lua b/doc/build-aux/pandoc-filters/myst-reader/roles.lua deleted file mode 100644 index f4ef6d390b40b..0000000000000 --- a/doc/build-aux/pandoc-filters/myst-reader/roles.lua +++ /dev/null @@ -1,36 +0,0 @@ ---[[ -Replaces Str AST nodes containing {role}, followed by a Code node -by a Code node with attrs that would be produced by rST reader -from the role syntax. - -This is to emulate MyST syntax in Pandoc. -(MyST is a CommonMark flavour with rST features mixed in.) - -Reference: https://myst-parser.readthedocs.io/en/latest/syntax/syntax.html#roles-an-in-line-extension-point -]] - -function Inlines(inlines) - for i = #inlines-1,1,-1 do - local first = inlines[i] - local second = inlines[i+1] - local correct_tags = first.tag == 'Str' and second.tag == 'Code' - if correct_tags then - -- docutils supports alphanumeric strings separated by [-._:] - -- We are slightly more liberal for simplicity. - -- Allow preceding punctuation (eg '('), otherwise '({file}`...`)' - -- does not match. Also allow anything followed by a non-breaking space - -- since pandoc emits those after certain abbreviations (e.g. e.g.). - local prefix, role = first.text:match('^(.*){([-._+:%w]+)}$') - if role ~= nil and (prefix == '' or prefix:match("^.*[%p ]$") ~= nil) then - if prefix == '' then - inlines:remove(i) - else - first.text = prefix - end - second.attributes['role'] = role - second.classes:insert('interpreted-text') - end - end - end - return inlines -end diff --git a/doc/build-aux/pandoc-filters/myst-writer/roles.lua b/doc/build-aux/pandoc-filters/myst-writer/roles.lua deleted file mode 100644 index 0136bc5506529..0000000000000 --- a/doc/build-aux/pandoc-filters/myst-writer/roles.lua +++ /dev/null @@ -1,25 +0,0 @@ ---[[ -Replaces Code nodes with attrs that would be produced by rST reader -from the role syntax by a Str AST node containing {role}, followed by a Code node. - -This is to emulate MyST syntax in Pandoc. -(MyST is a CommonMark flavour with rST features mixed in.) - -Reference: https://myst-parser.readthedocs.io/en/latest/syntax/syntax.html#roles-an-in-line-extension-point -]] - -function Code(elem) - local role = elem.attributes['role'] - - if elem.classes:includes('interpreted-text') and role ~= nil then - elem.classes = elem.classes:filter(function (c) - return c ~= 'interpreted-text' - end) - elem.attributes['role'] = nil - - return { - pandoc.Str('{' .. role .. 
'}'), - elem, - } - end -end diff --git a/doc/builders.md b/doc/builders.md new file mode 100644 index 0000000000000..2e959422405b2 --- /dev/null +++ b/doc/builders.md @@ -0,0 +1,12 @@ +# Builders {#part-builders} + +```{=include=} chapters +builders/fetchers.chapter.md +builders/trivial-builders.chapter.md +builders/testers.chapter.md +builders/special.md +builders/images.md +hooks/index.md +languages-frameworks/index.md +builders/packages/index.md +``` diff --git a/doc/builders/images.md b/doc/builders/images.md new file mode 100644 index 0000000000000..5596784bfa487 --- /dev/null +++ b/doc/builders/images.md @@ -0,0 +1,13 @@ +# Images {#chap-images} + +This chapter describes tools for creating various types of images. + +```{=include=} sections +images/appimagetools.section.md +images/dockertools.section.md +images/ocitools.section.md +images/snaptools.section.md +images/portableservice.section.md +images/makediskimage.section.md +images/binarycache.section.md +``` diff --git a/doc/builders/images.xml b/doc/builders/images.xml deleted file mode 100644 index a4661ab5a7af7..0000000000000 --- a/doc/builders/images.xml +++ /dev/null @@ -1,15 +0,0 @@ - - Images - - This chapter describes tools for creating various types of images. - - - - - - - - - diff --git a/doc/builders/packages/dlib.section.md b/doc/builders/packages/dlib.section.md index 022195310a710..bd5b1a20a4d46 100644 --- a/doc/builders/packages/dlib.section.md +++ b/doc/builders/packages/dlib.section.md @@ -1,6 +1,6 @@ # DLib {#dlib} -[DLib](http://dlib.net/) is a modern, C++-based toolkit which provides several machine learning algorithms. +[DLib](http://dlib.net/) is a modern, C++\-based toolkit which provides several machine learning algorithms. ## Compiling without AVX support {#compiling-without-avx-support} diff --git a/doc/builders/packages/index.md b/doc/builders/packages/index.md new file mode 100644 index 0000000000000..1f44357024064 --- /dev/null +++ b/doc/builders/packages/index.md @@ -0,0 +1,27 @@ +# Packages {#chap-packages} + +This chapter contains information about how to use and maintain the Nix expressions for a number of specific packages, such as the Linux kernel or X.org. + +```{=include=} sections +citrix.section.md +dlib.section.md +eclipse.section.md +elm.section.md +emacs.section.md +firefox.section.md +fish.section.md +fuse.section.md +ibus.section.md +kakoune.section.md +linux.section.md +locales.section.md +etc-files.section.md +nginx.section.md +opengl.section.md +shell-helpers.section.md +steam.section.md +cataclysm-dda.section.md +urxvt.section.md +weechat.section.md +xorg.section.md +``` diff --git a/doc/builders/packages/index.xml b/doc/builders/packages/index.xml deleted file mode 100644 index 206e1e49f1f85..0000000000000 --- a/doc/builders/packages/index.xml +++ /dev/null @@ -1,29 +0,0 @@ - - Packages - - This chapter contains information about how to use and maintain the Nix expressions for a number of specific packages, such as the Linux kernel or X.org. - - - - - - - - - - - - - - - - - - - - - - - diff --git a/doc/builders/special.md b/doc/builders/special.md new file mode 100644 index 0000000000000..6d07fa87f3f3e --- /dev/null +++ b/doc/builders/special.md @@ -0,0 +1,11 @@ +# Special builders {#chap-special} + +This chapter describes several special builders. 
+ +```{=include=} sections +special/fhs-environments.section.md +special/makesetuphook.section.md +special/mkshell.section.md +special/darwin-builder.section.md +special/vm-tools.section.md +``` diff --git a/doc/builders/special.xml b/doc/builders/special.xml deleted file mode 100644 index 18cf6cfd39c70..0000000000000 --- a/doc/builders/special.xml +++ /dev/null @@ -1,13 +0,0 @@ - - Special builders - - This chapter describes several special builders. - - - - - - - diff --git a/doc/contributing.md b/doc/contributing.md new file mode 100644 index 0000000000000..3215dbe32bec0 --- /dev/null +++ b/doc/contributing.md @@ -0,0 +1,10 @@ +# Contributing to Nixpkgs {#part-contributing} + +```{=include=} chapters +contributing/quick-start.chapter.md +contributing/coding-conventions.chapter.md +contributing/submitting-changes.chapter.md +contributing/vulnerability-roundup.chapter.md +contributing/reviewing-contributions.chapter.md +contributing/contributing-to-documentation.chapter.md +``` diff --git a/doc/contributing/staging-workflow.dot b/doc/contributing/staging-workflow.dot new file mode 100644 index 0000000000000..faca7a1cad4c7 --- /dev/null +++ b/doc/contributing/staging-workflow.dot @@ -0,0 +1,16 @@ +digraph { + "small changes" [shape=none] + "mass-rebuilds and other large changes" [shape=none] + "critical security fixes" [shape=none] + "broken staging-next fixes" [shape=none] + + "small changes" -> master + "mass-rebuilds and other large changes" -> staging + "critical security fixes" -> master + "broken staging-next fixes" -> "staging-next" + + "staging-next" -> master [color="#E85EB0"] [label="stabilization ends"] [fontcolor="#E85EB0"] + "staging" -> "staging-next" [color="#E85EB0"] [label="stabilization starts"] [fontcolor="#E85EB0"] + + master -> "staging-next" -> staging [color="#5F5EE8"] [label="every six hours (GitHub Action)"] [fontcolor="#5F5EE8"] +} diff --git a/doc/contributing/staging-workflow.svg b/doc/contributing/staging-workflow.svg new file mode 100644 index 0000000000000..1a174a78830e4 --- /dev/null +++ b/doc/contributing/staging-workflow.svg @@ -0,0 +1,102 @@ + + + + + + + + + +small changes +small changes + + + +master + +master + + + +small changes->master + + + + + +mass-rebuilds and other large changes +mass-rebuilds and other large changes + + + +staging + +staging + + + +mass-rebuilds and other large changes->staging + + + + + +critical security fixes +critical security fixes + + + +critical security fixes->master + + + + + +broken staging-next fixes +broken staging-next fixes + + + +staging-next + +staging-next + + + +broken staging-next fixes->staging-next + + + + + +master->staging-next + + +every six hours (GitHub Action) + + + +staging->staging-next + + +stabilization starts + + + +staging-next->master + + +stabilization ends + + + +staging-next->staging + + +every six hours (GitHub Action) + + + diff --git a/doc/contributing/submitting-changes.chapter.md b/doc/contributing/submitting-changes.chapter.md index 30fe4fa47d0d1..8e92686c82d32 100644 --- a/doc/contributing/submitting-changes.chapter.md +++ b/doc/contributing/submitting-changes.chapter.md @@ -214,24 +214,11 @@ The last checkbox is fits [CONTRIBUTING.md](https://github.com/NixOS/nixpkgs/blo - Hydra builds for master and staging should not be used as testing platform, it’s a build farm for changes that have been already tested. - When changing the bootloader installation process, extra care must be taken. Grub installations cannot be rolled back, hence changes may break people’s installations forever. 
For any non-trivial change to the bootloader please file a PR asking for review, especially from \@edolstra. -```{.graphviz caption="Staging workflow"} -digraph { - "small changes" [shape=none] - "mass-rebuilds and other large changes" [shape=none] - "critical security fixes" [shape=none] - "broken staging-next fixes" [shape=none] - - "small changes" -> master - "mass-rebuilds and other large changes" -> staging - "critical security fixes" -> master - "broken staging-next fixes" -> "staging-next" - - "staging-next" -> master [color="#E85EB0"] [label="stabilization ends"] [fontcolor="#E85EB0"] - "staging" -> "staging-next" [color="#E85EB0"] [label="stabilization starts"] [fontcolor="#E85EB0"] - - master -> "staging-next" -> staging [color="#5F5EE8"] [label="every six hours (GitHub Action)"] [fontcolor="#5F5EE8"] -} -``` +::: {.figure #fig-staging-workflow} +# Staging workflow + +![Staging workflow](./staging-workflow.svg) +::: [This GitHub Action](https://github.com/NixOS/nixpkgs/blob/master/.github/workflows/periodic-merge-6h.yml) brings changes from `master` to `staging-next` and from `staging-next` to `staging` every 6 hours; these are the blue arrows in the diagram above. The purple arrows in the diagram above are done manually and much less frequently. You can get an idea of how often these merges occur by looking at the git history. diff --git a/doc/default.nix b/doc/default.nix index 86b4a8d978146..5954e4495adb9 100644 --- a/doc/default.nix +++ b/doc/default.nix @@ -1,69 +1,145 @@ { pkgs ? (import ./.. { }), nixpkgs ? { }}: let - doc-support = import ./doc-support { inherit pkgs nixpkgs; }; + inherit (pkgs) lib; + inherit (lib) hasPrefix removePrefix; + + lib-docs = import ./doc-support/lib-function-docs.nix { + inherit pkgs nixpkgs; + libsets = [ + { name = "asserts"; description = "assertion functions"; } + { name = "attrsets"; description = "attribute set functions"; } + { name = "strings"; description = "string manipulation functions"; } + { name = "versions"; description = "version string functions"; } + { name = "trivial"; description = "miscellaneous functions"; } + { name = "lists"; description = "list manipulation functions"; } + { name = "debug"; description = "debugging functions"; } + { name = "options"; description = "NixOS / nixpkgs option handling"; } + { name = "path"; description = "path functions"; } + { name = "filesystem"; description = "filesystem functions"; } + { name = "sources"; description = "source filtering functions"; } + { name = "cli"; description = "command-line serialization functions"; } + ]; + }; + + epub = pkgs.runCommand "manual.epub" { + nativeBuildInputs = with pkgs; [ libxslt zip ]; + + epub = '' + + + Nixpkgs Manual + Version ${pkgs.lib.version} + + + Temporarily unavailable + + The Nixpkgs manual is currently not available in EPUB format, + please use the HTML manual + instead. + + + If you've used the EPUB manual in the past and it has been useful to you, please + let us know. + + + + ''; + + passAsFile = [ "epub" ]; + } '' + mkdir scratch + xsltproc \ + --param chapter.autolabel 0 \ + --nonet \ + --output scratch/ \ + ${pkgs.docbook_xsl_ns}/xml/xsl/docbook/epub/docbook.xsl \ + $epubPath + + echo "application/epub+zip" > mimetype + zip -0Xq "$out" mimetype + cd scratch && zip -Xr9D "$out" * + ''; + + # NB: This file describes the Nixpkgs manual, which happens to use module + # docs infra originally developed for NixOS. 
+ optionsDoc = pkgs.nixosOptionsDoc { + inherit (pkgs.lib.evalModules { + modules = [ ../pkgs/top-level/config.nix ]; + class = "nixpkgsConfig"; + }) options; + documentType = "none"; + transformOptions = opt: + opt // { + declarations = + map + (decl: + if hasPrefix (toString ../..) (toString decl) + then + let subpath = removePrefix "/" (removePrefix (toString ../.) (toString decl)); + in { url = "https://github.com/NixOS/nixpkgs/blob/master/${subpath}"; name = subpath; } + else decl) + opt.declarations; + }; + }; in pkgs.stdenv.mkDerivation { name = "nixpkgs-manual"; nativeBuildInputs = with pkgs; [ - pandoc - graphviz - libxml2 - libxslt - zip - jing - xmlformat + nixos-render-docs ]; - src = pkgs.nix-gitignore.gitignoreSource [] ./.; + src = ./.; postPatch = '' - ln -s ${doc-support} ./doc-support/result + ln -s ${optionsDoc.optionsJSON}/share/doc/nixos/options.json ./config-options.json ''; - epub = '' - - - Nixpkgs Manual - Version ${pkgs.lib.version} - - - Temporarily unavailable - - The Nixpkgs manual is currently not available in EPUB format, - please use the HTML manual - instead. - - - If you've used the EPUB manual in the past and it has been useful to you, please - let us know. - - - - ''; - passAsFile = [ "epub" ]; + buildPhase = '' + cat \ + ./functions/library.md.in \ + ${lib-docs}/index.md \ + > ./functions/library.md + substitute ./manual.md.in ./manual.md \ + --replace '@MANUAL_VERSION@' '${pkgs.lib.version}' + + mkdir -p out/media - preBuild = '' - cp $epubPath epub.xml - make -j$NIX_BUILD_CORES render-md + mkdir -p out/highlightjs + cp -t out/highlightjs \ + ${pkgs.documentation-highlighter}/highlight.pack.js \ + ${pkgs.documentation-highlighter}/LICENSE \ + ${pkgs.documentation-highlighter}/mono-blue.css \ + ${pkgs.documentation-highlighter}/loader.js + + cp -t out ./overrides.css ./style.css + + nixos-render-docs manual html \ + --manpage-urls ./manpage-urls.json \ + --revision ${pkgs.lib.trivial.revisionWithDefault (pkgs.rev or "master")} \ + --stylesheet style.css \ + --stylesheet overrides.css \ + --stylesheet highlightjs/mono-blue.css \ + --script ./highlightjs/highlight.pack.js \ + --script ./highlightjs/loader.js \ + --toc-depth 1 \ + --section-toc-depth 1 \ + manual.md \ + out/index.html ''; installPhase = '' dest="$out/share/doc/nixpkgs" mkdir -p "$(dirname "$dest")" - mv out/html "$dest" + mv out "$dest" mv "$dest/index.html" "$dest/manual.html" - mv out/epub/manual.epub "$dest/nixpkgs-manual.epub" + cp ${epub} "$dest/nixpkgs-manual.epub" mkdir -p $out/nix-support/ echo "doc manual $dest manual.html" >> $out/nix-support/hydra-build-products echo "doc manual $dest nixpkgs-manual.epub" >> $out/nix-support/hydra-build-products ''; - - # Environment variables - PANDOC_LUA_FILTERS_DIR = "${pkgs.pandoc-lua-filters}/share/pandoc/filters"; - PANDOC_LINK_MANPAGES_FILTER = import build-aux/pandoc-filters/link-manpages.nix { inherit pkgs; }; } diff --git a/doc/doc-support/default.nix b/doc/doc-support/default.nix deleted file mode 100644 index cfa7cbdc82839..0000000000000 --- a/doc/doc-support/default.nix +++ /dev/null @@ -1,87 +0,0 @@ -{ pkgs ? (import ../.. {}), nixpkgs ? 
{ }}: -let - inherit (pkgs) lib; - inherit (lib) hasPrefix removePrefix; - - libsets = [ - { name = "asserts"; description = "assertion functions"; } - { name = "attrsets"; description = "attribute set functions"; } - { name = "strings"; description = "string manipulation functions"; } - { name = "versions"; description = "version string functions"; } - { name = "trivial"; description = "miscellaneous functions"; } - { name = "lists"; description = "list manipulation functions"; } - { name = "debug"; description = "debugging functions"; } - { name = "options"; description = "NixOS / nixpkgs option handling"; } - { name = "path"; description = "path functions"; } - { name = "filesystem"; description = "filesystem functions"; } - { name = "sources"; description = "source filtering functions"; } - { name = "cli"; description = "command-line serialization functions"; } - ]; - - locationsXml = import ./lib-function-locations.nix { inherit pkgs nixpkgs libsets; }; - functionDocs = import ./lib-function-docs.nix { inherit locationsXml pkgs libsets; }; - version = pkgs.lib.version; - - epub-xsl = pkgs.writeText "epub.xsl" '' - - - - - - ''; - - xhtml-xsl = pkgs.writeText "xhtml.xsl" '' - - - - - - ''; - - # NB: This file describes the Nixpkgs manual, which happens to use module - # docs infra originally developed for NixOS. - optionsDoc = pkgs.nixosOptionsDoc { - inherit (pkgs.lib.evalModules { - modules = [ ../../pkgs/top-level/config.nix ]; - class = "nixpkgsConfig"; - }) options; - documentType = "none"; - transformOptions = opt: - opt // { - declarations = - map - (decl: - if hasPrefix (toString ../..) (toString decl) - then - let subpath = removePrefix "/" (removePrefix (toString ../..) (toString decl)); - in { url = "https://github.com/NixOS/nixpkgs/blob/master/${subpath}"; name = subpath; } - else decl) - opt.declarations; - }; - }; - -in pkgs.runCommand "doc-support" {} -'' - mkdir result - ( - cd result - ln -s ${locationsXml} ./function-locations.xml - ln -s ${functionDocs} ./function-docs - ln -s ${optionsDoc.optionsDocBook} ./config-options.docbook.xml - - ln -s ${pkgs.docbook5}/xml/rng/docbook/docbook.rng ./docbook.rng - ln -s ${pkgs.docbook_xsl_ns}/xml/xsl ./xsl - ln -s ${epub-xsl} ./epub.xsl - ln -s ${xhtml-xsl} ./xhtml.xsl - - ln -s ${./xmlformat.conf} ./xmlformat.conf - ln -s ${pkgs.documentation-highlighter} ./highlightjs - - echo -n "${version}" > ./version - ) - mv result $out -'' diff --git a/doc/doc-support/lib-function-docs.nix b/doc/doc-support/lib-function-docs.nix index cf218fa704017..018b0bd5e9453 100644 --- a/doc/doc-support/lib-function-docs.nix +++ b/doc/doc-support/lib-function-docs.nix @@ -1,8 +1,13 @@ # Generates the documentation for library functions via nixdoc. 
-{ pkgs, locationsXml, libsets }: +{ pkgs, nixpkgs, libsets }: -with pkgs; stdenv.mkDerivation { +with pkgs; + +let + locationsJSON = import ./lib-function-locations.nix { inherit pkgs nixpkgs libsets; }; +in +stdenv.mkDerivation { name = "nixpkgs-lib-docs"; src = ../../lib; @@ -11,26 +16,23 @@ with pkgs; stdenv.mkDerivation { function docgen { # TODO: wrap lib.$1 in , make nixdoc not escape it if [[ -e "../lib/$1.nix" ]]; then - nixdoc -c "$1" -d "lib.$1: $2" -f "$1.nix" > "$out/$1.xml" + nixdoc -c "$1" -d "lib.$1: $2" -l ${locationsJSON} -f "$1.nix" > "$out/$1.md" else - nixdoc -c "$1" -d "lib.$1: $2" -f "$1/default.nix" > "$out/$1.xml" + nixdoc -c "$1" -d "lib.$1: $2" -l ${locationsJSON} -f "$1/default.nix" > "$out/$1.md" fi - echo "" >> "$out/index.xml" + echo "$out/$1.md" >> "$out/index.md" } mkdir -p "$out" - cat > "$out/index.xml" << 'EOF' - - + cat > "$out/index.md" << 'EOF' + ```{=include=} sections EOF ${lib.concatMapStrings ({ name, description }: '' docgen ${name} ${lib.escapeShellArg description} '') libsets} - echo "" >> "$out/index.xml" - - ln -s ${locationsXml} $out/locations.xml + echo '```' >> "$out/index.md" ''; } diff --git a/doc/doc-support/lib-function-locations.nix b/doc/doc-support/lib-function-locations.nix index 1ee59648330aa..e6794617fdd89 100644 --- a/doc/doc-support/lib-function-locations.nix +++ b/doc/doc-support/lib-function-locations.nix @@ -58,28 +58,18 @@ let [ "-prime" ]; urlPrefix = "https://github.com/NixOS/nixpkgs/blob/${revision}"; - xmlstrings = (nixpkgsLib.strings.concatMapStrings - ({ name, value }: - '' -
${name} - - Located at - ${value.file}:${builtins.toString value.line} - in <nixpkgs>. - -
- '') - relativeLocs); + jsonLocs = builtins.listToAttrs + (builtins.map + ({ name, value }: { + name = sanitizeId name; + value = + let + text = "${value.file}:${builtins.toString value.line}"; + target = "${urlPrefix}/${value.file}#L${builtins.toString value.line}"; + in + "[${text}](${target}) in ``"; + }) + relativeLocs); -in pkgs.writeText - "locations.xml" - '' -
- All the locations for every lib function - This file is only for inclusion by other files. - ${xmlstrings} -
- '' +in +pkgs.writeText "locations.json" (builtins.toJSON jsonLocs) diff --git a/doc/doc-support/parameters.xml b/doc/doc-support/parameters.xml deleted file mode 100644 index 5b39d2f7f1a58..0000000000000 --- a/doc/doc-support/parameters.xml +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - diff --git a/doc/doc-support/xmlformat.conf b/doc/doc-support/xmlformat.conf deleted file mode 100644 index c3f39c7fd81b8..0000000000000 --- a/doc/doc-support/xmlformat.conf +++ /dev/null @@ -1,72 +0,0 @@ -# -# DocBook Configuration file for "xmlformat" -# see http://www.kitebird.com/software/xmlformat/ -# 10 Sept. 2004 -# - -# Only block elements -ackno address appendix article biblioentry bibliography bibliomixed \ -biblioset blockquote book bridgehead callout calloutlist caption caution \ -chapter chapterinfo classsynopsis cmdsynopsis colophon constraintdef \ -constructorsynopsis dedication destructorsynopsis entry epigraph equation example \ -figure formalpara funcsynopsis glossary glossdef glossdiv glossentry glosslist \ -glosssee glossseealso graphic graphicco highlights imageobjectco important \ -index indexdiv indexentry indexinfo info informalequation informalexample \ -informalfigure informaltable legalnotice literallayout lot lotentry mediaobject \ -mediaobjectco msgmain msgset note orderedlist para part preface primaryie \ -procedure qandadiv qandaentry qandaset refentry refentrytitle reference \ -refnamediv refsect1 refsect2 refsect3 refsection revhistory screenshot sect1 \ -sect2 sect3 sect4 sect5 section seglistitem set setindex sidebar simpara \ -simplesect step substeps synopfragment synopsis table term title \ -toc variablelist varlistentry warning itemizedlist listitem \ -footnote colspec partintro row simplelist subtitle tbody tgroup thead tip - format block - normalize no - - -#appendix bibliography chapter glossary preface reference -# element-break 3 - -sect1 section - element-break 2 - - -# -para abstract - format block - entry-break 1 - exit-break 1 - normalize yes - -title - format block - normalize = yes - entry-break = 0 - exit-break = 0 - -# Inline elements -abbrev accel acronym action application citation citebiblioid citerefentry citetitle \ -classname co code command computeroutput constant country database date email emphasis \ -envar errorcode errorname errortext errortype exceptionname fax filename \ -firstname firstterm footnoteref foreignphrase funcdef funcparams function \ -glossterm group guibutton guiicon guilabel guimenu guimenuitem guisubmenu \ -hardware holder honorific indexterm inlineequation inlinegraphic inlinemediaobject \ -interface interfacename \ -keycap keycode keycombo keysym lineage link literal manvolnum markup medialabel \ -menuchoice methodname methodparam modifier mousebutton olink ooclass ooexception \ -oointerface option optional otheraddr othername package paramdef parameter personname \ -phrase pob postcode productname prompt property quote refpurpose replaceable \ -returnvalue revnumber sgmltag state street structfield structname subscript \ -superscript surname symbol systemitem token trademark type ulink userinput \ -uri varargs varname void wordasword xref year mathphrase member tag - format inline - -programlisting screen - format verbatim - entry-break = 0 - exit-break = 0 - -# This is needed so that the spacing inside those tags is kept. 
-term cmdsynopsis arg - normalize yes - format block diff --git a/doc/functions.md b/doc/functions.md new file mode 100644 index 0000000000000..09033c9e3c199 --- /dev/null +++ b/doc/functions.md @@ -0,0 +1,11 @@ +# Functions reference {#chap-functions} + +The nixpkgs repository has several utility functions to manipulate Nix expressions. + +```{=include=} sections +functions/library.md +functions/generators.section.md +functions/debug.section.md +functions/prefer-remote-fetch.section.md +functions/nix-gitignore.section.md +``` diff --git a/doc/functions.xml b/doc/functions.xml deleted file mode 100644 index 8ef530d307cdc..0000000000000 --- a/doc/functions.xml +++ /dev/null @@ -1,14 +0,0 @@ - - Functions reference - - The nixpkgs repository has several utility functions to manipulate Nix expressions. - - - - - - - diff --git a/doc/functions/library.md.in b/doc/functions/library.md.in new file mode 100644 index 0000000000000..e17de86feb8ae --- /dev/null +++ b/doc/functions/library.md.in @@ -0,0 +1,5 @@ +# Nixpkgs Library Functions {#sec-functions-library} + +Nixpkgs provides a standard library at `pkgs.lib`, or through `import `. + + diff --git a/doc/functions/library.xml b/doc/functions/library.xml deleted file mode 100644 index 788ea0b94f1fe..0000000000000 --- a/doc/functions/library.xml +++ /dev/null @@ -1,14 +0,0 @@ -
- Nixpkgs Library Functions - - - Nixpkgs provides a standard library at pkgs.lib, or through import <nixpkgs/lib>. - - - - -
diff --git a/doc/hooks/index.md b/doc/hooks/index.md new file mode 100644 index 0000000000000..c1e86a3033073 --- /dev/null +++ b/doc/hooks/index.md @@ -0,0 +1,33 @@ +# Hooks reference {#chap-hooks} + +Nixpkgs has several hook packages that augment the stdenv phases. + +The stdenv built-in hooks are documented in [](#ssec-setup-hooks). + +```{=include=} sections +autoconf.section.md +automake.section.md +autopatchelf.section.md +breakpoint.section.md +cmake.section.md +gdk-pixbuf.section.md +ghc.section.md +gnome.section.md +installShellFiles.section.md +libiconv.section.md +libxml2.section.md +meson.section.md +ninja.section.md +patch-rc-path-hooks.section.md +perl.section.md +pkg-config.section.md +postgresql-test-hook.section.md +python.section.md +qt-4.section.md +scons.section.md +tetex-tex-live.section.md +unzip.section.md +validatePkgConfig.section.md +waf.section.md +xcbuild.section.md +``` diff --git a/doc/hooks/index.xml b/doc/hooks/index.xml deleted file mode 100644 index 0917fac6c0ac0..0000000000000 --- a/doc/hooks/index.xml +++ /dev/null @@ -1,37 +0,0 @@ - - Hooks reference - - Nixpkgs has several hook packages that augment the stdenv phases. - - - The stdenv built-in hooks are documented in . - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/doc/languages-frameworks/index.md b/doc/languages-frameworks/index.md new file mode 100644 index 0000000000000..cdbf08f1791bf --- /dev/null +++ b/doc/languages-frameworks/index.md @@ -0,0 +1,45 @@ +# Languages and frameworks {#chap-language-support} + +The [standard build environment](#chap-stdenv) makes it easy to build typical Autotools-based packages with very little code. Any other kind of package can be accommodated by overriding the appropriate phases of `stdenv`. However, there are specialised functions in Nixpkgs to easily build packages for other programming languages, such as Perl or Haskell. These are described in this chapter. + +```{=include=} sections +agda.section.md +android.section.md +beam.section.md +bower.section.md +chicken.section.md +coq.section.md +crystal.section.md +cuda.section.md +cuelang.section.md +dart.section.md +dhall.section.md +dotnet.section.md +emscripten.section.md +gnome.section.md +go.section.md +haskell.section.md +hy.section.md +idris.section.md +ios.section.md +java.section.md +javascript.section.md +lisp.section.md +lua.section.md +maven.section.md +nim.section.md +ocaml.section.md +octave.section.md +perl.section.md +php.section.md +pkg-config.section.md +python.section.md +qt.section.md +r.section.md +ruby.section.md +rust.section.md +swift.section.md +texlive.section.md +titanium.section.md +vim.section.md +``` diff --git a/doc/languages-frameworks/index.xml b/doc/languages-frameworks/index.xml deleted file mode 100644 index 94c4e303027f7..0000000000000 --- a/doc/languages-frameworks/index.xml +++ /dev/null @@ -1,47 +0,0 @@ - - Languages and frameworks - - The standard build environment makes it easy to build typical Autotools-based packages with very little code. Any other kind of package can be accommodated by overriding the appropriate phases of stdenv. However, there are specialised functions in Nixpkgs to easily build packages for other programming languages, such as Perl or Haskell. These are described in this chapter. 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/doc/lib.md b/doc/lib.md new file mode 100644 index 0000000000000..2c3105333ed09 --- /dev/null +++ b/doc/lib.md @@ -0,0 +1,6 @@ +# Nixpkgs `lib` {#id-1.4} + +```{=include=} chapters +functions.md +module-system/module-system.chapter.md +``` diff --git a/doc/manual.md.in b/doc/manual.md.in new file mode 100644 index 0000000000000..a4a73a913097a --- /dev/null +++ b/doc/manual.md.in @@ -0,0 +1,14 @@ +# Nixpkgs Manual {#nixpkgs-manual} +## Version @MANUAL_VERSION@ + +```{=include=} chapters +preface.chapter.md +``` + +```{=include=} parts +using-nixpkgs.md +lib.md +stdenv.md +builders.md +contributing.md +``` diff --git a/doc/manual.xml b/doc/manual.xml deleted file mode 100644 index de3d40f553c03..0000000000000 --- a/doc/manual.xml +++ /dev/null @@ -1,49 +0,0 @@ - - - Nixpkgs Manual - Version - - - - - Using Nixpkgs - - - - - - Nixpkgs <code>lib</code> - - - - - Standard environment - - - - - - - - Builders - - - - - - - - - - - Contributing to Nixpkgs - - - - - - - - diff --git a/doc/shell.nix b/doc/shell.nix deleted file mode 100644 index 5fa2b4424899a..0000000000000 --- a/doc/shell.nix +++ /dev/null @@ -1,3 +0,0 @@ -{ pkgs ? import ../. { } }: -(import ./default.nix { }).overrideAttrs -(x: { buildInputs = (x.buildInputs or [ ]) ++ [ pkgs.xmloscopy pkgs.ruby ]; }) diff --git a/doc/stdenv.md b/doc/stdenv.md new file mode 100644 index 0000000000000..1ef81f84b5141 --- /dev/null +++ b/doc/stdenv.md @@ -0,0 +1,9 @@ +# Standard environment {#part-stdenv} + +```{=include=} chapters +stdenv/stdenv.chapter.md +stdenv/meta.chapter.md +stdenv/multiple-output.chapter.md +stdenv/cross-compilation.chapter.md +stdenv/platform-notes.chapter.md +``` diff --git a/doc/stdenv/stdenv.chapter.md b/doc/stdenv/stdenv.chapter.md index 71d28282e9b25..a0f81b97f6bcb 100644 --- a/doc/stdenv/stdenv.chapter.md +++ b/doc/stdenv/stdenv.chapter.md @@ -464,10 +464,8 @@ The commit object contains the following values: If the returned array contains exactly one object (e.g. `[{}]`), all values are optional and will be determined automatically. -```{=docbook} - -Standard output of an update script using commit feature -``` +::: {.example #var-passthru-updateScript-example-commit} +# Standard output of an update script using commit feature ```json [ @@ -481,10 +479,7 @@ If the returned array contains exactly one object (e.g. `[{}]`), all values are } ] ``` - -```{=docbook} - -``` +::: ### Recursive attributes in `mkDerivation` {#mkderivation-recursive-attributes} diff --git a/doc/using-nixpkgs.md b/doc/using-nixpkgs.md new file mode 100644 index 0000000000000..bb222ae384fa6 --- /dev/null +++ b/doc/using-nixpkgs.md @@ -0,0 +1,7 @@ +# Using Nixpkgs {#part-using} + +```{=include=} chapters +using/configuration.chapter.md +using/overlays.chapter.md +using/overrides.chapter.md +``` diff --git a/doc/using/configuration.chapter.md b/doc/using/configuration.chapter.md index e657cb21c2959..8d246b117b05f 100644 --- a/doc/using/configuration.chapter.md +++ b/doc/using/configuration.chapter.md @@ -185,8 +185,10 @@ You can define a function called `packageOverrides` in your local `~/.config/nix The following attributes can be passed in [`config`](#chap-packageconfig). 
-```{=docbook} - +```{=include=} options +id-prefix: opt- +list-id: configuration-variable-list +source: ../config-options.json ``` diff --git a/pkgs/tools/nix/nixdoc/Cargo.lock b/pkgs/tools/nix/nixdoc/Cargo.lock deleted file mode 100644 index 0f672357cb5f5..0000000000000 --- a/pkgs/tools/nix/nixdoc/Cargo.lock +++ /dev/null @@ -1,309 +0,0 @@ -[[package]] -name = "ansi_term" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "arenatree" -version = "0.1.1" -source = "git+https://gitlab.com/jD91mZM2/arenatree#f9bf7efa9a5ef4c2dd9e2acc5a4cc79a987cb648" - -[[package]] -name = "arrayvec" -version = "0.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "atty" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)", - "termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "backtrace" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "backtrace-sys 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)", - "cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-demangle 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "backtrace-sys" -version = "0.1.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "cc 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "bitflags" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "cc" -version = "1.0.25" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "cfg-if" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "clap" -version = "2.32.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)", - "atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", - "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", - "strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "textwrap 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "failure" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "backtrace 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", - "failure_derive 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "failure_derive" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies 
= [ - "proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.15 (registry+https://github.com/rust-lang/crates.io-index)", - "synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "libc" -version = "0.2.43" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "nixdoc" -version = "1.0.1" -dependencies = [ - "failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", - "rnix 0.4.1 (git+https://gitlab.com/jD91mZM2/rnix.git?rev=10b86c94291b4864470158ef8750de85ddd8d4ba)", - "structopt 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)", - "xml-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "nodrop" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "proc-macro2" -version = "0.4.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "quote" -version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "redox_syscall" -version = "0.1.40" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "redox_termios" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rnix" -version = "0.4.1" -source = "git+https://gitlab.com/jD91mZM2/rnix.git?rev=10b86c94291b4864470158ef8750de85ddd8d4ba#10b86c94291b4864470158ef8750de85ddd8d4ba" -dependencies = [ - "arenatree 0.1.1 (git+https://gitlab.com/jD91mZM2/arenatree)", - "arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)", - "failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", - "smol_str 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "rustc-demangle" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "smol_str" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "strsim" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "structopt" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)", - "structopt-derive 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "structopt-derive" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.15 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "syn" -version = "0.15.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.8 
(registry+https://github.com/rust-lang/crates.io-index)", - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "synstructure" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", - "quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 0.15.15 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "termion" -version = "1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)", - "redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", - "redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "textwrap" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "unicode-width" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "unicode-xid" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "vec_map" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "winapi" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "xml-rs" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[metadata] -"checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b" -"checksum arenatree 0.1.1 (git+https://gitlab.com/jD91mZM2/arenatree)" = "" -"checksum arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)" = "a1e964f9e24d588183fcb43503abda40d288c8657dfc27311516ce2f05675aef" -"checksum atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652" -"checksum backtrace 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "89a47830402e9981c5c41223151efcced65a0510c13097c769cede7efb34782a" -"checksum backtrace-sys 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)" = "c66d56ac8dabd07f6aacdaf633f4b8262f5b3601a810a0dcddffd5c22c69daa0" -"checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12" -"checksum cc 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)" = "f159dfd43363c4d08055a07703eb7a3406b0dac4d0584d96965a3262db3c9d16" -"checksum cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = 
"082bb9b28e00d3c9d39cc03e64ce4cea0f1bb9b3fde493f0cbc008472d22bdf4" -"checksum clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b957d88f4b6a63b9d70d5f454ac8011819c6efa7727858f458ab71c756ce2d3e" -"checksum failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6dd377bcc1b1b7ce911967e3ec24fa19c3224394ec05b54aa7b083d498341ac7" -"checksum failure_derive 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "64c2d913fe8ed3b6c6518eedf4538255b989945c14c2a7d5cbff62a5e2120596" -"checksum libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)" = "76e3a3ef172f1a0b9a9ff0dd1491ae5e6c948b94479a3021819ba7d860c8645d" -"checksum nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "9a2228dca57108069a5262f2ed8bd2e82496d2e074a06d1ccc7ce1687b6ae0a2" -"checksum proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)" = "3d7b7eaaa90b4a90a932a9ea6666c95a389e424eff347f0f793979289429feee" -"checksum quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)" = "dd636425967c33af890042c483632d33fa7a18f19ad1d7ea72e8998c6ef8dea5" -"checksum redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "c214e91d3ecf43e9a4e41e578973adeb14b474f2bee858742d127af75a0112b1" -"checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76" -"checksum rnix 0.4.1 (git+https://gitlab.com/jD91mZM2/rnix.git?rev=10b86c94291b4864470158ef8750de85ddd8d4ba)" = "" -"checksum rustc-demangle 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "bcfe5b13211b4d78e5c2cadfebd7769197d95c639c35a50057eb4c05de811395" -"checksum smol_str 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "f3ed6f19b800d76574926e458d5f8e2dbea86c2b58c08d33a982448f09ac8d0c" -"checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550" -"checksum structopt 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)" = "d77af7242f18c40fd19cb270985930f239ee1646cfb482050bbae9da1d18743b" -"checksum structopt-derive 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)" = "17ff01fe96de9d16e7372ae5f19dd7ece2c703b51043c3db9ea27f9e393ea311" -"checksum syn 0.15.15 (registry+https://github.com/rust-lang/crates.io-index)" = "0a9c2bf1e53c21704a7cce1b2a42768f1ae32a6777108a0d7f1faa4bfe7f7c04" -"checksum synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "73687139bf99285483c96ac0add482c3776528beac1d97d444f6e91f203a2015" -"checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096" -"checksum textwrap 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "307686869c93e71f94da64286f9a9524c0f308a9e1c87a583de8e9c9039ad3f6" -"checksum unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "882386231c45df4700b275c7ff55b6f3698780a650026380e72dabe76fa46526" -"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" -"checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a" -"checksum winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = 
"92c1eb33641e276cfa214a0522acad57be5c56b10cb348b3c5117db75f3ac4b0" -"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" -"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -"checksum xml-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "541b12c998c5b56aa2b4e6f18f03664eef9a4fd0a246a55594efae6cc2d964b5" diff --git a/pkgs/tools/nix/nixdoc/default.nix b/pkgs/tools/nix/nixdoc/default.nix index 8562ff1c5e56d..785261a8e0925 100644 --- a/pkgs/tools/nix/nixdoc/default.nix +++ b/pkgs/tools/nix/nixdoc/default.nix @@ -2,38 +2,24 @@ rustPlatform.buildRustPackage rec { pname = "nixdoc"; - version = "1.0.1"; + version = "2.3.0"; src = fetchFromGitHub { - owner = "tazjin"; + owner = "nix-community"; repo = "nixdoc"; rev = "v${version}"; - sha256 = "14d4dq06jdqazxvv7fq5872zy0capxyb0fdkp8qg06gxl1iw201s"; + sha256 = "sha256-8pp6xlmdb3kZ6unTiO4yRruyEZ//GIHZF1k8f4kQr9Q="; }; - patches = [ - # Support nested identifiers https://github.com/nix-community/nixdoc/pull/27 - (fetchpatch { - url = "https://github.com/nix-community/nixdoc/pull/27/commits/ea542735bf675fe2ccd37edaffb9138d1a8c1b7e.patch"; - sha256 = "1fmz44jv2r9qsnjxvkkjfb0safy69l4x4vx1g5gisrp8nwdn94rj"; - }) - ]; + cargoSha256 = "sha256-k8/+BBMjQCsrgCi33fTdiSukaAZlg6XU3NwXaJdGYVw="; buildInputs = lib.optionals stdenv.isDarwin [ darwin.Security ]; - cargoLock = { - lockFile = ./Cargo.lock; - outputHashes = { - "arenatree-0.1.1" = "sha256-b3VVbYnWsjSjFMxvkfpJt13u+VC6baOIWD4qm1Gco4Q="; - "rnix-0.4.1" = "sha256-C1L/qXk6AimH7COrBlqpUA3giftaOYm/qNxs7rQgETA="; - }; - }; - meta = with lib; { description = "Generate documentation for Nix functions"; - homepage = "https://github.com/tazjin/nixdoc"; + homepage = "https://github.com/nix-community/nixdoc"; license = [ licenses.gpl3 ]; - maintainers = [ maintainers.tazjin ]; + maintainers = [ maintainers.asymmetric ]; platforms = platforms.unix; }; } diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py index 440cf35f0d387..6287b60f0a51d 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/commonmark.py @@ -184,3 +184,7 @@ def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> st def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: self._list_stack.pop() return "" + def image(self, token: Token, tokens: Sequence[Token], i: int) -> str: + if title := cast(str, token.attrs.get('title', '')): + title = ' "' + title.replace('"', '\\"') + '"' + return f'![{token.content}]({token.attrs["src"]}{title})' diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py index d25253d1a2f92..ffe64cde4d345 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/html.py @@ -44,6 +44,9 @@ def render(self, tokens: Sequence[Token]) -> str: result += self._close_headings(None) return result + def _pull_image(self, path: str) -> str: + raise NotImplementedError() + def text(self, token: Token, tokens: Sequence[Token], i: int) -> str: return escape(token.content) def paragraph_open(self, 
token: Token, tokens: Sequence[Token], i: int) -> str: @@ -67,7 +70,8 @@ def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: if tokens[i + 1].type == 'link_close': tag, text = "xref", xref.title_html if xref.title: - title = f'title="{escape(xref.title, True)}"' + # titles are not attribute-safe on their own, so we need to replace quotes. + title = 'title="{}"'.format(xref.title.replace('"', '&quot;')) + target, href = "", xref.href() return f'{text}' def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: @@ -223,6 +227,106 @@ def example_title_open(self, token: Token, tokens: Sequence[Token], i: int) -> s return '

' def example_title_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: return '


' + ) + def figure_title_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return ( + '

' + ' ' + ) + def figure_title_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return ( + ' ' + '

' + '
' + ) + def table_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return ( + '
' + '' + ) + def table_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return ( + '
' + '
' + ) + def thead_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + cols = [] + for j in range(i + 1, len(tokens)): + if tokens[j].type == 'thead_close': + break + elif tokens[j].type == 'th_open': + cols.append(cast(str, tokens[j].attrs.get('style', 'left')).removeprefix('text-align:')) + return "".join([ + "", + "".join([ f'' for col in cols ]), + "", + "", + ]) + def thead_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "" + def tr_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "" + def tr_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "" + def th_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return f'' + def th_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "" + def tbody_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "" + def tbody_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "" + def td_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return f'' + def td_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "" + def footnote_ref(self, token: Token, tokens: Sequence[Token], i: int) -> str: + href = self._xref_targets[token.meta['target']].href() + id = escape(cast(str, token.attrs["id"]), True) + return ( + f'' + f'[{token.meta["id"] + 1}]' + '' + ) + def footnote_block_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return ( + '
' + '
' + '
' + ) + def footnote_block_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "
" + def footnote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + # meta id,label + id = escape(self._xref_targets[token.meta["label"]].id, True) + return f'
' + def footnote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + return "
" + def footnote_anchor(self, token: Token, tokens: Sequence[Token], i: int) -> str: + href = self._xref_targets[token.meta['target']].href() + return ( + f'' + f'[{token.meta["id"] + 1}]' + '' + ) def _make_hN(self, level: int) -> tuple[str, str]: return f"h{min(6, max(1, level + self._hlevel_offset))}", "" diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py index ef81e4d7e1ab8..03c5a5dd39604 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py @@ -1,4 +1,5 @@ import argparse +import hashlib import html import json import re @@ -235,27 +236,48 @@ class HTMLParameters(NamedTuple): generator: str stylesheets: Sequence[str] scripts: Sequence[str] + # number of levels in the rendered table of contents. tables are prepended to + # the content they apply to (entire document / document chunk / top-level section + # of a chapter), setting a depth of 0 omits the respective table. toc_depth: int chunk_toc_depth: int + section_toc_depth: int + media_dir: Path class ManualHTMLRenderer(RendererMixin, HTMLRenderer): _base_path: Path + _in_dir: Path _html_params: HTMLParameters def __init__(self, toplevel_tag: str, revision: str, html_params: HTMLParameters, manpage_urls: Mapping[str, str], xref_targets: dict[str, XrefTarget], - base_path: Path): + in_dir: Path, base_path: Path): super().__init__(toplevel_tag, revision, manpage_urls, xref_targets) - self._base_path, self._html_params = base_path, html_params + self._in_dir = in_dir + self._base_path = base_path.absolute() + self._html_params = html_params + + def _pull_image(self, src: str) -> str: + src_path = Path(src) + content = (self._in_dir / src_path).read_bytes() + # images may be used more than once, but we want to store them only once and + # in an easily accessible (ie, not input-file-path-dependent) location without + # having to maintain a mapping structure. hashing the file and using the hash + # as both the path of the final image provides both. 
+ content_hash = hashlib.sha3_256(content).hexdigest() + target_name = f"{content_hash}{src_path.suffix}" + target_path = self._base_path / self._html_params.media_dir / target_name + target_path.write_bytes(content) + return f"./{self._html_params.media_dir}/{target_name}" def _push(self, tag: str, hlevel_offset: int) -> Any: - result = (self._toplevel_tag, self._headings, self._attrspans, self._hlevel_offset) + result = (self._toplevel_tag, self._headings, self._attrspans, self._hlevel_offset, self._in_dir) self._hlevel_offset += hlevel_offset self._toplevel_tag, self._headings, self._attrspans = tag, [], [] return result def _pop(self, state: Any) -> None: - (self._toplevel_tag, self._headings, self._attrspans, self._hlevel_offset) = state + (self._toplevel_tag, self._headings, self._attrspans, self._hlevel_offset, self._in_dir) = state def _render_book(self, tokens: Sequence[Token]) -> str: assert tokens[4].children @@ -284,6 +306,7 @@ def _render_book(self, tokens: Sequence[Token]) -> str: def _file_header(self, toc: TocEntry) -> str: prev_link, up_link, next_link = "", "", "" prev_a, next_a, parent_title = "", "", " " + nav_html = "" home = toc.root if toc.prev: prev_link = f'' @@ -299,6 +322,22 @@ def _file_header(self, toc: TocEntry) -> str: if toc.next: next_link = f'' next_a = f'Next' + if toc.prev or toc.parent or toc.next: + nav_html = "\n".join([ + ' ', + ]) return "\n".join([ '', ' str: "".join((f'' for script in self._html_params.scripts)), f' ', - f' ', + f' ' if home.target.href() else "", f' {up_link}{prev_link}{next_link}', ' ', ' ', - ' ', + nav_html, ]) def _file_footer(self, toc: TocEntry) -> str: # prev, next = self._get_prev_and_next() prev_a, up_a, home_a, next_a = "", " ", " ", "" prev_text, up_text, next_text = "", "", "" + nav_html = "" home = toc.root if toc.prev: prev_a = f'Prev' @@ -348,22 +376,26 @@ def _file_footer(self, toc: TocEntry) -> str: next_a = f'Next' assert toc.next.target.title next_text = toc.next.target.title + if toc.prev or toc.parent or toc.next: + nav_html = "\n".join([ + ' ', + ]) return "\n".join([ - ' ', + nav_html, ' ', '', ]) @@ -374,7 +406,7 @@ def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> str: return super()._heading_tag(token, tokens, i) def _build_toc(self, tokens: Sequence[Token], i: int) -> str: toc = TocEntry.of(tokens[i]) - if toc.kind == 'section': + if toc.kind == 'section' and self._html_params.section_toc_depth < 1: return "" def walk_and_emit(toc: TocEntry, depth: int) -> list[str]: if depth <= 0: @@ -394,34 +426,47 @@ def walk_and_emit(toc: TocEntry, depth: int) -> list[str]: if next_level: result.append(f'
{"".join(next_level)}
') return result - toc_depth = ( - self._html_params.chunk_toc_depth - if toc.starts_new_chunk and toc.kind != 'book' - else self._html_params.toc_depth - ) - if not (items := walk_and_emit(toc, toc_depth)): - return "" - examples = "" - if toc.examples: - examples_entries = [ - f'
{i + 1}. {ex.target.toc_html}
' - for i, ex in enumerate(toc.examples) + def build_list(kind: str, id: str, lst: Sequence[TocEntry]) -> str: + if not lst: + return "" + entries = [ + f'
{i}. {e.target.toc_html}
' + for i, e in enumerate(lst, start=1) ] - examples = ( - '
' - '

List of Examples

' - f'
{"".join(examples_entries)}
' + return ( + f'
' + f'

List of {kind}

' + f'
{"".join(entries)}
' '
' ) - return ( - f'
' - f'

Table of Contents

' + ) + # we don't want to generate the "Table of Contents" header for sections, + # docbook doesn't and it's only distracting clutter unless it's the main table. + # we also want to generate tocs only for a top-level section (ie, one that is + # not itself contained in another section) + print_title = toc.kind != 'section' + if toc.kind == 'section': + if toc.parent and toc.parent.kind == 'section': + toc_depth = 0 + else: + toc_depth = self._html_params.section_toc_depth + elif toc.starts_new_chunk and toc.kind != 'book': + toc_depth = self._html_params.chunk_toc_depth + else: + toc_depth = self._html_params.toc_depth + if not (items := walk_and_emit(toc, toc_depth)): + return "" + figures = build_list("Figures", "list-of-figures", toc.figures) + examples = build_list("Examples", "list-of-examples", toc.examples) + return "".join([ + f'
', + '

Table of Contents

' if print_title else "", f'
' f' {"".join(items)}' f'
' f'
' + f'{figures}' f'{examples}' - ) + ]) def _make_hN(self, level: int) -> tuple[str, str]: # for some reason chapters don't increase the hN nesting count in docbook xslts. duplicate @@ -458,8 +503,10 @@ def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: in # we do not set _hlevel_offset=0 because docbook doesn't either. else: inner = outer + in_dir = self._in_dir for included, path in fragments: try: + self._in_dir = (in_dir / path).parent inner.append(self.render(included)) except Exception as e: raise RuntimeError(f"rendering {path}") from e @@ -502,8 +549,9 @@ def __init__(self, revision: str, html_params: HTMLParameters, manpage_urls: Map # renderer not set on purpose since it has a dependency on the output path! def convert(self, infile: Path, outfile: Path) -> None: - self._renderer = ManualHTMLRenderer('book', self._revision, self._html_params, - self._manpage_urls, self._xref_targets, outfile.parent) + self._renderer = ManualHTMLRenderer( + 'book', self._revision, self._html_params, self._manpage_urls, self._xref_targets, + infile.parent, outfile.parent) super().convert(infile, outfile) def _parse(self, src: str) -> list[Token]: @@ -525,23 +573,24 @@ def _parse(self, src: str) -> list[Token]: self._redirection_targets.add(into) return tokens - def _number_examples(self, tokens: Sequence[Token], start: int = 1) -> int: + def _number_block(self, block: str, prefix: str, tokens: Sequence[Token], start: int = 1) -> int: + title_open, title_close = f'{block}_title_open', f'{block}_title_close' for (i, token) in enumerate(tokens): - if token.type == "example_title_open": + if token.type == title_open: title = tokens[i + 1] assert title.type == 'inline' and title.children # the prefix is split into two tokens because the xref title_html will want # only the first of the two, but both must be rendered into the example itself. title.children = ( [ - Token('text', '', 0, content=f'Example {start}'), + Token('text', '', 0, content=f'{prefix} {start}'), Token('text', '', 0, content='. ') ] + title.children ) start += 1 elif token.type.startswith('included_') and token.type != 'included_options': for sub, _path in token.meta['included']: - start = self._number_examples(sub, start) + start = self._number_block(block, prefix, sub, start) return start # xref | (id, type, heading inlines, file, starts new file) @@ -567,6 +616,12 @@ def _collect_ids(self, tokens: Sequence[Token], target_file: str, typ: str, file result += self._collect_ids(sub, sub_file, subtyp, si == 0 and sub_file != target_file) elif bt.type == 'example_open' and (id := cast(str, bt.attrs.get('id', ''))): result.append((id, 'example', tokens[i + 2], target_file, False)) + elif bt.type == 'figure_open' and (id := cast(str, bt.attrs.get('id', ''))): + result.append((id, 'figure', tokens[i + 2], target_file, False)) + elif bt.type == 'footnote_open' and (id := cast(str, bt.attrs.get('id', ''))): + result.append(XrefTarget(id, "???", None, None, target_file)) + elif bt.type == 'footnote_ref' and (id := cast(str, bt.attrs.get('id', ''))): + result.append(XrefTarget(id, "???", None, None, target_file)) elif bt.type == 'inline': assert bt.children result += self._collect_ids(bt.children, target_file, typ, False) @@ -591,8 +646,8 @@ def _render_xref(self, id: str, typ: str, inlines: Token, path: str, drop_fragme title = prefix + title_html toc_html = f"{n}. {title_html}" title_html = f"Appendix {n}" - elif typ == 'example': - # skip the prepended `Example N. 
` from _number_examples + elif typ in ['example', 'figure']: + # skip the prepended `{Example,Figure} N. ` from numbering toc_html, title = self._renderer.renderInline(inlines.children[2:]), title_html # xref title wants only the prepended text, sans the trailing colon and space title_html = self._renderer.renderInline(inlines.children[0:1]) @@ -607,7 +662,8 @@ def _render_xref(self, id: str, typ: str, inlines: Token, path: str, drop_fragme return XrefTarget(id, title_html, toc_html, re.sub('<.*?>', '', title), path, drop_fragment) def _postprocess(self, infile: Path, outfile: Path, tokens: Sequence[Token]) -> None: - self._number_examples(tokens) + self._number_block('example', "Example", tokens) + self._number_block('figure', "Figure", tokens) xref_queue = self._collect_ids(tokens, outfile.name, 'book', True) failed = False @@ -629,6 +685,22 @@ def _postprocess(self, infile: Path, outfile: Path, tokens: Sequence[Token]) -> failed = True # do another round and report the first error xref_queue = deferred + paths_seen = set() + for t in self._xref_targets.values(): + paths_seen.add(t.path) + + if len(paths_seen) == 1: + for (k, t) in self._xref_targets.items(): + self._xref_targets[k] = XrefTarget( + t.id, + t.title_html, + t.toc_html, + t.title, + t.path, + t.drop_fragment, + drop_target=True + ) + TocEntry.collect_and_link(self._xref_targets, tokens) @@ -647,6 +719,8 @@ def _build_cli_html(p: argparse.ArgumentParser) -> None: p.add_argument('--script', default=[], action='append') p.add_argument('--toc-depth', default=1, type=int) p.add_argument('--chunk-toc-depth', default=1, type=int) + p.add_argument('--section-toc-depth', default=0, type=int) + p.add_argument('--media-dir', default="media", type=Path) p.add_argument('infile', type=Path) p.add_argument('outfile', type=Path) @@ -660,7 +734,7 @@ def _run_cli_html(args: argparse.Namespace) -> None: md = HTMLConverter( args.revision, HTMLParameters(args.generator, args.stylesheet, args.script, args.toc_depth, - args.chunk_toc_depth), + args.chunk_toc_depth, args.section_toc_depth, args.media_dir), json.load(manpage_urls)) md.convert(args.infile, args.outfile) diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py index 95e6e9474e73f..c6e6bf4293706 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual_structure.py @@ -14,7 +14,7 @@ FragmentType = Literal['preface', 'part', 'chapter', 'section', 'appendix'] # in the TOC all fragments are allowed, plus the all-encompassing book. -TocEntryType = Literal['book', 'preface', 'part', 'chapter', 'section', 'appendix', 'example'] +TocEntryType = Literal['book', 'preface', 'part', 'chapter', 'section', 'appendix', 'example', 'figure'] def is_include(token: Token) -> bool: return token.type == "fence" and token.info.startswith("{=include=} ") @@ -110,9 +110,12 @@ class XrefTarget: path: str """whether to drop the `#anchor` from links when expanding xrefs""" drop_fragment: bool = False + """whether to drop the `path.html` from links when expanding xrefs. 
+ mostly useful for docbook compatibility""" + drop_target: bool = False def href(self) -> str: - path = html.escape(self.path, True) + path = "" if self.drop_target else html.escape(self.path, True) return path if self.drop_fragment else f"{path}#{html.escape(self.id, True)}" @dc.dataclass @@ -125,6 +128,7 @@ class TocEntry(Freezeable): children: list[TocEntry] = dc.field(default_factory=list) starts_new_chunk: bool = False examples: list[TocEntry] = dc.field(default_factory=list) + figures: list[TocEntry] = dc.field(default_factory=list) @property def root(self) -> TocEntry: @@ -139,7 +143,7 @@ def of(cls, token: Token) -> TocEntry: @classmethod def collect_and_link(cls, xrefs: dict[str, XrefTarget], tokens: Sequence[Token]) -> TocEntry: - entries, examples = cls._collect_entries(xrefs, tokens, 'book') + entries, examples, figures = cls._collect_entries(xrefs, tokens, 'book') def flatten_with_parent(this: TocEntry, parent: TocEntry | None) -> Iterable[TocEntry]: this.parent = parent @@ -157,6 +161,7 @@ def flatten_with_parent(this: TocEntry, parent: TocEntry | None) -> Iterable[Toc paths_seen.add(c.target.path) flat[0].examples = examples + flat[0].figures = figures for c in flat: c.freeze() @@ -165,21 +170,23 @@ def flatten_with_parent(this: TocEntry, parent: TocEntry | None) -> Iterable[Toc @classmethod def _collect_entries(cls, xrefs: dict[str, XrefTarget], tokens: Sequence[Token], - kind: TocEntryType) -> tuple[TocEntry, list[TocEntry]]: + kind: TocEntryType) -> tuple[TocEntry, list[TocEntry], list[TocEntry]]: # we assume that check_structure has been run recursively over the entire input. # list contains (tag, entry) pairs that will collapse to a single entry for # the full sequence. entries: list[tuple[str, TocEntry]] = [] examples: list[TocEntry] = [] + figures: list[TocEntry] = [] for token in tokens: if token.type.startswith('included_') and (included := token.meta.get('included')): fragment_type_str = token.type[9:].removesuffix('s') assert fragment_type_str in get_args(TocEntryType) fragment_type = cast(TocEntryType, fragment_type_str) for fragment, _path in included: - subentries, subexamples = cls._collect_entries(xrefs, fragment, fragment_type) + subentries, subexamples, subfigures = cls._collect_entries(xrefs, fragment, fragment_type) entries[-1][1].children.append(subentries) examples += subexamples + figures += subfigures elif token.type == 'heading_open' and (id := cast(str, token.attrs.get('id', ''))): while len(entries) > 1 and entries[-1][0] >= token.tag: entries[-2][1].children.append(entries.pop()[1]) @@ -188,7 +195,9 @@ def _collect_entries(cls, xrefs: dict[str, XrefTarget], tokens: Sequence[Token], token.meta['TocEntry'] = entries[-1][1] elif token.type == 'example_open' and (id := cast(str, token.attrs.get('id', ''))): examples.append(TocEntry('example', xrefs[id])) + elif token.type == 'figure_open' and (id := cast(str, token.attrs.get('id', ''))): + figures.append(TocEntry('figure', xrefs[id])) while len(entries) > 1: entries[-2][1].children.append(entries.pop()[1]) - return (entries[0][1], examples) + return (entries[0][1], examples, figures) diff --git a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py index 78e05642552b0..f754b61b44393 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py +++ b/pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/md.py @@ -1,6 +1,6 @@ from abc import ABC from collections.abc import Mapping, MutableMapping, Sequence -from typing 
import Any, cast, Generic, get_args, Iterable, Literal, NoReturn, Optional, TypeVar +from typing import Any, Callable, cast, Generic, get_args, Iterable, Literal, NoReturn, Optional, TypeVar import dataclasses import re @@ -12,6 +12,7 @@ from markdown_it.utils import OptionsDict from mdit_py_plugins.container import container_plugin # type: ignore[attr-defined] from mdit_py_plugins.deflist import deflist_plugin # type: ignore[attr-defined] +from mdit_py_plugins.footnote import footnote_plugin # type: ignore[attr-defined] from mdit_py_plugins.myst_role import myst_role_plugin # type: ignore[attr-defined] _md_escape_table = { @@ -40,7 +41,7 @@ def md_make_code(code: str, info: str = "", multiline: Optional[bool] = None) -> ticks, sep = ('`' * (longest + (3 if multiline else 1)), '\n' if multiline else ' ') return f"{ticks}{info}{sep}{code}{sep}{ticks}" -AttrBlockKind = Literal['admonition', 'example'] +AttrBlockKind = Literal['admonition', 'example', 'figure'] AdmonitionKind = Literal["note", "caution", "tip", "important", "warning"] @@ -90,6 +91,29 @@ def __init__(self, manpage_urls: Mapping[str, str]): "example_close": self.example_close, "example_title_open": self.example_title_open, "example_title_close": self.example_title_close, + "image": self.image, + "figure_open": self.figure_open, + "figure_close": self.figure_close, + "figure_title_open": self.figure_title_open, + "figure_title_close": self.figure_title_close, + "table_open": self.table_open, + "table_close": self.table_close, + "thead_open": self.thead_open, + "thead_close": self.thead_close, + "tr_open": self.tr_open, + "tr_close": self.tr_close, + "th_open": self.th_open, + "th_close": self.th_close, + "tbody_open": self.tbody_open, + "tbody_close": self.tbody_close, + "td_open": self.td_open, + "td_close": self.td_close, + "footnote_ref": self.footnote_ref, + "footnote_block_open": self.footnote_block_open, + "footnote_block_close": self.footnote_block_close, + "footnote_open": self.footnote_open, + "footnote_close": self.footnote_close, + "footnote_anchor": self.footnote_anchor, } self._admonitions = { @@ -225,6 +249,52 @@ def example_title_open(self, token: Token, tokens: Sequence[Token], i: int) -> s raise RuntimeError("md token not supported", token) def example_title_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: raise RuntimeError("md token not supported", token) + def image(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def figure_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def figure_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def figure_title_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def figure_title_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def table_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def table_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def thead_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def thead_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise 
RuntimeError("md token not supported", token) + def tr_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def tr_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def th_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def th_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def tbody_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def tbody_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def td_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def td_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def footnote_ref(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def footnote_block_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def footnote_block_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def footnote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def footnote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) + def footnote_anchor(self, token: Token, tokens: Sequence[Token], i: int) -> str: + raise RuntimeError("md token not supported", token) def _is_escaped(src: str, pos: int) -> bool: found = 0 @@ -267,6 +337,8 @@ def _parse_blockattrs(info: str) -> Optional[tuple[AttrBlockKind, Optional[str], return ('admonition', id, classes) if classes == ['example']: return ('example', id, classes) + elif classes == ['figure']: + return ('figure', id, classes) return None def _attr_span_plugin(md: markdown_it.MarkdownIt) -> None: @@ -368,6 +440,32 @@ def heading_ids(state: markdown_it.rules_core.StateCore) -> None: md.core.ruler.before("replacements", "heading_ids", heading_ids) +def _footnote_ids(md: markdown_it.MarkdownIt) -> None: + """generate ids for footnotes, their refs, and their backlinks. the ids we + generate here are derived from the footnote label, making numeric footnote + labels invalid. 
+ """ + def generate_ids(tokens: Sequence[Token]) -> None: + for token in tokens: + if token.type == 'footnote_open': + if token.meta["label"][:1].isdigit(): + assert token.map + raise RuntimeError(f"invalid footnote label in line {token.map[0] + 1}") + token.attrs['id'] = token.meta["label"] + elif token.type == 'footnote_anchor': + token.meta['target'] = f'{token.meta["label"]}.__back.{token.meta["subId"]}' + elif token.type == 'footnote_ref': + token.attrs['id'] = f'{token.meta["label"]}.__back.{token.meta["subId"]}' + token.meta['target'] = token.meta["label"] + elif token.type == 'inline': + assert token.children + generate_ids(token.children) + + def footnote_ids(state: markdown_it.rules_core.StateCore) -> None: + generate_ids(state.tokens) + + md.core.ruler.after("footnote_tail", "footnote_ids", footnote_ids) + def _compact_list_attr(md: markdown_it.MarkdownIt) -> None: @dataclasses.dataclass class Entry: @@ -416,6 +514,11 @@ def block_attr(state: markdown_it.rules_core.StateCore) -> None: if id is not None: token.attrs['id'] = id stack.append('example_close') + elif kind == 'figure': + token.type = 'figure_open' + if id is not None: + token.attrs['id'] = id + stack.append('figure_close') else: assert_never(kind) elif token.type == 'container_blockattr_close': @@ -423,31 +526,37 @@ def block_attr(state: markdown_it.rules_core.StateCore) -> None: md.core.ruler.push("block_attr", block_attr) -def _example_titles(md: markdown_it.MarkdownIt) -> None: +def _block_titles(block: str) -> Callable[[markdown_it.MarkdownIt], None]: + open, close = f'{block}_open', f'{block}_close' + title_open, title_close = f'{block}_title_open', f'{block}_title_close' + """ - find title headings of examples and stick them into meta for renderers, then - remove them from the token stream. also checks whether any example contains a + find title headings of blocks and stick them into meta for renderers, then + remove them from the token stream. also checks whether any block contains a non-title heading since those would make toc generation extremely complicated. 
""" - def example_titles(state: markdown_it.rules_core.StateCore) -> None: + def block_titles(state: markdown_it.rules_core.StateCore) -> None: in_example = [False] for i, token in enumerate(state.tokens): - if token.type == 'example_open': + if token.type == open: if state.tokens[i + 1].type == 'heading_open': assert state.tokens[i + 3].type == 'heading_close' - state.tokens[i + 1].type = 'example_title_open' - state.tokens[i + 3].type = 'example_title_close' + state.tokens[i + 1].type = title_open + state.tokens[i + 3].type = title_close else: assert token.map - raise RuntimeError(f"found example without title in line {token.map[0] + 1}") + raise RuntimeError(f"found {block} without title in line {token.map[0] + 1}") in_example.append(True) - elif token.type == 'example_close': + elif token.type == close: in_example.pop() elif token.type == 'heading_open' and in_example[-1]: assert token.map - raise RuntimeError(f"unexpected non-title heading in example in line {token.map[0] + 1}") + raise RuntimeError(f"unexpected non-title heading in {block} in line {token.map[0] + 1}") + + def do_add(md: markdown_it.MarkdownIt) -> None: + md.core.ruler.push(f"{block}_titles", block_titles) - md.core.ruler.push("example_titles", example_titles) + return do_add TR = TypeVar('TR', bound='Renderer') @@ -478,20 +587,24 @@ def __init__(self) -> None: }, renderer_cls=self.ForbiddenRenderer ) + self._md.enable('table') self._md.use( container_plugin, name="blockattr", validate=lambda name, *args: _parse_blockattrs(name), ) self._md.use(deflist_plugin) + self._md.use(footnote_plugin) self._md.use(myst_role_plugin) self._md.use(_attr_span_plugin) self._md.use(_inline_comment_plugin) self._md.use(_block_comment_plugin) self._md.use(_heading_ids) + self._md.use(_footnote_ids) self._md.use(_compact_list_attr) self._md.use(_block_attr) - self._md.use(_example_titles) + self._md.use(_block_titles("example")) + self._md.use(_block_titles("figure")) self._md.enable(["smartquotes", "replacements"]) def _parse(self, src: str) -> list[Token]: diff --git a/pkgs/tools/nix/nixos-render-docs/src/tests/test_commonmark.py b/pkgs/tools/nix/nixos-render-docs/src/tests/test_commonmark.py index d808c5b50c345..4ff0bc3095c3d 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/tests/test_commonmark.py +++ b/pkgs/tools/nix/nixos-render-docs/src/tests/test_commonmark.py @@ -91,3 +91,9 @@ def test_full() -> None: - *‌more stuff in same deflist‌*     foo""".replace(' ', ' ') + +def test_images() -> None: + c = Converter({}) + assert c._render("![*alt text*](foo \"title \\\"quoted\\\" text\")") == ( + "![*alt text*](foo \"title \\\"quoted\\\" text\")" + ) diff --git a/pkgs/tools/nix/nixos-render-docs/src/tests/test_html.py b/pkgs/tools/nix/nixos-render-docs/src/tests/test_html.py index df366a8babd7e..96cf8d0b7dff1 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/tests/test_html.py +++ b/pkgs/tools/nix/nixos-render-docs/src/tests/test_html.py @@ -1,12 +1,17 @@ import nixos_render_docs as nrd import pytest +import textwrap from sample_md import sample1 +class Renderer(nrd.html.HTMLRenderer): + def _pull_image(self, src: str) -> str: + return src + class Converter(nrd.md.Converter[nrd.html.HTMLRenderer]): def __init__(self, manpage_urls: dict[str, str], xrefs: dict[str, nrd.manual_structure.XrefTarget]): super().__init__() - self._renderer = nrd.html.HTMLRenderer(manpage_urls, xrefs) + self._renderer = Renderer(manpage_urls, xrefs) def unpretty(s: str) -> str: return "".join(map(str.strip, s.splitlines())).replace('␣', ' ').replace('↵', '\n') 
@@ -69,6 +74,78 @@ def test_xrefs() -> None: c._render("[](#baz)") assert exc.value.args[0] == 'bad local reference, id #baz not known' +def test_images() -> None: + c = Converter({}, {}) + assert c._render("![*alt text*](foo \"title text\")") == unpretty(""" +

+

+ *alt text* +
+

+ """) + +def test_tables() -> None: + c = Converter({}, {}) + assert c._render(textwrap.dedent(""" + | d | l | m | r | + |---|:--|:-:|--:| + | a | b | c | d | + """)) == unpretty(""" +
+ + + + + + + + + + + + + + + + + + + + + + + +
dlmr
abcd
+
+ """) + +def test_footnotes() -> None: + c = Converter({}, { + "bar": nrd.manual_structure.XrefTarget("bar", "", None, None, ""), + "bar.__back.0": nrd.manual_structure.XrefTarget("bar.__back.0", "", None, None, ""), + "bar.__back.1": nrd.manual_structure.XrefTarget("bar.__back.1", "", None, None, ""), + }) + assert c._render(textwrap.dedent(""" + foo [^bar] baz [^bar] + + [^bar]: note + """)) == unpretty(""" +

+ foo [1]␣ + baz [1] +

+
+
+
+
+

+ note[1] + [1] +

+
+
+ """) + def test_full() -> None: c = Converter({ 'man(1)': 'http://example.org' }, {}) assert c._render(sample1) == unpretty(""" diff --git a/pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py b/pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py index fb7a4ab0117f7..8564297efdd3a 100644 --- a/pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py +++ b/pkgs/tools/nix/nixos-render-docs/src/tests/test_plugins.py @@ -501,3 +501,28 @@ def test_example() -> None: with pytest.raises(RuntimeError) as exc: c._parse("::: {.example}\n### foo\n### bar\n:::") assert exc.value.args[0] == 'unexpected non-title heading in example in line 3' + +def test_footnotes() -> None: + c = Converter({}) + assert c._parse("text [^foo]\n\n[^foo]: bar") == [ + Token(type='paragraph_open', tag='p', nesting=1, map=[0, 1], block=True), + Token(type='inline', tag='', nesting=0, map=[0, 1], level=1, content='text [^foo]', block=True, + children=[ + Token(type='text', tag='', nesting=0, content='text '), + Token(type='footnote_ref', tag='', nesting=0, attrs={'id': 'foo.__back.0'}, + meta={'id': 0, 'subId': 0, 'label': 'foo', 'target': 'foo'}) + ]), + Token(type='paragraph_close', tag='p', nesting=-1, block=True), + Token(type='footnote_block_open', tag='', nesting=1), + Token(type='footnote_open', tag='', nesting=1, attrs={'id': 'foo'}, meta={'id': 0, 'label': 'foo'}), + Token(type='paragraph_open', tag='p', nesting=1, map=[2, 3], level=1, block=True, hidden=False), + Token(type='inline', tag='', nesting=0, map=[2, 3], level=2, content='bar', block=True, + children=[ + Token(type='text', tag='', nesting=0, content='bar') + ]), + Token(type='footnote_anchor', tag='', nesting=0, + meta={'id': 0, 'label': 'foo', 'subId': 0, 'target': 'foo.__back.0'}), + Token(type='paragraph_close', tag='p', nesting=-1, level=1, block=True), + Token(type='footnote_close', tag='', nesting=-1), + Token(type='footnote_block_close', tag='', nesting=-1), + ]