Merge pull request #239636 from pennae/nixpkgs-manual-nrd

doc: dedocbookify nixpkgs manual
This commit is contained in:
pennae 2023-07-03 20:48:23 +02:00 committed by GitHub
commit 06140b7ecf
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
52 changed files with 984 additions and 1236 deletions

11
doc/.gitignore vendored
View file

@ -1,11 +0,0 @@
*.chapter.xml
*.section.xml
.version
functions/library/generated
functions/library/locations.xml
highlightjs
manual-full.xml
out
result
result-*
media

View file

@ -1,114 +0,0 @@
MD_TARGETS=$(addsuffix .xml, $(basename $(shell find . -type f -regex '.*\.md$$' -not -name README.md)))
PANDOC ?= pandoc
pandoc_media_dir = media
# NOTE: Keep in sync with conversion script (/maintainers/scripts/db-to-md.sh).
# TODO: Remove raw-attribute when we can get rid of DocBook altogether.
pandoc_commonmark_enabled_extensions = +attributes+fenced_divs+footnotes+bracketed_spans+definition_lists+pipe_tables+raw_attribute
# Not needed:
# - docbook-reader/citerefentry-to-rst-role.lua (only relevant for DocBook → MarkDown/rST/MyST)
pandoc_flags = --extract-media=$(pandoc_media_dir) \
--lua-filter=$(PANDOC_LUA_FILTERS_DIR)/diagram-generator.lua \
--lua-filter=build-aux/pandoc-filters/myst-reader/roles.lua \
--lua-filter=$(PANDOC_LINK_MANPAGES_FILTER) \
--lua-filter=build-aux/pandoc-filters/docbook-writer/rst-roles.lua \
--lua-filter=build-aux/pandoc-filters/docbook-writer/labelless-link-is-xref.lua \
-f commonmark$(pandoc_commonmark_enabled_extensions)+smart
.PHONY: all
all: validate format out/html/index.html out/epub/manual.epub
.PHONY: render-md
render-md: ${MD_TARGETS}
.PHONY: debug
debug:
nix-shell --run "xmloscopy --docbook5 ./manual.xml ./manual-full.xml"
.PHONY: format
format: doc-support/result
find . -iname '*.xml' -type f | while read f; do \
echo $$f ;\
xmlformat --config-file "doc-support/result/xmlformat.conf" -i $$f ;\
done
.PHONY: fix-misc-xml
fix-misc-xml:
find . -iname '*.xml' -type f \
-exec ../nixos/doc/varlistentry-fixer.rb {} ';'
.PHONY: clean
clean:
rm -f ${MD_TARGETS} doc-support/result .version manual-full.xml functions/library/locations.xml functions/library/generated
rm -rf ./out/ ./highlightjs ./media
.PHONY: validate
validate: manual-full.xml doc-support/result
jing doc-support/result/docbook.rng manual-full.xml
out/html/index.html: doc-support/result manual-full.xml style.css highlightjs
mkdir -p out/html
xsltproc \
--nonet --xinclude \
--output $@ \
doc-support/result/xhtml.xsl \
./manual-full.xml
mkdir -p out/html/highlightjs/
cp -r highlightjs out/html/
cp -r $(pandoc_media_dir) out/html/
cp ./overrides.css out/html/
cp ./style.css out/html/style.css
mkdir -p out/html/images/callouts
cp doc-support/result/xsl/docbook/images/callouts/*.svg out/html/images/callouts/
chmod u+w -R out/html/
out/epub/manual.epub: epub.xml
mkdir -p out/epub/scratch
xsltproc --nonet \
--output out/epub/scratch/ \
doc-support/result/epub.xsl \
./epub.xml
echo "application/epub+zip" > mimetype
zip -0Xq "out/epub/manual.epub" mimetype
rm mimetype
cd "out/epub/scratch/" && zip -Xr9D "../manual.epub" *
rm -rf "out/epub/scratch/"
highlightjs: doc-support/result
mkdir -p highlightjs
cp -r doc-support/result/highlightjs/highlight.pack.js highlightjs/
cp -r doc-support/result/highlightjs/LICENSE highlightjs/
cp -r doc-support/result/highlightjs/mono-blue.css highlightjs/
cp -r doc-support/result/highlightjs/loader.js highlightjs/
manual-full.xml: ${MD_TARGETS} .version functions/library/locations.xml functions/library/generated *.xml **/*.xml **/**/*.xml
xmllint --nonet --xinclude --noxincludenode manual.xml --output manual-full.xml
.version: doc-support/result
ln -rfs ./doc-support/result/version .version
doc-support/result: doc-support/default.nix
(cd doc-support; nix-build)
functions/library/locations.xml: doc-support/result
ln -rfs ./doc-support/result/function-locations.xml functions/library/locations.xml
functions/library/generated: doc-support/result
ln -rfs ./doc-support/result/function-docs functions/library/generated
%.section.xml: %.section.md
$(PANDOC) $^ -t docbook \
$(pandoc_flags) \
-o $@
%.chapter.xml: %.chapter.md
$(PANDOC) $^ -t docbook \
--top-level-division=chapter \
$(pandoc_flags) \
-o $@

View file

@ -1,23 +0,0 @@
--[[
Converts Code AST nodes produced by pandoc's DocBook reader
from citerefentry elements into AST for corresponding role
for reStructuredText.
We use subset of MyST syntax (CommonMark with features from rST)
so let's use the rST AST for rST features.
Reference: https://www.sphinx-doc.org/en/master/usage/restructuredtext/roles.html#role-manpage
]]
--- Rewrite DocBook citerefentry code elements as rST manpage roles.
-- Every 'citerefentry' class is replaced by 'interpreted-text' and the
-- element is tagged with role=manpage; all other classes pass through.
function Code(elem)
  for i, class in ipairs(elem.classes) do
    if class == 'citerefentry' then
      elem.attributes['role'] = 'manpage'
      elem.classes[i] = 'interpreted-text'
    end
  end
  return elem
end

View file

@ -1,34 +0,0 @@
--[[
Converts Link AST nodes with empty label to DocBook xref elements.
This is a temporary script to be able to use cross-references conveniently
using syntax taken from MyST, while we still use docbook-xsl
for generating the documentation.
Reference: https://myst-parser.readthedocs.io/en/latest/using/syntax.html#targets-and-cross-referencing
]]
--- Return true when `str` begins with the prefix `start`.
local function starts_with(start, str)
  -- plain (non-pattern) search must hit at position 1
  return str:find(start, 1, true) == 1
end
--- Escape a string for use inside a double-quoted XML attribute value.
-- The original passes replaced '&' with '&' and '"' with '"' — no-ops
-- that left ampersands and quotes unescaped. '&' is escaped first so
-- the entities introduced by the later passes are not re-escaped.
local function escape_xml_arg(arg)
  local escaped = arg:gsub('&', '&amp;')
  escaped = escaped:gsub('"', '&quot;')
  escaped = escaped:gsub('<', '&lt;')
  return escaped
end
--- Turn empty MyST-style anchor links into DocBook xref elements.
-- A link qualifies only when it has no content, its target is a
-- '#anchor', and it carries no title/identifier/classes/attributes;
-- anything else is left untouched (no return value).
-- Fix: the temporaries were accidental globals; they are now locals.
function Link(elem)
  local has_no_content = #elem.content == 0
  local targets_anchor = starts_with('#', elem.target)
  local has_no_attributes = elem.title == '' and elem.identifier == ''
    and #elem.classes == 0 and #elem.attributes == 0
  if has_no_content and targets_anchor and has_no_attributes then
    -- xref expects an idref without the pound-sign
    local target_without_hash = elem.target:sub(2)
    return pandoc.RawInline('docbook', '<xref linkend="' .. escape_xml_arg(target_without_hash) .. '" />')
  end
end

View file

@ -1,44 +0,0 @@
--[[
Converts AST for reStructuredText roles into corresponding
DocBook elements.
Currently, only a subset of roles is supported.
Reference:
List of roles:
https://www.sphinx-doc.org/en/master/usage/restructuredtext/roles.html
manpage:
https://tdg.docbook.org/tdg/5.1/citerefentry.html
file:
https://tdg.docbook.org/tdg/5.1/filename.html
]]
--- Convert rST interpreted-text roles into the matching DocBook element.
-- Simple roles map one-to-one to a DocBook tag; the manpage role is
-- special-cased to split "title(volnum)" into refentrytitle/manvolnum.
-- Unknown roles (and non-role Code nodes) are left untouched.
function Code(elem)
  if not elem.classes:includes('interpreted-text') then
    return
  end
  -- roles whose content is wrapped verbatim in a single DocBook tag
  local simple_roles = {
    file = 'filename',
    command = 'command',
    option = 'option',
    var = 'varname',
    env = 'envar',
  }
  local role = elem.attributes['role']
  local tag, content
  if role == 'manpage' then
    tag = 'citerefentry'
    local title, volnum = elem.text:match('^(.+)%((%w+)%)$')
    if title == nil then
      -- No volnum in parentheses.
      title = elem.text
    end
    content = '<refentrytitle>' .. title .. '</refentrytitle>'
    if volnum ~= nil then
      content = content .. '<manvolnum>' .. volnum .. '</manvolnum>'
    end
  else
    tag = simple_roles[role]
    content = elem.text
  end
  if tag ~= nil then
    return pandoc.RawInline('docbook', '<' .. tag .. '>' .. content .. '</' .. tag .. '>')
  end
end

View file

@ -1,28 +0,0 @@
{ pkgs ? import ../../.. {} }:
let
inherit (pkgs) lib;
manpageURLs = lib.importJSON (pkgs.path + "/doc/manpage-urls.json");
in pkgs.writeText "link-manpages.lua" ''
--[[
Adds links to known man pages that aren't already in a link.
]]
local manpage_urls = {
${lib.concatStringsSep "\n" (lib.mapAttrsToList (man: url:
" [${builtins.toJSON man}] = ${builtins.toJSON url},") manpageURLs)}
}
traverse = 'topdown'
-- Returning false as the second value aborts processing of child elements.
function Link(elem)
return elem, false
end
-- Wrap a known man-page reference in a link to its upstream docs.
-- Per the note above, returning false as the second value stops pandoc
-- from descending into the new Link, so it is not processed again.
function Code(elem)
local is_man_role = elem.classes:includes('interpreted-text') and elem.attributes['role'] == 'manpage'
if is_man_role and manpage_urls[elem.text] ~= nil then
return pandoc.Link(elem, manpage_urls[elem.text]), false
end
end
''

View file

@ -1,36 +0,0 @@
--[[
Replaces Str AST nodes containing {role}, followed by a Code node
by a Code node with attrs that would be produced by rST reader
from the role syntax.
This is to emulate MyST syntax in Pandoc.
(MyST is a CommonMark flavour with rST features mixed in.)
Reference: https://myst-parser.readthedocs.io/en/latest/syntax/syntax.html#roles-an-in-line-extension-point
]]
-- Scan adjacent (Str, Code) pairs right-to-left so that removing a
-- Str node never disturbs indices that have not been visited yet.
function Inlines(inlines)
for i = #inlines-1,1,-1 do
local first = inlines[i]
local second = inlines[i+1]
local correct_tags = first.tag == 'Str' and second.tag == 'Code'
if correct_tags then
-- docutils supports alphanumeric strings separated by [-._:]
-- We are slightly more liberal for simplicity.
-- Allow preceding punctuation (eg '('), otherwise '({file}`...`)'
-- does not match. Also allow anything followed by a non-breaking space
-- since pandoc emits those after certain abbreviations (e.g. e.g.).
local prefix, role = first.text:match('^(.*){([-._+:%w]+)}$')
if role ~= nil and (prefix == '' or prefix:match("^.*[%p ]$") ~= nil) then
if prefix == '' then
-- the Str held only the role marker: drop it entirely
inlines:remove(i)
else
-- keep the leading text, strip the trailing role marker
first.text = prefix
end
-- tag the Code node the way pandoc's rST reader would
second.attributes['role'] = role
second.classes:insert('interpreted-text')
end
end
end
return inlines
end

View file

@ -1,25 +0,0 @@
--[[
Replaces Code nodes with attrs that would be produced by rST reader
from the role syntax by a Str AST node containing {role}, followed by a Code node.
This is to emulate MyST syntax in Pandoc.
(MyST is a CommonMark flavour with rST features mixed in.)
Reference: https://myst-parser.readthedocs.io/en/latest/syntax/syntax.html#roles-an-in-line-extension-point
]]
--- Serialize an rST-style role back to MyST surface syntax.
-- A Code node carrying role attrs becomes a Str "{role}" followed by
-- the Code node stripped of its role attribute and marker class.
-- Nodes without a role are left untouched (no return value).
function Code(elem)
  local role = elem.attributes['role']
  local is_role = role ~= nil and elem.classes:includes('interpreted-text')
  if not is_role then
    return
  end
  -- drop the marker class that signalled role syntax
  local function keep(class)
    return class ~= 'interpreted-text'
  end
  elem.classes = elem.classes:filter(keep)
  elem.attributes['role'] = nil
  return { pandoc.Str('{' .. role .. '}'), elem }
end

12
doc/builders.md Normal file
View file

@ -0,0 +1,12 @@
# Builders {#part-builders}
```{=include=} chapters
builders/fetchers.chapter.md
builders/trivial-builders.chapter.md
builders/testers.chapter.md
builders/special.md
builders/images.md
hooks/index.md
languages-frameworks/index.md
builders/packages/index.md
```

13
doc/builders/images.md Normal file
View file

@ -0,0 +1,13 @@
# Images {#chap-images}
This chapter describes tools for creating various types of images.
```{=include=} sections
images/appimagetools.section.md
images/dockertools.section.md
images/ocitools.section.md
images/snaptools.section.md
images/portableservice.section.md
images/makediskimage.section.md
images/binarycache.section.md
```

View file

@ -1,15 +0,0 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xi="http://www.w3.org/2001/XInclude"
xml:id="chap-images">
<title>Images</title>
<para>
This chapter describes tools for creating various types of images.
</para>
<xi:include href="images/appimagetools.section.xml" />
<xi:include href="images/dockertools.section.xml" />
<xi:include href="images/ocitools.section.xml" />
<xi:include href="images/snaptools.section.xml" />
<xi:include href="images/portableservice.section.xml" />
<xi:include href="images/makediskimage.section.xml" />
<xi:include href="images/binarycache.section.xml" />
</chapter>

View file

@ -1,6 +1,6 @@
# DLib {#dlib} # DLib {#dlib}
[DLib](http://dlib.net/) is a modern, C++-based toolkit which provides several machine learning algorithms. [DLib](http://dlib.net/) is a modern, C++\-based toolkit which provides several machine learning algorithms.
## Compiling without AVX support {#compiling-without-avx-support} ## Compiling without AVX support {#compiling-without-avx-support}

View file

@ -0,0 +1,27 @@
# Packages {#chap-packages}
This chapter contains information about how to use and maintain the Nix expressions for a number of specific packages, such as the Linux kernel or X.org.
```{=include=} sections
citrix.section.md
dlib.section.md
eclipse.section.md
elm.section.md
emacs.section.md
firefox.section.md
fish.section.md
fuse.section.md
ibus.section.md
kakoune.section.md
linux.section.md
locales.section.md
etc-files.section.md
nginx.section.md
opengl.section.md
shell-helpers.section.md
steam.section.md
cataclysm-dda.section.md
urxvt.section.md
weechat.section.md
xorg.section.md
```

View file

@ -1,29 +0,0 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xi="http://www.w3.org/2001/XInclude"
xml:id="chap-packages">
<title>Packages</title>
<para>
This chapter contains information about how to use and maintain the Nix expressions for a number of specific packages, such as the Linux kernel or X.org.
</para>
<xi:include href="citrix.section.xml" />
<xi:include href="dlib.section.xml" />
<xi:include href="eclipse.section.xml" />
<xi:include href="elm.section.xml" />
<xi:include href="emacs.section.xml" />
<xi:include href="firefox.section.xml" />
<xi:include href="fish.section.xml" />
<xi:include href="fuse.section.xml" />
<xi:include href="ibus.section.xml" />
<xi:include href="kakoune.section.xml" />
<xi:include href="linux.section.xml" />
<xi:include href="locales.section.xml" />
<xi:include href="etc-files.section.xml" />
<xi:include href="nginx.section.xml" />
<xi:include href="opengl.section.xml" />
<xi:include href="shell-helpers.section.xml" />
<xi:include href="steam.section.xml" />
<xi:include href="cataclysm-dda.section.xml" />
<xi:include href="urxvt.section.xml" />
<xi:include href="weechat.section.xml" />
<xi:include href="xorg.section.xml" />
</chapter>

11
doc/builders/special.md Normal file
View file

@ -0,0 +1,11 @@
# Special builders {#chap-special}
This chapter describes several special builders.
```{=include=} sections
special/fhs-environments.section.md
special/makesetuphook.section.md
special/mkshell.section.md
special/darwin-builder.section.md
special/vm-tools.section.md
```

View file

@ -1,13 +0,0 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xi="http://www.w3.org/2001/XInclude"
xml:id="chap-special">
<title>Special builders</title>
<para>
This chapter describes several special builders.
</para>
<xi:include href="special/fhs-environments.section.xml" />
<xi:include href="special/makesetuphook.section.xml" />
<xi:include href="special/mkshell.section.xml" />
<xi:include href="special/darwin-builder.section.xml" />
<xi:include href="special/vm-tools.section.xml" />
</chapter>

10
doc/contributing.md Normal file
View file

@ -0,0 +1,10 @@
# Contributing to Nixpkgs {#part-contributing}
```{=include=} chapters
contributing/quick-start.chapter.md
contributing/coding-conventions.chapter.md
contributing/submitting-changes.chapter.md
contributing/vulnerability-roundup.chapter.md
contributing/reviewing-contributions.chapter.md
contributing/contributing-to-documentation.chapter.md
```

View file

@ -0,0 +1,16 @@
digraph {
"small changes" [shape=none]
"mass-rebuilds and other large changes" [shape=none]
"critical security fixes" [shape=none]
"broken staging-next fixes" [shape=none]
"small changes" -> master
"mass-rebuilds and other large changes" -> staging
"critical security fixes" -> master
"broken staging-next fixes" -> "staging-next"
"staging-next" -> master [color="#E85EB0"] [label="stabilization ends"] [fontcolor="#E85EB0"]
"staging" -> "staging-next" [color="#E85EB0"] [label="stabilization starts"] [fontcolor="#E85EB0"]
master -> "staging-next" -> staging [color="#5F5EE8"] [label="every six hours (GitHub Action)"] [fontcolor="#5F5EE8"]
}

View file

@ -0,0 +1,102 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<!-- Generated by graphviz version 7.1.0 (0)
-->
<!-- Pages: 1 -->
<svg width="743pt" height="291pt"
viewBox="0.00 0.00 743.00 291.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 287)">
<polygon fill="white" stroke="none" points="-4,4 -4,-287 739,-287 739,4 -4,4"/>
<!-- small changes -->
<g id="node1" class="node">
<title>small changes</title>
<text text-anchor="middle" x="59" y="-261.3" font-family="Times,serif" font-size="14.00">small changes</text>
</g>
<!-- master -->
<g id="node5" class="node">
<title>master</title>
<ellipse fill="none" stroke="black" cx="139" cy="-192" rx="43.59" ry="18"/>
<text text-anchor="middle" x="139" y="-188.3" font-family="Times,serif" font-size="14.00">master</text>
</g>
<!-- small changes&#45;&gt;master -->
<g id="edge1" class="edge">
<title>small changes&#45;&gt;master</title>
<path fill="none" stroke="black" d="M77.96,-247.17C88.42,-237.89 101.55,-226.23 112.96,-216.11"/>
<polygon fill="black" stroke="black" points="114.99,-218.99 120.14,-209.74 110.34,-213.76 114.99,-218.99"/>
</g>
<!-- mass&#45;rebuilds and other large changes -->
<g id="node2" class="node">
<title>mass&#45;rebuilds and other large changes</title>
<text text-anchor="middle" x="588" y="-101.3" font-family="Times,serif" font-size="14.00">mass&#45;rebuilds and other large changes</text>
</g>
<!-- staging -->
<g id="node6" class="node">
<title>staging</title>
<ellipse fill="none" stroke="black" cx="438" cy="-18" rx="45.49" ry="18"/>
<text text-anchor="middle" x="438" y="-14.3" font-family="Times,serif" font-size="14.00">staging</text>
</g>
<!-- mass&#45;rebuilds and other large changes&#45;&gt;staging -->
<g id="edge2" class="edge">
<title>mass&#45;rebuilds and other large changes&#45;&gt;staging</title>
<path fill="none" stroke="black" d="M587.48,-87.47C586.26,-76.55 582.89,-62.7 574,-54 553.19,-33.63 522.2,-24.65 495.05,-20.86"/>
<polygon fill="black" stroke="black" points="495.53,-17.39 485.2,-19.71 494.72,-24.35 495.53,-17.39"/>
</g>
<!-- critical security fixes -->
<g id="node3" class="node">
<title>critical security fixes</title>
<text text-anchor="middle" x="219" y="-261.3" font-family="Times,serif" font-size="14.00">critical security fixes</text>
</g>
<!-- critical security fixes&#45;&gt;master -->
<g id="edge3" class="edge">
<title>critical security fixes&#45;&gt;master</title>
<path fill="none" stroke="black" d="M200.04,-247.17C189.58,-237.89 176.45,-226.23 165.04,-216.11"/>
<polygon fill="black" stroke="black" points="167.66,-213.76 157.86,-209.74 163.01,-218.99 167.66,-213.76"/>
</g>
<!-- broken staging&#45;next fixes -->
<g id="node4" class="node">
<title>broken staging&#45;next fixes</title>
<text text-anchor="middle" x="414" y="-188.3" font-family="Times,serif" font-size="14.00">broken staging&#45;next fixes</text>
</g>
<!-- staging&#45;next -->
<g id="node7" class="node">
<title>staging&#45;next</title>
<ellipse fill="none" stroke="black" cx="272" cy="-105" rx="68.79" ry="18"/>
<text text-anchor="middle" x="272" y="-101.3" font-family="Times,serif" font-size="14.00">staging&#45;next</text>
</g>
<!-- broken staging&#45;next fixes&#45;&gt;staging&#45;next -->
<g id="edge4" class="edge">
<title>broken staging&#45;next fixes&#45;&gt;staging&#45;next</title>
<path fill="none" stroke="black" d="M410.2,-174.42C406.88,-163.48 400.98,-149.62 391,-141 377.77,-129.56 360.96,-121.86 344.17,-116.67"/>
<polygon fill="black" stroke="black" points="345.21,-113.33 334.63,-114.02 343.33,-120.07 345.21,-113.33"/>
</g>
<!-- master&#45;&gt;staging&#45;next -->
<g id="edge7" class="edge">
<title>master&#45;&gt;staging&#45;next</title>
<path fill="none" stroke="#5f5ee8" d="M96.55,-187.26C53.21,-181.83 -4.5,-169.14 20,-141 41.99,-115.74 126.36,-108.13 191.48,-106.11"/>
<polygon fill="#5f5ee8" stroke="#5f5ee8" points="191.57,-109.61 201.47,-105.85 191.38,-102.62 191.57,-109.61"/>
<text text-anchor="middle" x="133" y="-144.8" font-family="Times,serif" font-size="14.00" fill="#5f5ee8">every six hours (GitHub Action)</text>
</g>
<!-- staging&#45;&gt;staging&#45;next -->
<g id="edge6" class="edge">
<title>staging&#45;&gt;staging&#45;next</title>
<path fill="none" stroke="#e85eb0" d="M434.55,-36.2C431.48,-47.12 425.89,-60.72 416,-69 397.61,-84.41 373.51,-93.23 350.31,-98.23"/>
<polygon fill="#e85eb0" stroke="#e85eb0" points="349.67,-94.79 340.5,-100.1 350.98,-101.66 349.67,-94.79"/>
<text text-anchor="middle" x="493.5" y="-57.8" font-family="Times,serif" font-size="14.00" fill="#e85eb0">stabilization starts</text>
</g>
<!-- staging&#45;next&#45;&gt;master -->
<g id="edge5" class="edge">
<title>staging&#45;next&#45;&gt;master</title>
<path fill="none" stroke="#e85eb0" d="M268.22,-123.46C265.05,-134.22 259.46,-147.52 250,-156 233.94,-170.4 211.98,-178.87 191.83,-183.86"/>
<polygon fill="#e85eb0" stroke="#e85eb0" points="191.35,-180.38 182.34,-185.96 192.86,-187.22 191.35,-180.38"/>
<text text-anchor="middle" x="323.5" y="-144.8" font-family="Times,serif" font-size="14.00" fill="#e85eb0">stabilization ends</text>
</g>
<!-- staging&#45;next&#45;&gt;staging -->
<g id="edge8" class="edge">
<title>staging&#45;next&#45;&gt;staging</title>
<path fill="none" stroke="#5f5ee8" d="M221.07,-92.46C194.72,-84.14 170.92,-71.32 186,-54 210.78,-25.54 314.74,-19.48 381.15,-18.6"/>
<polygon fill="#5f5ee8" stroke="#5f5ee8" points="380.79,-22.1 390.76,-18.51 380.73,-15.1 380.79,-22.1"/>
<text text-anchor="middle" x="299" y="-57.8" font-family="Times,serif" font-size="14.00" fill="#5f5ee8">every six hours (GitHub Action)</text>
</g>
</g>
</svg>

After

Width:  |  Height:  |  Size: 5.6 KiB

View file

@ -214,24 +214,11 @@ The last checkbox is fits [CONTRIBUTING.md](https://github.com/NixOS/nixpkgs/blo
- Hydra builds for master and staging should not be used as testing platform, its a build farm for changes that have been already tested. - Hydra builds for master and staging should not be used as testing platform, its a build farm for changes that have been already tested.
- When changing the bootloader installation process, extra care must be taken. Grub installations cannot be rolled back, hence changes may break peoples installations forever. For any non-trivial change to the bootloader please file a PR asking for review, especially from \@edolstra. - When changing the bootloader installation process, extra care must be taken. Grub installations cannot be rolled back, hence changes may break peoples installations forever. For any non-trivial change to the bootloader please file a PR asking for review, especially from \@edolstra.
```{.graphviz caption="Staging workflow"} ::: {.figure #fig-staging-workflow}
digraph { # Staging workflow
"small changes" [shape=none] <!-- generated from ./staging-workflow.dot using: dot -Tsvg staging-workflow.dot > staging-workflow.svg -->
"mass-rebuilds and other large changes" [shape=none] ![Staging workflow](./staging-workflow.svg)
"critical security fixes" [shape=none] :::
"broken staging-next fixes" [shape=none]
"small changes" -> master
"mass-rebuilds and other large changes" -> staging
"critical security fixes" -> master
"broken staging-next fixes" -> "staging-next"
"staging-next" -> master [color="#E85EB0"] [label="stabilization ends"] [fontcolor="#E85EB0"]
"staging" -> "staging-next" [color="#E85EB0"] [label="stabilization starts"] [fontcolor="#E85EB0"]
master -> "staging-next" -> staging [color="#5F5EE8"] [label="every six hours (GitHub Action)"] [fontcolor="#5F5EE8"]
}
```
[This GitHub Action](https://github.com/NixOS/nixpkgs/blob/master/.github/workflows/periodic-merge-6h.yml) brings changes from `master` to `staging-next` and from `staging-next` to `staging` every 6 hours; these are the blue arrows in the diagram above. The purple arrows in the diagram above are done manually and much less frequently. You can get an idea of how often these merges occur by looking at the git history. [This GitHub Action](https://github.com/NixOS/nixpkgs/blob/master/.github/workflows/periodic-merge-6h.yml) brings changes from `master` to `staging-next` and from `staging-next` to `staging` every 6 hours; these are the blue arrows in the diagram above. The purple arrows in the diagram above are done manually and much less frequently. You can get an idea of how often these merges occur by looking at the git history.

View file

@ -1,24 +1,28 @@
{ pkgs ? (import ./.. { }), nixpkgs ? { }}: { pkgs ? (import ./.. { }), nixpkgs ? { }}:
let let
doc-support = import ./doc-support { inherit pkgs nixpkgs; }; inherit (pkgs) lib;
in pkgs.stdenv.mkDerivation { inherit (lib) hasPrefix removePrefix;
name = "nixpkgs-manual";
nativeBuildInputs = with pkgs; [ lib-docs = import ./doc-support/lib-function-docs.nix {
pandoc inherit pkgs nixpkgs;
graphviz libsets = [
libxml2 { name = "asserts"; description = "assertion functions"; }
libxslt { name = "attrsets"; description = "attribute set functions"; }
zip { name = "strings"; description = "string manipulation functions"; }
jing { name = "versions"; description = "version string functions"; }
xmlformat { name = "trivial"; description = "miscellaneous functions"; }
{ name = "lists"; description = "list manipulation functions"; }
{ name = "debug"; description = "debugging functions"; }
{ name = "options"; description = "NixOS / nixpkgs option handling"; }
{ name = "path"; description = "path functions"; }
{ name = "filesystem"; description = "filesystem functions"; }
{ name = "sources"; description = "source filtering functions"; }
{ name = "cli"; description = "command-line serialization functions"; }
]; ];
};
src = pkgs.nix-gitignore.gitignoreSource [] ./.; epub = pkgs.runCommand "manual.epub" {
nativeBuildInputs = with pkgs; [ libxslt zip ];
postPatch = ''
ln -s ${doc-support} ./doc-support/result
'';
epub = '' epub = ''
<book xmlns="http://docbook.org/ns/docbook" <book xmlns="http://docbook.org/ns/docbook"
@ -43,27 +47,99 @@ in pkgs.stdenv.mkDerivation {
</chapter> </chapter>
</book> </book>
''; '';
passAsFile = [ "epub" ];
preBuild = '' passAsFile = [ "epub" ];
cp $epubPath epub.xml } ''
make -j$NIX_BUILD_CORES render-md mkdir scratch
xsltproc \
--param chapter.autolabel 0 \
--nonet \
--output scratch/ \
${pkgs.docbook_xsl_ns}/xml/xsl/docbook/epub/docbook.xsl \
$epubPath
echo "application/epub+zip" > mimetype
zip -0Xq "$out" mimetype
cd scratch && zip -Xr9D "$out" *
'';
# NB: This file describes the Nixpkgs manual, which happens to use module
# docs infra originally developed for NixOS.
optionsDoc = pkgs.nixosOptionsDoc {
inherit (pkgs.lib.evalModules {
modules = [ ../pkgs/top-level/config.nix ];
class = "nixpkgsConfig";
}) options;
documentType = "none";
transformOptions = opt:
opt // {
declarations =
map
(decl:
if hasPrefix (toString ../..) (toString decl)
then
let subpath = removePrefix "/" (removePrefix (toString ../.) (toString decl));
in { url = "https://github.com/NixOS/nixpkgs/blob/master/${subpath}"; name = subpath; }
else decl)
opt.declarations;
};
};
in pkgs.stdenv.mkDerivation {
name = "nixpkgs-manual";
nativeBuildInputs = with pkgs; [
nixos-render-docs
];
src = ./.;
postPatch = ''
ln -s ${optionsDoc.optionsJSON}/share/doc/nixos/options.json ./config-options.json
'';
buildPhase = ''
cat \
./functions/library.md.in \
${lib-docs}/index.md \
> ./functions/library.md
substitute ./manual.md.in ./manual.md \
--replace '@MANUAL_VERSION@' '${pkgs.lib.version}'
mkdir -p out/media
mkdir -p out/highlightjs
cp -t out/highlightjs \
${pkgs.documentation-highlighter}/highlight.pack.js \
${pkgs.documentation-highlighter}/LICENSE \
${pkgs.documentation-highlighter}/mono-blue.css \
${pkgs.documentation-highlighter}/loader.js
cp -t out ./overrides.css ./style.css
nixos-render-docs manual html \
--manpage-urls ./manpage-urls.json \
--revision ${pkgs.lib.trivial.revisionWithDefault (pkgs.rev or "master")} \
--stylesheet style.css \
--stylesheet overrides.css \
--stylesheet highlightjs/mono-blue.css \
--script ./highlightjs/highlight.pack.js \
--script ./highlightjs/loader.js \
--toc-depth 1 \
--section-toc-depth 1 \
manual.md \
out/index.html
''; '';
installPhase = '' installPhase = ''
dest="$out/share/doc/nixpkgs" dest="$out/share/doc/nixpkgs"
mkdir -p "$(dirname "$dest")" mkdir -p "$(dirname "$dest")"
mv out/html "$dest" mv out "$dest"
mv "$dest/index.html" "$dest/manual.html" mv "$dest/index.html" "$dest/manual.html"
mv out/epub/manual.epub "$dest/nixpkgs-manual.epub" cp ${epub} "$dest/nixpkgs-manual.epub"
mkdir -p $out/nix-support/ mkdir -p $out/nix-support/
echo "doc manual $dest manual.html" >> $out/nix-support/hydra-build-products echo "doc manual $dest manual.html" >> $out/nix-support/hydra-build-products
echo "doc manual $dest nixpkgs-manual.epub" >> $out/nix-support/hydra-build-products echo "doc manual $dest nixpkgs-manual.epub" >> $out/nix-support/hydra-build-products
''; '';
# Environment variables
PANDOC_LUA_FILTERS_DIR = "${pkgs.pandoc-lua-filters}/share/pandoc/filters";
PANDOC_LINK_MANPAGES_FILTER = import build-aux/pandoc-filters/link-manpages.nix { inherit pkgs; };
} }

View file

@ -1,87 +0,0 @@
{ pkgs ? (import ../.. {}), nixpkgs ? { }}:
let
inherit (pkgs) lib;
inherit (lib) hasPrefix removePrefix;
libsets = [
{ name = "asserts"; description = "assertion functions"; }
{ name = "attrsets"; description = "attribute set functions"; }
{ name = "strings"; description = "string manipulation functions"; }
{ name = "versions"; description = "version string functions"; }
{ name = "trivial"; description = "miscellaneous functions"; }
{ name = "lists"; description = "list manipulation functions"; }
{ name = "debug"; description = "debugging functions"; }
{ name = "options"; description = "NixOS / nixpkgs option handling"; }
{ name = "path"; description = "path functions"; }
{ name = "filesystem"; description = "filesystem functions"; }
{ name = "sources"; description = "source filtering functions"; }
{ name = "cli"; description = "command-line serialization functions"; }
];
locationsXml = import ./lib-function-locations.nix { inherit pkgs nixpkgs libsets; };
functionDocs = import ./lib-function-docs.nix { inherit locationsXml pkgs libsets; };
version = pkgs.lib.version;
epub-xsl = pkgs.writeText "epub.xsl" ''
<?xml version='1.0'?>
<xsl:stylesheet
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
version="1.0">
<xsl:import href="${pkgs.docbook_xsl_ns}/xml/xsl/docbook/epub/docbook.xsl" />
<xsl:import href="${./parameters.xml}"/>
</xsl:stylesheet>
'';
xhtml-xsl = pkgs.writeText "xhtml.xsl" ''
<?xml version='1.0'?>
<xsl:stylesheet
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
version="1.0">
<xsl:import href="${pkgs.docbook_xsl_ns}/xml/xsl/docbook/xhtml/docbook.xsl" />
<xsl:import href="${./parameters.xml}"/>
</xsl:stylesheet>
'';
# NB: This file describes the Nixpkgs manual, which happens to use module
# docs infra originally developed for NixOS.
optionsDoc = pkgs.nixosOptionsDoc {
inherit (pkgs.lib.evalModules {
modules = [ ../../pkgs/top-level/config.nix ];
class = "nixpkgsConfig";
}) options;
documentType = "none";
transformOptions = opt:
opt // {
declarations =
map
(decl:
if hasPrefix (toString ../..) (toString decl)
then
let subpath = removePrefix "/" (removePrefix (toString ../..) (toString decl));
in { url = "https://github.com/NixOS/nixpkgs/blob/master/${subpath}"; name = subpath; }
else decl)
opt.declarations;
};
};
in pkgs.runCommand "doc-support" {}
''
mkdir result
(
cd result
ln -s ${locationsXml} ./function-locations.xml
ln -s ${functionDocs} ./function-docs
ln -s ${optionsDoc.optionsDocBook} ./config-options.docbook.xml
ln -s ${pkgs.docbook5}/xml/rng/docbook/docbook.rng ./docbook.rng
ln -s ${pkgs.docbook_xsl_ns}/xml/xsl ./xsl
ln -s ${epub-xsl} ./epub.xsl
ln -s ${xhtml-xsl} ./xhtml.xsl
ln -s ${./xmlformat.conf} ./xmlformat.conf
ln -s ${pkgs.documentation-highlighter} ./highlightjs
echo -n "${version}" > ./version
)
mv result $out
''

View file

@ -1,8 +1,13 @@
# Generates the documentation for library functions via nixdoc. # Generates the documentation for library functions via nixdoc.
{ pkgs, locationsXml, libsets }: { pkgs, nixpkgs, libsets }:
with pkgs; stdenv.mkDerivation { with pkgs;
let
locationsJSON = import ./lib-function-locations.nix { inherit pkgs nixpkgs libsets; };
in
stdenv.mkDerivation {
name = "nixpkgs-lib-docs"; name = "nixpkgs-lib-docs";
src = ../../lib; src = ../../lib;
@ -11,26 +16,23 @@ with pkgs; stdenv.mkDerivation {
function docgen { function docgen {
# TODO: wrap lib.$1 in <literal>, make nixdoc not escape it # TODO: wrap lib.$1 in <literal>, make nixdoc not escape it
if [[ -e "../lib/$1.nix" ]]; then if [[ -e "../lib/$1.nix" ]]; then
nixdoc -c "$1" -d "lib.$1: $2" -f "$1.nix" > "$out/$1.xml" nixdoc -c "$1" -d "lib.$1: $2" -l ${locationsJSON} -f "$1.nix" > "$out/$1.md"
else else
nixdoc -c "$1" -d "lib.$1: $2" -f "$1/default.nix" > "$out/$1.xml" nixdoc -c "$1" -d "lib.$1: $2" -l ${locationsJSON} -f "$1/default.nix" > "$out/$1.md"
fi fi
echo "<xi:include href='$1.xml' />" >> "$out/index.xml" echo "$out/$1.md" >> "$out/index.md"
} }
mkdir -p "$out" mkdir -p "$out"
cat > "$out/index.xml" << 'EOF' cat > "$out/index.md" << 'EOF'
<?xml version="1.0" encoding="utf-8"?> ```{=include=} sections
<root xmlns:xi="http://www.w3.org/2001/XInclude">
EOF EOF
${lib.concatMapStrings ({ name, description }: '' ${lib.concatMapStrings ({ name, description }: ''
docgen ${name} ${lib.escapeShellArg description} docgen ${name} ${lib.escapeShellArg description}
'') libsets} '') libsets}
echo "</root>" >> "$out/index.xml" echo '```' >> "$out/index.md"
ln -s ${locationsXml} $out/locations.xml
''; '';
} }

View file

@ -58,28 +58,18 @@ let
[ "-prime" ]; [ "-prime" ];
urlPrefix = "https://github.com/NixOS/nixpkgs/blob/${revision}"; urlPrefix = "https://github.com/NixOS/nixpkgs/blob/${revision}";
xmlstrings = (nixpkgsLib.strings.concatMapStrings jsonLocs = builtins.listToAttrs
({ name, value }: (builtins.map
'' ({ name, value }: {
<section><title>${name}</title> name = sanitizeId name;
<para xml:id="${sanitizeId name}"> value =
Located at let
<link text = "${value.file}:${builtins.toString value.line}";
xlink:href="${urlPrefix}/${value.file}#L${builtins.toString value.line}">${value.file}:${builtins.toString value.line}</link> target = "${urlPrefix}/${value.file}#L${builtins.toString value.line}";
in <literal>&lt;nixpkgs&gt;</literal>. in
</para> "[${text}](${target}) in `<nixpkgs>`";
</section> })
'')
relativeLocs); relativeLocs);
in pkgs.writeText in
"locations.xml" pkgs.writeText "locations.json" (builtins.toJSON jsonLocs)
''
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
version="5">
<title>All the locations for every lib function</title>
<para>This file is only for inclusion by other files.</para>
${xmlstrings}
</section>
''

View file

@ -1,19 +0,0 @@
<?xml version='1.0'?>
<xsl:stylesheet
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
version="1.0">
<xsl:param name="chapter.autolabel" select="0" />
<xsl:param name="part.autolabel" select="0" />
<xsl:param name="preface.autolabel" select="0" />
<xsl:param name="reference.autolabel" select="0" />
<xsl:param name="section.autolabel" select="0" />
<xsl:param name="html.stylesheet" select="'style.css overrides.css highlightjs/mono-blue.css'" />
<xsl:param name="html.script" select="'./highlightjs/highlight.pack.js ./highlightjs/loader.js'" />
<xsl:param name="xref.with.number.and.title" select="0" />
<xsl:param name="use.id.as.filename" select="1" />
<xsl:param name="generate.section.toc.level" select="1" />
<xsl:param name="toc.section.depth" select="0" />
<xsl:param name="admon.style" select="''" />
<xsl:param name="callout.graphics.extension" select="'.svg'" />
<xsl:param name="generate.consistent.ids" select="1" />
</xsl:stylesheet>

View file

@ -1,72 +0,0 @@
#
# DocBook Configuration file for "xmlformat"
# see http://www.kitebird.com/software/xmlformat/
# 10 Sept. 2004
#
# Only block elements
ackno address appendix article biblioentry bibliography bibliomixed \
biblioset blockquote book bridgehead callout calloutlist caption caution \
chapter chapterinfo classsynopsis cmdsynopsis colophon constraintdef \
constructorsynopsis dedication destructorsynopsis entry epigraph equation example \
figure formalpara funcsynopsis glossary glossdef glossdiv glossentry glosslist \
glosssee glossseealso graphic graphicco highlights imageobjectco important \
index indexdiv indexentry indexinfo info informalequation informalexample \
informalfigure informaltable legalnotice literallayout lot lotentry mediaobject \
mediaobjectco msgmain msgset note orderedlist para part preface primaryie \
procedure qandadiv qandaentry qandaset refentry refentrytitle reference \
refnamediv refsect1 refsect2 refsect3 refsection revhistory screenshot sect1 \
sect2 sect3 sect4 sect5 section seglistitem set setindex sidebar simpara \
simplesect step substeps synopfragment synopsis table term title \
toc variablelist varlistentry warning itemizedlist listitem \
footnote colspec partintro row simplelist subtitle tbody tgroup thead tip
format block
normalize no
#appendix bibliography chapter glossary preface reference
# element-break 3
sect1 section
element-break 2
#
para abstract
format block
entry-break 1
exit-break 1
normalize yes
title
format block
normalize = yes
entry-break = 0
exit-break = 0
# Inline elements
abbrev accel acronym action application citation citebiblioid citerefentry citetitle \
classname co code command computeroutput constant country database date email emphasis \
envar errorcode errorname errortext errortype exceptionname fax filename \
firstname firstterm footnoteref foreignphrase funcdef funcparams function \
glossterm group guibutton guiicon guilabel guimenu guimenuitem guisubmenu \
hardware holder honorific indexterm inlineequation inlinegraphic inlinemediaobject \
interface interfacename \
keycap keycode keycombo keysym lineage link literal manvolnum markup medialabel \
menuchoice methodname methodparam modifier mousebutton olink ooclass ooexception \
oointerface option optional otheraddr othername package paramdef parameter personname \
phrase pob postcode productname prompt property quote refpurpose replaceable \
returnvalue revnumber sgmltag state street structfield structname subscript \
superscript surname symbol systemitem token trademark type ulink userinput \
uri varargs varname void wordasword xref year mathphrase member tag
format inline
programlisting screen
format verbatim
entry-break = 0
exit-break = 0
# This is needed so that the spacing inside those tags is kept.
term cmdsynopsis arg
normalize yes
format block

11
doc/functions.md Normal file
View file

@ -0,0 +1,11 @@
# Functions reference {#chap-functions}
The nixpkgs repository has several utility functions to manipulate Nix expressions.
```{=include=} sections
functions/library.md
functions/generators.section.md
functions/debug.section.md
functions/prefer-remote-fetch.section.md
functions/nix-gitignore.section.md
```

View file

@ -1,14 +0,0 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xi="http://www.w3.org/2001/XInclude"
xml:id="chap-functions">
<title>Functions reference</title>
<para>
The nixpkgs repository has several utility functions to manipulate Nix expressions.
</para>
<xi:include href="functions/library.xml" />
<xi:include href="functions/generators.section.xml" />
<xi:include href="functions/debug.section.xml" />
<xi:include href="functions/prefer-remote-fetch.section.xml" />
<xi:include href="functions/nix-gitignore.section.xml" />
</chapter>

View file

@ -0,0 +1,5 @@
# Nixpkgs Library Functions {#sec-functions-library}
Nixpkgs provides a standard library at `pkgs.lib`, or through `import <nixpkgs/lib>`.
<!-- nixdoc-generated documentation must be appended here during build! -->

View file

@ -1,14 +0,0 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xi="http://www.w3.org/2001/XInclude"
xml:id="sec-functions-library">
<title>Nixpkgs Library Functions</title>
<para>
Nixpkgs provides a standard library at <varname>pkgs.lib</varname>, or through <code>import &lt;nixpkgs/lib&gt;</code>.
</para>
<!-- The index must have a root element to declare namespaces, but we
don't want to include it, so we select all of its children. -->
<xi:include href="./library/generated/index.xml" xpointer="xpointer(/root/*)" />
</section>

33
doc/hooks/index.md Normal file
View file

@ -0,0 +1,33 @@
# Hooks reference {#chap-hooks}
Nixpkgs has several hook packages that augment the stdenv phases.
The stdenv built-in hooks are documented in [](#ssec-setup-hooks).
```{=include=} sections
autoconf.section.md
automake.section.md
autopatchelf.section.md
breakpoint.section.md
cmake.section.md
gdk-pixbuf.section.md
ghc.section.md
gnome.section.md
installShellFiles.section.md
libiconv.section.md
libxml2.section.md
meson.section.md
ninja.section.md
patch-rc-path-hooks.section.md
perl.section.md
pkg-config.section.md
postgresql-test-hook.section.md
python.section.md
qt-4.section.md
scons.section.md
tetex-tex-live.section.md
unzip.section.md
validatePkgConfig.section.md
waf.section.md
xcbuild.section.md
```

View file

@ -1,37 +0,0 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xi="http://www.w3.org/2001/XInclude"
xml:id="chap-hooks">
<title>Hooks reference</title>
<para>
Nixpkgs has several hook packages that augment the stdenv phases.
</para>
<para>
The stdenv built-in hooks are documented in <xref linkend="ssec-setup-hooks"/>.
</para>
<xi:include href="./autoconf.section.xml" />
<xi:include href="./automake.section.xml" />
<xi:include href="./autopatchelf.section.xml" />
<xi:include href="./breakpoint.section.xml" />
<xi:include href="./cmake.section.xml" />
<xi:include href="./gdk-pixbuf.section.xml" />
<xi:include href="./ghc.section.xml" />
<xi:include href="./gnome.section.xml" />
<xi:include href="./installShellFiles.section.xml" />
<xi:include href="./libiconv.section.xml" />
<xi:include href="./libxml2.section.xml" />
<xi:include href="./meson.section.xml" />
<xi:include href="./ninja.section.xml" />
<xi:include href="./patch-rc-path-hooks.section.xml" />
<xi:include href="./perl.section.xml" />
<xi:include href="./pkg-config.section.xml" />
<xi:include href="./postgresql-test-hook.section.xml" />
<xi:include href="./python.section.xml" />
<xi:include href="./qt-4.section.xml" />
<xi:include href="./scons.section.xml" />
<xi:include href="./tetex-tex-live.section.xml" />
<xi:include href="./unzip.section.xml" />
<xi:include href="./validatePkgConfig.section.xml" />
<xi:include href="./waf.section.xml" />
<xi:include href="./xcbuild.section.xml" />
</chapter>

View file

@ -0,0 +1,45 @@
# Languages and frameworks {#chap-language-support}
The [standard build environment](#chap-stdenv) makes it easy to build typical Autotools-based packages with very little code. Any other kind of package can be accommodated by overriding the appropriate phases of `stdenv`. However, there are specialised functions in Nixpkgs to easily build packages for other programming languages, such as Perl or Haskell. These are described in this chapter.
```{=include=} sections
agda.section.md
android.section.md
beam.section.md
bower.section.md
chicken.section.md
coq.section.md
crystal.section.md
cuda.section.md
cuelang.section.md
dart.section.md
dhall.section.md
dotnet.section.md
emscripten.section.md
gnome.section.md
go.section.md
haskell.section.md
hy.section.md
idris.section.md
ios.section.md
java.section.md
javascript.section.md
lisp.section.md
lua.section.md
maven.section.md
nim.section.md
ocaml.section.md
octave.section.md
perl.section.md
php.section.md
pkg-config.section.md
python.section.md
qt.section.md
r.section.md
ruby.section.md
rust.section.md
swift.section.md
texlive.section.md
titanium.section.md
vim.section.md
```

View file

@ -1,47 +0,0 @@
<chapter xmlns="http://docbook.org/ns/docbook"
xmlns:xi="http://www.w3.org/2001/XInclude"
xml:id="chap-language-support">
<title>Languages and frameworks</title>
<para>
The <link linkend="chap-stdenv">standard build environment</link> makes it easy to build typical Autotools-based packages with very little code. Any other kind of package can be accommodated by overriding the appropriate phases of <literal>stdenv</literal>. However, there are specialised functions in Nixpkgs to easily build packages for other programming languages, such as Perl or Haskell. These are described in this chapter.
</para>
<xi:include href="agda.section.xml" />
<xi:include href="android.section.xml" />
<xi:include href="beam.section.xml" />
<xi:include href="bower.section.xml" />
<xi:include href="chicken.section.xml" />
<xi:include href="coq.section.xml" />
<xi:include href="crystal.section.xml" />
<xi:include href="cuda.section.xml" />
<xi:include href="cuelang.section.xml" />
<xi:include href="dart.section.xml" />
<xi:include href="dhall.section.xml" />
<xi:include href="dotnet.section.xml" />
<xi:include href="emscripten.section.xml" />
<xi:include href="gnome.section.xml" />
<xi:include href="go.section.xml" />
<xi:include href="haskell.section.xml" />
<xi:include href="hy.section.xml" />
<xi:include href="idris.section.xml" />
<xi:include href="ios.section.xml" />
<xi:include href="java.section.xml" />
<xi:include href="javascript.section.xml" />
<xi:include href="lisp.section.xml" />
<xi:include href="lua.section.xml" />
<xi:include href="maven.section.xml" />
<xi:include href="nim.section.xml" />
<xi:include href="ocaml.section.xml" />
<xi:include href="octave.section.xml" />
<xi:include href="perl.section.xml" />
<xi:include href="php.section.xml" />
<xi:include href="pkg-config.section.xml" />
<xi:include href="python.section.xml" />
<xi:include href="qt.section.xml" />
<xi:include href="r.section.xml" />
<xi:include href="ruby.section.xml" />
<xi:include href="rust.section.xml" />
<xi:include href="swift.section.xml" />
<xi:include href="texlive.section.xml" />
<xi:include href="titanium.section.xml" />
<xi:include href="vim.section.xml" />
</chapter>

6
doc/lib.md Normal file
View file

@ -0,0 +1,6 @@
# Nixpkgs `lib` {#id-1.4}
```{=include=} chapters
functions.md
module-system/module-system.chapter.md
```

14
doc/manual.md.in Normal file
View file

@ -0,0 +1,14 @@
# Nixpkgs Manual {#nixpkgs-manual}
## Version @MANUAL_VERSION@
```{=include=} chapters
preface.chapter.md
```
```{=include=} parts
using-nixpkgs.md
lib.md
stdenv.md
builders.md
contributing.md
```

View file

@ -1,49 +0,0 @@
<book xmlns="http://docbook.org/ns/docbook"
xmlns:xi="http://www.w3.org/2001/XInclude"
xml:id="nixpkgs-manual">
<info>
<title>Nixpkgs Manual</title>
<subtitle>Version <xi:include href=".version" parse="text" />
</subtitle>
</info>
<xi:include href="preface.chapter.xml" />
<part xml:id="part-using">
<title>Using Nixpkgs</title>
<xi:include href="using/configuration.chapter.xml" />
<xi:include href="using/overlays.chapter.xml" />
<xi:include href="using/overrides.chapter.xml" />
</part>
<part>
<title>Nixpkgs <code>lib</code></title>
<xi:include href="functions.xml" />
<xi:include href="module-system/module-system.chapter.xml" />
</part>
<part xml:id="part-stdenv">
<title>Standard environment</title>
<xi:include href="stdenv/stdenv.chapter.xml" />
<xi:include href="stdenv/meta.chapter.xml" />
<xi:include href="stdenv/multiple-output.chapter.xml" />
<xi:include href="stdenv/cross-compilation.chapter.xml" />
<xi:include href="stdenv/platform-notes.chapter.xml" />
</part>
<part xml:id="part-builders">
<title>Builders</title>
<xi:include href="builders/fetchers.chapter.xml" />
<xi:include href="builders/trivial-builders.chapter.xml" />
<xi:include href="builders/testers.chapter.xml" />
<xi:include href="builders/special.xml" />
<xi:include href="builders/images.xml" />
<xi:include href="hooks/index.xml" />
<xi:include href="languages-frameworks/index.xml" />
<xi:include href="builders/packages/index.xml" />
</part>
<part xml:id="part-contributing">
<title>Contributing to Nixpkgs</title>
<xi:include href="contributing/quick-start.chapter.xml" />
<xi:include href="contributing/coding-conventions.chapter.xml" />
<xi:include href="contributing/submitting-changes.chapter.xml" />
<xi:include href="contributing/vulnerability-roundup.chapter.xml" />
<xi:include href="contributing/reviewing-contributions.chapter.xml" />
<xi:include href="contributing/contributing-to-documentation.chapter.xml" />
</part>
</book>

View file

@ -1,3 +0,0 @@
{ pkgs ? import ../. { } }:
(import ./default.nix { }).overrideAttrs
(x: { buildInputs = (x.buildInputs or [ ]) ++ [ pkgs.xmloscopy pkgs.ruby ]; })

9
doc/stdenv.md Normal file
View file

@ -0,0 +1,9 @@
# Standard environment {#part-stdenv}
```{=include=} chapters
stdenv/stdenv.chapter.md
stdenv/meta.chapter.md
stdenv/multiple-output.chapter.md
stdenv/cross-compilation.chapter.md
stdenv/platform-notes.chapter.md
```

View file

@ -464,10 +464,8 @@ The commit object contains the following values:
If the returned array contains exactly one object (e.g. `[{}]`), all values are optional and will be determined automatically. If the returned array contains exactly one object (e.g. `[{}]`), all values are optional and will be determined automatically.
```{=docbook} ::: {.example #var-passthru-updateScript-example-commit}
<example> # Standard output of an update script using commit feature
<title>Standard output of an update script using commit feature</title>
```
```json ```json
[ [
@ -481,10 +479,7 @@ If the returned array contains exactly one object (e.g. `[{}]`), all values are
} }
] ]
``` ```
:::
```{=docbook}
</example>
```
### Recursive attributes in `mkDerivation` {#mkderivation-recursive-attributes} ### Recursive attributes in `mkDerivation` {#mkderivation-recursive-attributes}

7
doc/using-nixpkgs.md Normal file
View file

@ -0,0 +1,7 @@
# Using Nixpkgs {#part-using}
```{=include=} chapters
using/configuration.chapter.md
using/overlays.chapter.md
using/overrides.chapter.md
```

View file

@ -185,8 +185,10 @@ You can define a function called `packageOverrides` in your local `~/.config/nix
The following attributes can be passed in [`config`](#chap-packageconfig). The following attributes can be passed in [`config`](#chap-packageconfig).
```{=docbook} ```{=include=} options
<include xmlns="http://www.w3.org/2001/XInclude" href="../doc-support/result/config-options.docbook.xml"/> id-prefix: opt-
list-id: configuration-variable-list
source: ../config-options.json
``` ```

View file

@ -1,309 +0,0 @@
[[package]]
name = "ansi_term"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "arenatree"
version = "0.1.1"
source = "git+https://gitlab.com/jD91mZM2/arenatree#f9bf7efa9a5ef4c2dd9e2acc5a4cc79a987cb648"
[[package]]
name = "arrayvec"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "atty"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
"termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "backtrace"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"backtrace-sys 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-demangle 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "backtrace-sys"
version = "0.1.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cc 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "bitflags"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "cc"
version = "1.0.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "cfg-if"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "clap"
version = "2.32.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"textwrap 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "failure"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"backtrace 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
"failure_derive 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "failure_derive"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.15 (registry+https://github.com/rust-lang/crates.io-index)",
"synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "libc"
version = "0.2.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "nixdoc"
version = "1.0.1"
dependencies = [
"failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rnix 0.4.1 (git+https://gitlab.com/jD91mZM2/rnix.git?rev=10b86c94291b4864470158ef8750de85ddd8d4ba)",
"structopt 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)",
"xml-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "nodrop"
version = "0.1.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "proc-macro2"
version = "0.4.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "quote"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "redox_syscall"
version = "0.1.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "redox_termios"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rnix"
version = "0.4.1"
source = "git+https://gitlab.com/jD91mZM2/rnix.git?rev=10b86c94291b4864470158ef8750de85ddd8d4ba#10b86c94291b4864470158ef8750de85ddd8d4ba"
dependencies = [
"arenatree 0.1.1 (git+https://gitlab.com/jD91mZM2/arenatree)",
"arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"smol_str 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rustc-demangle"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "smol_str"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "strsim"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "structopt"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)",
"structopt-derive 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "structopt-derive"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.15 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "syn"
version = "0.15.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "synstructure"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.15 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "termion"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
"redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
"redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "textwrap"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "unicode-width"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "unicode-xid"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "vec_map"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "winapi"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "winapi-x86_64-pc-windows-gnu"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "xml-rs"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[metadata]
"checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
"checksum arenatree 0.1.1 (git+https://gitlab.com/jD91mZM2/arenatree)" = "<none>"
"checksum arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)" = "a1e964f9e24d588183fcb43503abda40d288c8657dfc27311516ce2f05675aef"
"checksum atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "9a7d5b8723950951411ee34d271d99dddcc2035a16ab25310ea2c8cfd4369652"
"checksum backtrace 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "89a47830402e9981c5c41223151efcced65a0510c13097c769cede7efb34782a"
"checksum backtrace-sys 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)" = "c66d56ac8dabd07f6aacdaf633f4b8262f5b3601a810a0dcddffd5c22c69daa0"
"checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12"
"checksum cc 1.0.25 (registry+https://github.com/rust-lang/crates.io-index)" = "f159dfd43363c4d08055a07703eb7a3406b0dac4d0584d96965a3262db3c9d16"
"checksum cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "082bb9b28e00d3c9d39cc03e64ce4cea0f1bb9b3fde493f0cbc008472d22bdf4"
"checksum clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b957d88f4b6a63b9d70d5f454ac8011819c6efa7727858f458ab71c756ce2d3e"
"checksum failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6dd377bcc1b1b7ce911967e3ec24fa19c3224394ec05b54aa7b083d498341ac7"
"checksum failure_derive 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "64c2d913fe8ed3b6c6518eedf4538255b989945c14c2a7d5cbff62a5e2120596"
"checksum libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)" = "76e3a3ef172f1a0b9a9ff0dd1491ae5e6c948b94479a3021819ba7d860c8645d"
"checksum nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "9a2228dca57108069a5262f2ed8bd2e82496d2e074a06d1ccc7ce1687b6ae0a2"
"checksum proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)" = "3d7b7eaaa90b4a90a932a9ea6666c95a389e424eff347f0f793979289429feee"
"checksum quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)" = "dd636425967c33af890042c483632d33fa7a18f19ad1d7ea72e8998c6ef8dea5"
"checksum redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "c214e91d3ecf43e9a4e41e578973adeb14b474f2bee858742d127af75a0112b1"
"checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"
"checksum rnix 0.4.1 (git+https://gitlab.com/jD91mZM2/rnix.git?rev=10b86c94291b4864470158ef8750de85ddd8d4ba)" = "<none>"
"checksum rustc-demangle 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "bcfe5b13211b4d78e5c2cadfebd7769197d95c639c35a50057eb4c05de811395"
"checksum smol_str 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "f3ed6f19b800d76574926e458d5f8e2dbea86c2b58c08d33a982448f09ac8d0c"
"checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
"checksum structopt 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)" = "d77af7242f18c40fd19cb270985930f239ee1646cfb482050bbae9da1d18743b"
"checksum structopt-derive 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)" = "17ff01fe96de9d16e7372ae5f19dd7ece2c703b51043c3db9ea27f9e393ea311"
"checksum syn 0.15.15 (registry+https://github.com/rust-lang/crates.io-index)" = "0a9c2bf1e53c21704a7cce1b2a42768f1ae32a6777108a0d7f1faa4bfe7f7c04"
"checksum synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "73687139bf99285483c96ac0add482c3776528beac1d97d444f6e91f203a2015"
"checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096"
"checksum textwrap 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "307686869c93e71f94da64286f9a9524c0f308a9e1c87a583de8e9c9039ad3f6"
"checksum unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "882386231c45df4700b275c7ff55b6f3698780a650026380e72dabe76fa46526"
"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
"checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a"
"checksum winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "92c1eb33641e276cfa214a0522acad57be5c56b10cb348b3c5117db75f3ac4b0"
"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
"checksum xml-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "541b12c998c5b56aa2b4e6f18f03664eef9a4fd0a246a55594efae6cc2d964b5"

View file

@ -2,38 +2,24 @@
rustPlatform.buildRustPackage rec { rustPlatform.buildRustPackage rec {
pname = "nixdoc"; pname = "nixdoc";
version = "1.0.1"; version = "2.3.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "tazjin"; owner = "nix-community";
repo = "nixdoc"; repo = "nixdoc";
rev = "v${version}"; rev = "v${version}";
sha256 = "14d4dq06jdqazxvv7fq5872zy0capxyb0fdkp8qg06gxl1iw201s"; sha256 = "sha256-8pp6xlmdb3kZ6unTiO4yRruyEZ//GIHZF1k8f4kQr9Q=";
}; };
patches = [ cargoSha256 = "sha256-k8/+BBMjQCsrgCi33fTdiSukaAZlg6XU3NwXaJdGYVw=";
# Support nested identifiers https://github.com/nix-community/nixdoc/pull/27
(fetchpatch {
url = "https://github.com/nix-community/nixdoc/pull/27/commits/ea542735bf675fe2ccd37edaffb9138d1a8c1b7e.patch";
sha256 = "1fmz44jv2r9qsnjxvkkjfb0safy69l4x4vx1g5gisrp8nwdn94rj";
})
];
buildInputs = lib.optionals stdenv.isDarwin [ darwin.Security ]; buildInputs = lib.optionals stdenv.isDarwin [ darwin.Security ];
cargoLock = {
lockFile = ./Cargo.lock;
outputHashes = {
"arenatree-0.1.1" = "sha256-b3VVbYnWsjSjFMxvkfpJt13u+VC6baOIWD4qm1Gco4Q=";
"rnix-0.4.1" = "sha256-C1L/qXk6AimH7COrBlqpUA3giftaOYm/qNxs7rQgETA=";
};
};
meta = with lib; { meta = with lib; {
description = "Generate documentation for Nix functions"; description = "Generate documentation for Nix functions";
homepage = "https://github.com/tazjin/nixdoc"; homepage = "https://github.com/nix-community/nixdoc";
license = [ licenses.gpl3 ]; license = [ licenses.gpl3 ];
maintainers = [ maintainers.tazjin ]; maintainers = [ maintainers.asymmetric ];
platforms = platforms.unix; platforms = platforms.unix;
}; };
} }

View file

@ -184,3 +184,7 @@ class CommonMarkRenderer(Renderer):
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.pop() self._list_stack.pop()
return "" return ""
def image(self, token: Token, tokens: Sequence[Token], i: int) -> str:
if title := cast(str, token.attrs.get('title', '')):
title = ' "' + title.replace('"', '\\"') + '"'
return f'![{token.content}]({token.attrs["src"]}{title})'

View file

@ -44,6 +44,9 @@ class HTMLRenderer(Renderer):
result += self._close_headings(None) result += self._close_headings(None)
return result return result
def _pull_image(self, path: str) -> str:
raise NotImplementedError()
def text(self, token: Token, tokens: Sequence[Token], i: int) -> str: def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return escape(token.content) return escape(token.content)
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str: def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
@ -67,7 +70,8 @@ class HTMLRenderer(Renderer):
if tokens[i + 1].type == 'link_close': if tokens[i + 1].type == 'link_close':
tag, text = "xref", xref.title_html tag, text = "xref", xref.title_html
if xref.title: if xref.title:
title = f'title="{escape(xref.title, True)}"' # titles are not attribute-safe on their own, so we need to replace quotes.
title = 'title="{}"'.format(xref.title.replace('"', '&quot;'))
target, href = "", xref.href() target, href = "", xref.href()
return f'<a class="{tag}" href="{href}" {title} {target}>{text}' return f'<a class="{tag}" href="{href}" {title} {target}>{text}'
def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
@ -223,6 +227,106 @@ class HTMLRenderer(Renderer):
return '<p class="title"><strong>' return '<p class="title"><strong>'
def example_title_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: def example_title_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return '</strong></p><div class="example-contents">' return '</strong></p><div class="example-contents">'
def image(self, token: Token, tokens: Sequence[Token], i: int) -> str:
src = self._pull_image(cast(str, token.attrs['src']))
alt = f'alt="{escape(token.content, True)}"' if token.content else ""
if title := cast(str, token.attrs.get('title', '')):
title = f'title="{escape(title, True)}"'
return (
'<div class="mediaobject">'
f'<img src="{escape(src, True)}" {alt} {title} />'
'</div>'
)
def figure_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
if anchor := cast(str, token.attrs.get('id', '')):
anchor = f'<a id="{escape(anchor, True)}"></a>'
return f'<div class="figure">{anchor}'
def figure_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return (
' </div>'
'</div><br class="figure-break" />'
)
def figure_title_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return (
'<p class="title">'
' <strong>'
)
def figure_title_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return (
' </strong>'
'</p>'
'<div class="figure-contents">'
)
def table_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return (
'<div class="informaltable">'
'<table class="informaltable" border="1">'
)
def table_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return (
'</table>'
'</div>'
)
def thead_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
cols = []
for j in range(i + 1, len(tokens)):
if tokens[j].type == 'thead_close':
break
elif tokens[j].type == 'th_open':
cols.append(cast(str, tokens[j].attrs.get('style', 'left')).removeprefix('text-align:'))
return "".join([
"<colgroup>",
"".join([ f'<col align="{col}" />' for col in cols ]),
"</colgroup>",
"<thead>",
])
def thead_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</thead>"
def tr_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<tr>"
def tr_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</tr>"
def th_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f'<th align="{cast(str, token.attrs.get("style", "left")).removeprefix("text-align:")}">'
def th_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</th>"
def tbody_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<tbody>"
def tbody_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</tbody>"
def td_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f'<td align="{cast(str, token.attrs.get("style", "left")).removeprefix("text-align:")}">'
def td_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</td>"
def footnote_ref(self, token: Token, tokens: Sequence[Token], i: int) -> str:
href = self._xref_targets[token.meta['target']].href()
id = escape(cast(str, token.attrs["id"]), True)
return (
f'<a href="{href}" class="footnote" id="{id}">'
f'<sup class="footnote">[{token.meta["id"] + 1}]</sup>'
'</a>'
)
def footnote_block_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return (
'<div class="footnotes">'
'<br />'
'<hr style="width:100; text-align:left;margin-left: 0" />'
)
def footnote_block_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</div>"
def footnote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# meta id,label
id = escape(self._xref_targets[token.meta["label"]].id, True)
return f'<div id="{id}" class="footnote">'
def footnote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</div>"
def footnote_anchor(self, token: Token, tokens: Sequence[Token], i: int) -> str:
href = self._xref_targets[token.meta['target']].href()
return (
f'<a href="{href}" class="para">'
f'<sup class="para">[{token.meta["id"] + 1}]</sup>'
'</a>'
)
def _make_hN(self, level: int) -> tuple[str, str]: def _make_hN(self, level: int) -> tuple[str, str]:
return f"h{min(6, max(1, level + self._hlevel_offset))}", "" return f"h{min(6, max(1, level + self._hlevel_offset))}", ""

View file

@ -1,4 +1,5 @@
import argparse import argparse
import hashlib
import html import html
import json import json
import re import re
@ -235,27 +236,48 @@ class HTMLParameters(NamedTuple):
generator: str generator: str
stylesheets: Sequence[str] stylesheets: Sequence[str]
scripts: Sequence[str] scripts: Sequence[str]
# number of levels in the rendered table of contents. tables are prepended to
# the content they apply to (entire document / document chunk / top-level section
# of a chapter), setting a depth of 0 omits the respective table.
toc_depth: int toc_depth: int
chunk_toc_depth: int chunk_toc_depth: int
section_toc_depth: int
media_dir: Path
class ManualHTMLRenderer(RendererMixin, HTMLRenderer): class ManualHTMLRenderer(RendererMixin, HTMLRenderer):
_base_path: Path _base_path: Path
_in_dir: Path
_html_params: HTMLParameters _html_params: HTMLParameters
def __init__(self, toplevel_tag: str, revision: str, html_params: HTMLParameters, def __init__(self, toplevel_tag: str, revision: str, html_params: HTMLParameters,
manpage_urls: Mapping[str, str], xref_targets: dict[str, XrefTarget], manpage_urls: Mapping[str, str], xref_targets: dict[str, XrefTarget],
base_path: Path): in_dir: Path, base_path: Path):
super().__init__(toplevel_tag, revision, manpage_urls, xref_targets) super().__init__(toplevel_tag, revision, manpage_urls, xref_targets)
self._base_path, self._html_params = base_path, html_params self._in_dir = in_dir
self._base_path = base_path.absolute()
self._html_params = html_params
def _pull_image(self, src: str) -> str:
src_path = Path(src)
content = (self._in_dir / src_path).read_bytes()
# images may be used more than once, but we want to store them only once and
# in an easily accessible (ie, not input-file-path-dependent) location without
# having to maintain a mapping structure. hashing the file and using the hash
# as both the path of the final image provides both.
content_hash = hashlib.sha3_256(content).hexdigest()
target_name = f"{content_hash}{src_path.suffix}"
target_path = self._base_path / self._html_params.media_dir / target_name
target_path.write_bytes(content)
return f"./{self._html_params.media_dir}/{target_name}"
def _push(self, tag: str, hlevel_offset: int) -> Any: def _push(self, tag: str, hlevel_offset: int) -> Any:
result = (self._toplevel_tag, self._headings, self._attrspans, self._hlevel_offset) result = (self._toplevel_tag, self._headings, self._attrspans, self._hlevel_offset, self._in_dir)
self._hlevel_offset += hlevel_offset self._hlevel_offset += hlevel_offset
self._toplevel_tag, self._headings, self._attrspans = tag, [], [] self._toplevel_tag, self._headings, self._attrspans = tag, [], []
return result return result
def _pop(self, state: Any) -> None: def _pop(self, state: Any) -> None:
(self._toplevel_tag, self._headings, self._attrspans, self._hlevel_offset) = state (self._toplevel_tag, self._headings, self._attrspans, self._hlevel_offset, self._in_dir) = state
def _render_book(self, tokens: Sequence[Token]) -> str: def _render_book(self, tokens: Sequence[Token]) -> str:
assert tokens[4].children assert tokens[4].children
@ -284,6 +306,7 @@ class ManualHTMLRenderer(RendererMixin, HTMLRenderer):
def _file_header(self, toc: TocEntry) -> str: def _file_header(self, toc: TocEntry) -> str:
prev_link, up_link, next_link = "", "", "" prev_link, up_link, next_link = "", "", ""
prev_a, next_a, parent_title = "", "", "&nbsp;" prev_a, next_a, parent_title = "", "", "&nbsp;"
nav_html = ""
home = toc.root home = toc.root
if toc.prev: if toc.prev:
prev_link = f'<link rel="prev" href="{toc.prev.target.href()}" title="{toc.prev.target.title}" />' prev_link = f'<link rel="prev" href="{toc.prev.target.href()}" title="{toc.prev.target.title}" />'
@ -299,23 +322,8 @@ class ManualHTMLRenderer(RendererMixin, HTMLRenderer):
if toc.next: if toc.next:
next_link = f'<link rel="next" href="{toc.next.target.href()}" title="{toc.next.target.title}" />' next_link = f'<link rel="next" href="{toc.next.target.href()}" title="{toc.next.target.title}" />'
next_a = f'<a accesskey="n" href="{toc.next.target.href()}">Next</a>' next_a = f'<a accesskey="n" href="{toc.next.target.href()}">Next</a>'
return "\n".join([ if toc.prev or toc.parent or toc.next:
'<?xml version="1.0" encoding="utf-8" standalone="no"?>', nav_html = "\n".join([
'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"',
' "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">',
'<html xmlns="http://www.w3.org/1999/xhtml">',
' <head>',
' <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />',
f' <title>{toc.target.title}</title>',
"".join((f'<link rel="stylesheet" type="text/css" href="{html.escape(style, True)}" />'
for style in self._html_params.stylesheets)),
"".join((f'<script src="{html.escape(script, True)}" type="text/javascript"></script>'
for script in self._html_params.scripts)),
f' <meta name="generator" content="{html.escape(self._html_params.generator, True)}" />',
f' <link rel="home" href="{home.target.href()}" title="{home.target.title}" />',
f' {up_link}{prev_link}{next_link}',
' </head>',
' <body>',
' <div class="navheader">', ' <div class="navheader">',
' <table width="100%" summary="Navigation header">', ' <table width="100%" summary="Navigation header">',
' <tr>', ' <tr>',
@ -330,11 +338,31 @@ class ManualHTMLRenderer(RendererMixin, HTMLRenderer):
' <hr />', ' <hr />',
' </div>', ' </div>',
]) ])
return "\n".join([
'<?xml version="1.0" encoding="utf-8" standalone="no"?>',
'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"',
' "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">',
'<html xmlns="http://www.w3.org/1999/xhtml">',
' <head>',
' <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />',
f' <title>{toc.target.title}</title>',
"".join((f'<link rel="stylesheet" type="text/css" href="{html.escape(style, True)}" />'
for style in self._html_params.stylesheets)),
"".join((f'<script src="{html.escape(script, True)}" type="text/javascript"></script>'
for script in self._html_params.scripts)),
f' <meta name="generator" content="{html.escape(self._html_params.generator, True)}" />',
f' <link rel="home" href="{home.target.href()}" title="{home.target.title}" />' if home.target.href() else "",
f' {up_link}{prev_link}{next_link}',
' </head>',
' <body>',
nav_html,
])
def _file_footer(self, toc: TocEntry) -> str: def _file_footer(self, toc: TocEntry) -> str:
# prev, next = self._get_prev_and_next() # prev, next = self._get_prev_and_next()
prev_a, up_a, home_a, next_a = "", "&nbsp;", "&nbsp;", "" prev_a, up_a, home_a, next_a = "", "&nbsp;", "&nbsp;", ""
prev_text, up_text, next_text = "", "", "" prev_text, up_text, next_text = "", "", ""
nav_html = ""
home = toc.root home = toc.root
if toc.prev: if toc.prev:
prev_a = f'<a accesskey="p" href="{toc.prev.target.href()}">Prev</a>' prev_a = f'<a accesskey="p" href="{toc.prev.target.href()}">Prev</a>'
@ -348,7 +376,8 @@ class ManualHTMLRenderer(RendererMixin, HTMLRenderer):
next_a = f'<a accesskey="n" href="{toc.next.target.href()}">Next</a>' next_a = f'<a accesskey="n" href="{toc.next.target.href()}">Next</a>'
assert toc.next.target.title assert toc.next.target.title
next_text = toc.next.target.title next_text = toc.next.target.title
return "\n".join([ if toc.prev or toc.parent or toc.next:
nav_html = "\n".join([
' <div class="navfooter">', ' <div class="navfooter">',
' <hr />', ' <hr />',
' <table width="100%" summary="Navigation footer">', ' <table width="100%" summary="Navigation footer">',
@ -364,6 +393,9 @@ class ManualHTMLRenderer(RendererMixin, HTMLRenderer):
' </tr>', ' </tr>',
' </table>', ' </table>',
' </div>', ' </div>',
])
return "\n".join([
nav_html,
' </body>', ' </body>',
'</html>', '</html>',
]) ])
@ -374,7 +406,7 @@ class ManualHTMLRenderer(RendererMixin, HTMLRenderer):
return super()._heading_tag(token, tokens, i) return super()._heading_tag(token, tokens, i)
def _build_toc(self, tokens: Sequence[Token], i: int) -> str: def _build_toc(self, tokens: Sequence[Token], i: int) -> str:
toc = TocEntry.of(tokens[i]) toc = TocEntry.of(tokens[i])
if toc.kind == 'section': if toc.kind == 'section' and self._html_params.section_toc_depth < 1:
return "" return ""
def walk_and_emit(toc: TocEntry, depth: int) -> list[str]: def walk_and_emit(toc: TocEntry, depth: int) -> list[str]:
if depth <= 0: if depth <= 0:
@ -394,34 +426,47 @@ class ManualHTMLRenderer(RendererMixin, HTMLRenderer):
if next_level: if next_level:
result.append(f'<dd><dl>{"".join(next_level)}</dl></dd>') result.append(f'<dd><dl>{"".join(next_level)}</dl></dd>')
return result return result
toc_depth = ( def build_list(kind: str, id: str, lst: Sequence[TocEntry]) -> str:
self._html_params.chunk_toc_depth if not lst:
if toc.starts_new_chunk and toc.kind != 'book'
else self._html_params.toc_depth
)
if not (items := walk_and_emit(toc, toc_depth)):
return "" return ""
examples = "" entries = [
if toc.examples: f'<dt>{i}. <a href="{e.target.href()}">{e.target.toc_html}</a></dt>'
examples_entries = [ for i, e in enumerate(lst, start=1)
f'<dt>{i + 1}. <a href="{ex.target.href()}">{ex.target.toc_html}</a></dt>'
for i, ex in enumerate(toc.examples)
] ]
examples = ( return (
'<div class="list-of-examples">' f'<div class="{id}">'
'<p><strong>List of Examples</strong></p>' f'<p><strong>List of {kind}</strong></p>'
f'<dl>{"".join(examples_entries)}</dl>' f'<dl>{"".join(entries)}</dl>'
'</div>' '</div>'
) )
return ( # we don't want to generate the "Title of Contents" header for sections,
f'<div class="toc">' # docbook doesn't and it's only distracting clutter unless it's the main table.
f' <p><strong>Table of Contents</strong></p>' # we also want to generate tocs only for a top-level section (ie, one that is
# not itself contained in another section)
print_title = toc.kind != 'section'
if toc.kind == 'section':
if toc.parent and toc.parent.kind == 'section':
toc_depth = 0
else:
toc_depth = self._html_params.section_toc_depth
elif toc.starts_new_chunk and toc.kind != 'book':
toc_depth = self._html_params.chunk_toc_depth
else:
toc_depth = self._html_params.toc_depth
if not (items := walk_and_emit(toc, toc_depth)):
return ""
figures = build_list("Figures", "list-of-figures", toc.figures)
examples = build_list("Examples", "list-of-examples", toc.examples)
return "".join([
f'<div class="toc">',
' <p><strong>Table of Contents</strong></p>' if print_title else "",
f' <dl class="toc">' f' <dl class="toc">'
f' {"".join(items)}' f' {"".join(items)}'
f' </dl>' f' </dl>'
f'</div>' f'</div>'
f'{figures}'
f'{examples}' f'{examples}'
) ])
def _make_hN(self, level: int) -> tuple[str, str]: def _make_hN(self, level: int) -> tuple[str, str]:
# for some reason chapters don't increase the hN nesting count in docbook xslts. duplicate # for some reason chapters don't increase the hN nesting count in docbook xslts. duplicate
@ -458,8 +503,10 @@ class ManualHTMLRenderer(RendererMixin, HTMLRenderer):
# we do not set _hlevel_offset=0 because docbook doesn't either. # we do not set _hlevel_offset=0 because docbook doesn't either.
else: else:
inner = outer inner = outer
in_dir = self._in_dir
for included, path in fragments: for included, path in fragments:
try: try:
self._in_dir = (in_dir / path).parent
inner.append(self.render(included)) inner.append(self.render(included))
except Exception as e: except Exception as e:
raise RuntimeError(f"rendering {path}") from e raise RuntimeError(f"rendering {path}") from e
@ -502,8 +549,9 @@ class HTMLConverter(BaseConverter[ManualHTMLRenderer]):
# renderer not set on purpose since it has a dependency on the output path! # renderer not set on purpose since it has a dependency on the output path!
def convert(self, infile: Path, outfile: Path) -> None: def convert(self, infile: Path, outfile: Path) -> None:
self._renderer = ManualHTMLRenderer('book', self._revision, self._html_params, self._renderer = ManualHTMLRenderer(
self._manpage_urls, self._xref_targets, outfile.parent) 'book', self._revision, self._html_params, self._manpage_urls, self._xref_targets,
infile.parent, outfile.parent)
super().convert(infile, outfile) super().convert(infile, outfile)
def _parse(self, src: str) -> list[Token]: def _parse(self, src: str) -> list[Token]:
@ -525,23 +573,24 @@ class HTMLConverter(BaseConverter[ManualHTMLRenderer]):
self._redirection_targets.add(into) self._redirection_targets.add(into)
return tokens return tokens
def _number_examples(self, tokens: Sequence[Token], start: int = 1) -> int: def _number_block(self, block: str, prefix: str, tokens: Sequence[Token], start: int = 1) -> int:
title_open, title_close = f'{block}_title_open', f'{block}_title_close'
for (i, token) in enumerate(tokens): for (i, token) in enumerate(tokens):
if token.type == "example_title_open": if token.type == title_open:
title = tokens[i + 1] title = tokens[i + 1]
assert title.type == 'inline' and title.children assert title.type == 'inline' and title.children
# the prefix is split into two tokens because the xref title_html will want # the prefix is split into two tokens because the xref title_html will want
# only the first of the two, but both must be rendered into the example itself. # only the first of the two, but both must be rendered into the example itself.
title.children = ( title.children = (
[ [
Token('text', '', 0, content=f'Example {start}'), Token('text', '', 0, content=f'{prefix} {start}'),
Token('text', '', 0, content='. ') Token('text', '', 0, content='. ')
] + title.children ] + title.children
) )
start += 1 start += 1
elif token.type.startswith('included_') and token.type != 'included_options': elif token.type.startswith('included_') and token.type != 'included_options':
for sub, _path in token.meta['included']: for sub, _path in token.meta['included']:
start = self._number_examples(sub, start) start = self._number_block(block, prefix, sub, start)
return start return start
# xref | (id, type, heading inlines, file, starts new file) # xref | (id, type, heading inlines, file, starts new file)
@ -567,6 +616,12 @@ class HTMLConverter(BaseConverter[ManualHTMLRenderer]):
result += self._collect_ids(sub, sub_file, subtyp, si == 0 and sub_file != target_file) result += self._collect_ids(sub, sub_file, subtyp, si == 0 and sub_file != target_file)
elif bt.type == 'example_open' and (id := cast(str, bt.attrs.get('id', ''))): elif bt.type == 'example_open' and (id := cast(str, bt.attrs.get('id', ''))):
result.append((id, 'example', tokens[i + 2], target_file, False)) result.append((id, 'example', tokens[i + 2], target_file, False))
elif bt.type == 'figure_open' and (id := cast(str, bt.attrs.get('id', ''))):
result.append((id, 'figure', tokens[i + 2], target_file, False))
elif bt.type == 'footnote_open' and (id := cast(str, bt.attrs.get('id', ''))):
result.append(XrefTarget(id, "???", None, None, target_file))
elif bt.type == 'footnote_ref' and (id := cast(str, bt.attrs.get('id', ''))):
result.append(XrefTarget(id, "???", None, None, target_file))
elif bt.type == 'inline': elif bt.type == 'inline':
assert bt.children assert bt.children
result += self._collect_ids(bt.children, target_file, typ, False) result += self._collect_ids(bt.children, target_file, typ, False)
@ -591,8 +646,8 @@ class HTMLConverter(BaseConverter[ManualHTMLRenderer]):
title = prefix + title_html title = prefix + title_html
toc_html = f"{n}. {title_html}" toc_html = f"{n}. {title_html}"
title_html = f"Appendix&nbsp;{n}" title_html = f"Appendix&nbsp;{n}"
elif typ == 'example': elif typ in ['example', 'figure']:
# skip the prepended `Example N. ` from _number_examples # skip the prepended `{Example,Figure} N. ` from numbering
toc_html, title = self._renderer.renderInline(inlines.children[2:]), title_html toc_html, title = self._renderer.renderInline(inlines.children[2:]), title_html
# xref title wants only the prepended text, sans the trailing colon and space # xref title wants only the prepended text, sans the trailing colon and space
title_html = self._renderer.renderInline(inlines.children[0:1]) title_html = self._renderer.renderInline(inlines.children[0:1])
@ -607,7 +662,8 @@ class HTMLConverter(BaseConverter[ManualHTMLRenderer]):
return XrefTarget(id, title_html, toc_html, re.sub('<.*?>', '', title), path, drop_fragment) return XrefTarget(id, title_html, toc_html, re.sub('<.*?>', '', title), path, drop_fragment)
def _postprocess(self, infile: Path, outfile: Path, tokens: Sequence[Token]) -> None: def _postprocess(self, infile: Path, outfile: Path, tokens: Sequence[Token]) -> None:
self._number_examples(tokens) self._number_block('example', "Example", tokens)
self._number_block('figure', "Figure", tokens)
xref_queue = self._collect_ids(tokens, outfile.name, 'book', True) xref_queue = self._collect_ids(tokens, outfile.name, 'book', True)
failed = False failed = False
@ -629,6 +685,22 @@ class HTMLConverter(BaseConverter[ManualHTMLRenderer]):
failed = True # do another round and report the first error failed = True # do another round and report the first error
xref_queue = deferred xref_queue = deferred
paths_seen = set()
for t in self._xref_targets.values():
paths_seen.add(t.path)
if len(paths_seen) == 1:
for (k, t) in self._xref_targets.items():
self._xref_targets[k] = XrefTarget(
t.id,
t.title_html,
t.toc_html,
t.title,
t.path,
t.drop_fragment,
drop_target=True
)
TocEntry.collect_and_link(self._xref_targets, tokens) TocEntry.collect_and_link(self._xref_targets, tokens)
@ -647,6 +719,8 @@ def _build_cli_html(p: argparse.ArgumentParser) -> None:
p.add_argument('--script', default=[], action='append') p.add_argument('--script', default=[], action='append')
p.add_argument('--toc-depth', default=1, type=int) p.add_argument('--toc-depth', default=1, type=int)
p.add_argument('--chunk-toc-depth', default=1, type=int) p.add_argument('--chunk-toc-depth', default=1, type=int)
p.add_argument('--section-toc-depth', default=0, type=int)
p.add_argument('--media-dir', default="media", type=Path)
p.add_argument('infile', type=Path) p.add_argument('infile', type=Path)
p.add_argument('outfile', type=Path) p.add_argument('outfile', type=Path)
@ -660,7 +734,7 @@ def _run_cli_html(args: argparse.Namespace) -> None:
md = HTMLConverter( md = HTMLConverter(
args.revision, args.revision,
HTMLParameters(args.generator, args.stylesheet, args.script, args.toc_depth, HTMLParameters(args.generator, args.stylesheet, args.script, args.toc_depth,
args.chunk_toc_depth), args.chunk_toc_depth, args.section_toc_depth, args.media_dir),
json.load(manpage_urls)) json.load(manpage_urls))
md.convert(args.infile, args.outfile) md.convert(args.infile, args.outfile)

View file

@ -14,7 +14,7 @@ from .utils import Freezeable
FragmentType = Literal['preface', 'part', 'chapter', 'section', 'appendix'] FragmentType = Literal['preface', 'part', 'chapter', 'section', 'appendix']
# in the TOC all fragments are allowed, plus the all-encompassing book. # in the TOC all fragments are allowed, plus the all-encompassing book.
TocEntryType = Literal['book', 'preface', 'part', 'chapter', 'section', 'appendix', 'example'] TocEntryType = Literal['book', 'preface', 'part', 'chapter', 'section', 'appendix', 'example', 'figure']
def is_include(token: Token) -> bool: def is_include(token: Token) -> bool:
return token.type == "fence" and token.info.startswith("{=include=} ") return token.type == "fence" and token.info.startswith("{=include=} ")
@ -110,9 +110,12 @@ class XrefTarget:
path: str path: str
"""whether to drop the `#anchor` from links when expanding xrefs""" """whether to drop the `#anchor` from links when expanding xrefs"""
drop_fragment: bool = False drop_fragment: bool = False
"""whether to drop the `path.html` from links when expanding xrefs.
mostly useful for docbook compatibility"""
drop_target: bool = False
def href(self) -> str: def href(self) -> str:
path = html.escape(self.path, True) path = "" if self.drop_target else html.escape(self.path, True)
return path if self.drop_fragment else f"{path}#{html.escape(self.id, True)}" return path if self.drop_fragment else f"{path}#{html.escape(self.id, True)}"
@dc.dataclass @dc.dataclass
@ -125,6 +128,7 @@ class TocEntry(Freezeable):
children: list[TocEntry] = dc.field(default_factory=list) children: list[TocEntry] = dc.field(default_factory=list)
starts_new_chunk: bool = False starts_new_chunk: bool = False
examples: list[TocEntry] = dc.field(default_factory=list) examples: list[TocEntry] = dc.field(default_factory=list)
figures: list[TocEntry] = dc.field(default_factory=list)
@property @property
def root(self) -> TocEntry: def root(self) -> TocEntry:
@ -139,7 +143,7 @@ class TocEntry(Freezeable):
@classmethod @classmethod
def collect_and_link(cls, xrefs: dict[str, XrefTarget], tokens: Sequence[Token]) -> TocEntry: def collect_and_link(cls, xrefs: dict[str, XrefTarget], tokens: Sequence[Token]) -> TocEntry:
entries, examples = cls._collect_entries(xrefs, tokens, 'book') entries, examples, figures = cls._collect_entries(xrefs, tokens, 'book')
def flatten_with_parent(this: TocEntry, parent: TocEntry | None) -> Iterable[TocEntry]: def flatten_with_parent(this: TocEntry, parent: TocEntry | None) -> Iterable[TocEntry]:
this.parent = parent this.parent = parent
@ -157,6 +161,7 @@ class TocEntry(Freezeable):
paths_seen.add(c.target.path) paths_seen.add(c.target.path)
flat[0].examples = examples flat[0].examples = examples
flat[0].figures = figures
for c in flat: for c in flat:
c.freeze() c.freeze()
@ -165,21 +170,23 @@ class TocEntry(Freezeable):
@classmethod @classmethod
def _collect_entries(cls, xrefs: dict[str, XrefTarget], tokens: Sequence[Token], def _collect_entries(cls, xrefs: dict[str, XrefTarget], tokens: Sequence[Token],
kind: TocEntryType) -> tuple[TocEntry, list[TocEntry]]: kind: TocEntryType) -> tuple[TocEntry, list[TocEntry], list[TocEntry]]:
# we assume that check_structure has been run recursively over the entire input. # we assume that check_structure has been run recursively over the entire input.
# list contains (tag, entry) pairs that will collapse to a single entry for # list contains (tag, entry) pairs that will collapse to a single entry for
# the full sequence. # the full sequence.
entries: list[tuple[str, TocEntry]] = [] entries: list[tuple[str, TocEntry]] = []
examples: list[TocEntry] = [] examples: list[TocEntry] = []
figures: list[TocEntry] = []
for token in tokens: for token in tokens:
if token.type.startswith('included_') and (included := token.meta.get('included')): if token.type.startswith('included_') and (included := token.meta.get('included')):
fragment_type_str = token.type[9:].removesuffix('s') fragment_type_str = token.type[9:].removesuffix('s')
assert fragment_type_str in get_args(TocEntryType) assert fragment_type_str in get_args(TocEntryType)
fragment_type = cast(TocEntryType, fragment_type_str) fragment_type = cast(TocEntryType, fragment_type_str)
for fragment, _path in included: for fragment, _path in included:
subentries, subexamples = cls._collect_entries(xrefs, fragment, fragment_type) subentries, subexamples, subfigures = cls._collect_entries(xrefs, fragment, fragment_type)
entries[-1][1].children.append(subentries) entries[-1][1].children.append(subentries)
examples += subexamples examples += subexamples
figures += subfigures
elif token.type == 'heading_open' and (id := cast(str, token.attrs.get('id', ''))): elif token.type == 'heading_open' and (id := cast(str, token.attrs.get('id', ''))):
while len(entries) > 1 and entries[-1][0] >= token.tag: while len(entries) > 1 and entries[-1][0] >= token.tag:
entries[-2][1].children.append(entries.pop()[1]) entries[-2][1].children.append(entries.pop()[1])
@ -188,7 +195,9 @@ class TocEntry(Freezeable):
token.meta['TocEntry'] = entries[-1][1] token.meta['TocEntry'] = entries[-1][1]
elif token.type == 'example_open' and (id := cast(str, token.attrs.get('id', ''))): elif token.type == 'example_open' and (id := cast(str, token.attrs.get('id', ''))):
examples.append(TocEntry('example', xrefs[id])) examples.append(TocEntry('example', xrefs[id]))
elif token.type == 'figure_open' and (id := cast(str, token.attrs.get('id', ''))):
figures.append(TocEntry('figure', xrefs[id]))
while len(entries) > 1: while len(entries) > 1:
entries[-2][1].children.append(entries.pop()[1]) entries[-2][1].children.append(entries.pop()[1])
return (entries[0][1], examples) return (entries[0][1], examples, figures)

View file

@ -1,6 +1,6 @@
from abc import ABC from abc import ABC
from collections.abc import Mapping, MutableMapping, Sequence from collections.abc import Mapping, MutableMapping, Sequence
from typing import Any, cast, Generic, get_args, Iterable, Literal, NoReturn, Optional, TypeVar from typing import Any, Callable, cast, Generic, get_args, Iterable, Literal, NoReturn, Optional, TypeVar
import dataclasses import dataclasses
import re import re
@ -12,6 +12,7 @@ from markdown_it.token import Token
from markdown_it.utils import OptionsDict from markdown_it.utils import OptionsDict
from mdit_py_plugins.container import container_plugin # type: ignore[attr-defined] from mdit_py_plugins.container import container_plugin # type: ignore[attr-defined]
from mdit_py_plugins.deflist import deflist_plugin # type: ignore[attr-defined] from mdit_py_plugins.deflist import deflist_plugin # type: ignore[attr-defined]
from mdit_py_plugins.footnote import footnote_plugin # type: ignore[attr-defined]
from mdit_py_plugins.myst_role import myst_role_plugin # type: ignore[attr-defined] from mdit_py_plugins.myst_role import myst_role_plugin # type: ignore[attr-defined]
_md_escape_table = { _md_escape_table = {
@ -40,7 +41,7 @@ def md_make_code(code: str, info: str = "", multiline: Optional[bool] = None) ->
ticks, sep = ('`' * (longest + (3 if multiline else 1)), '\n' if multiline else ' ') ticks, sep = ('`' * (longest + (3 if multiline else 1)), '\n' if multiline else ' ')
return f"{ticks}{info}{sep}{code}{sep}{ticks}" return f"{ticks}{info}{sep}{code}{sep}{ticks}"
AttrBlockKind = Literal['admonition', 'example'] AttrBlockKind = Literal['admonition', 'example', 'figure']
AdmonitionKind = Literal["note", "caution", "tip", "important", "warning"] AdmonitionKind = Literal["note", "caution", "tip", "important", "warning"]
@ -90,6 +91,29 @@ class Renderer:
"example_close": self.example_close, "example_close": self.example_close,
"example_title_open": self.example_title_open, "example_title_open": self.example_title_open,
"example_title_close": self.example_title_close, "example_title_close": self.example_title_close,
"image": self.image,
"figure_open": self.figure_open,
"figure_close": self.figure_close,
"figure_title_open": self.figure_title_open,
"figure_title_close": self.figure_title_close,
"table_open": self.table_open,
"table_close": self.table_close,
"thead_open": self.thead_open,
"thead_close": self.thead_close,
"tr_open": self.tr_open,
"tr_close": self.tr_close,
"th_open": self.th_open,
"th_close": self.th_close,
"tbody_open": self.tbody_open,
"tbody_close": self.tbody_close,
"td_open": self.td_open,
"td_close": self.td_close,
"footnote_ref": self.footnote_ref,
"footnote_block_open": self.footnote_block_open,
"footnote_block_close": self.footnote_block_close,
"footnote_open": self.footnote_open,
"footnote_close": self.footnote_close,
"footnote_anchor": self.footnote_anchor,
} }
self._admonitions = { self._admonitions = {
@ -225,6 +249,52 @@ class Renderer:
raise RuntimeError("md token not supported", token) raise RuntimeError("md token not supported", token)
def example_title_close(self, token: Token, tokens: Sequence[Token], i: int) -> str: def example_title_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token) raise RuntimeError("md token not supported", token)
def image(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def figure_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def figure_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def figure_title_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def figure_title_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def table_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def table_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def thead_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def thead_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def tr_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def tr_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def th_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def th_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def tbody_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def tbody_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def td_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def td_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def footnote_ref(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def footnote_block_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def footnote_block_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def footnote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def footnote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def footnote_anchor(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def _is_escaped(src: str, pos: int) -> bool: def _is_escaped(src: str, pos: int) -> bool:
found = 0 found = 0
@ -267,6 +337,8 @@ def _parse_blockattrs(info: str) -> Optional[tuple[AttrBlockKind, Optional[str],
return ('admonition', id, classes) return ('admonition', id, classes)
if classes == ['example']: if classes == ['example']:
return ('example', id, classes) return ('example', id, classes)
elif classes == ['figure']:
return ('figure', id, classes)
return None return None
def _attr_span_plugin(md: markdown_it.MarkdownIt) -> None: def _attr_span_plugin(md: markdown_it.MarkdownIt) -> None:
@ -368,6 +440,32 @@ def _heading_ids(md: markdown_it.MarkdownIt) -> None:
md.core.ruler.before("replacements", "heading_ids", heading_ids) md.core.ruler.before("replacements", "heading_ids", heading_ids)
def _footnote_ids(md: markdown_it.MarkdownIt) -> None:
"""generate ids for footnotes, their refs, and their backlinks. the ids we
generate here are derived from the footnote label, making numeric footnote
labels invalid.
"""
def generate_ids(tokens: Sequence[Token]) -> None:
for token in tokens:
if token.type == 'footnote_open':
if token.meta["label"][:1].isdigit():
assert token.map
raise RuntimeError(f"invalid footnote label in line {token.map[0] + 1}")
token.attrs['id'] = token.meta["label"]
elif token.type == 'footnote_anchor':
token.meta['target'] = f'{token.meta["label"]}.__back.{token.meta["subId"]}'
elif token.type == 'footnote_ref':
token.attrs['id'] = f'{token.meta["label"]}.__back.{token.meta["subId"]}'
token.meta['target'] = token.meta["label"]
elif token.type == 'inline':
assert token.children
generate_ids(token.children)
def footnote_ids(state: markdown_it.rules_core.StateCore) -> None:
generate_ids(state.tokens)
md.core.ruler.after("footnote_tail", "footnote_ids", footnote_ids)
def _compact_list_attr(md: markdown_it.MarkdownIt) -> None: def _compact_list_attr(md: markdown_it.MarkdownIt) -> None:
@dataclasses.dataclass @dataclasses.dataclass
class Entry: class Entry:
@ -416,6 +514,11 @@ def _block_attr(md: markdown_it.MarkdownIt) -> None:
if id is not None: if id is not None:
token.attrs['id'] = id token.attrs['id'] = id
stack.append('example_close') stack.append('example_close')
elif kind == 'figure':
token.type = 'figure_open'
if id is not None:
token.attrs['id'] = id
stack.append('figure_close')
else: else:
assert_never(kind) assert_never(kind)
elif token.type == 'container_blockattr_close': elif token.type == 'container_blockattr_close':
@ -423,31 +526,37 @@ def _block_attr(md: markdown_it.MarkdownIt) -> None:
md.core.ruler.push("block_attr", block_attr) md.core.ruler.push("block_attr", block_attr)
def _example_titles(md: markdown_it.MarkdownIt) -> None: def _block_titles(block: str) -> Callable[[markdown_it.MarkdownIt], None]:
open, close = f'{block}_open', f'{block}_close'
title_open, title_close = f'{block}_title_open', f'{block}_title_close'
""" """
find title headings of examples and stick them into meta for renderers, then find title headings of blocks and stick them into meta for renderers, then
remove them from the token stream. also checks whether any example contains a remove them from the token stream. also checks whether any block contains a
non-title heading since those would make toc generation extremely complicated. non-title heading since those would make toc generation extremely complicated.
""" """
def example_titles(state: markdown_it.rules_core.StateCore) -> None: def block_titles(state: markdown_it.rules_core.StateCore) -> None:
in_example = [False] in_example = [False]
for i, token in enumerate(state.tokens): for i, token in enumerate(state.tokens):
if token.type == 'example_open': if token.type == open:
if state.tokens[i + 1].type == 'heading_open': if state.tokens[i + 1].type == 'heading_open':
assert state.tokens[i + 3].type == 'heading_close' assert state.tokens[i + 3].type == 'heading_close'
state.tokens[i + 1].type = 'example_title_open' state.tokens[i + 1].type = title_open
state.tokens[i + 3].type = 'example_title_close' state.tokens[i + 3].type = title_close
else: else:
assert token.map assert token.map
raise RuntimeError(f"found example without title in line {token.map[0] + 1}") raise RuntimeError(f"found {block} without title in line {token.map[0] + 1}")
in_example.append(True) in_example.append(True)
elif token.type == 'example_close': elif token.type == close:
in_example.pop() in_example.pop()
elif token.type == 'heading_open' and in_example[-1]: elif token.type == 'heading_open' and in_example[-1]:
assert token.map assert token.map
raise RuntimeError(f"unexpected non-title heading in example in line {token.map[0] + 1}") raise RuntimeError(f"unexpected non-title heading in {block} in line {token.map[0] + 1}")
md.core.ruler.push("example_titles", example_titles) def do_add(md: markdown_it.MarkdownIt) -> None:
md.core.ruler.push(f"{block}_titles", block_titles)
return do_add
TR = TypeVar('TR', bound='Renderer') TR = TypeVar('TR', bound='Renderer')
@ -478,20 +587,24 @@ class Converter(ABC, Generic[TR]):
}, },
renderer_cls=self.ForbiddenRenderer renderer_cls=self.ForbiddenRenderer
) )
self._md.enable('table')
self._md.use( self._md.use(
container_plugin, container_plugin,
name="blockattr", name="blockattr",
validate=lambda name, *args: _parse_blockattrs(name), validate=lambda name, *args: _parse_blockattrs(name),
) )
self._md.use(deflist_plugin) self._md.use(deflist_plugin)
self._md.use(footnote_plugin)
self._md.use(myst_role_plugin) self._md.use(myst_role_plugin)
self._md.use(_attr_span_plugin) self._md.use(_attr_span_plugin)
self._md.use(_inline_comment_plugin) self._md.use(_inline_comment_plugin)
self._md.use(_block_comment_plugin) self._md.use(_block_comment_plugin)
self._md.use(_heading_ids) self._md.use(_heading_ids)
self._md.use(_footnote_ids)
self._md.use(_compact_list_attr) self._md.use(_compact_list_attr)
self._md.use(_block_attr) self._md.use(_block_attr)
self._md.use(_example_titles) self._md.use(_block_titles("example"))
self._md.use(_block_titles("figure"))
self._md.enable(["smartquotes", "replacements"]) self._md.enable(["smartquotes", "replacements"])
def _parse(self, src: str) -> list[Token]: def _parse(self, src: str) -> list[Token]:

View file

@ -91,3 +91,9 @@ some nested anchors
- *more stuff in same deflist* - *more stuff in same deflist*
       
foo""".replace(' ', ' ') foo""".replace(' ', ' ')
def test_images() -> None:
c = Converter({})
assert c._render("![*alt text*](foo \"title \\\"quoted\\\" text\")") == (
"![*alt text*](foo \"title \\\"quoted\\\" text\")"
)

View file

@ -1,12 +1,17 @@
import nixos_render_docs as nrd import nixos_render_docs as nrd
import pytest import pytest
import textwrap
from sample_md import sample1 from sample_md import sample1
class Renderer(nrd.html.HTMLRenderer):
def _pull_image(self, src: str) -> str:
return src
class Converter(nrd.md.Converter[nrd.html.HTMLRenderer]): class Converter(nrd.md.Converter[nrd.html.HTMLRenderer]):
def __init__(self, manpage_urls: dict[str, str], xrefs: dict[str, nrd.manual_structure.XrefTarget]): def __init__(self, manpage_urls: dict[str, str], xrefs: dict[str, nrd.manual_structure.XrefTarget]):
super().__init__() super().__init__()
self._renderer = nrd.html.HTMLRenderer(manpage_urls, xrefs) self._renderer = Renderer(manpage_urls, xrefs)
def unpretty(s: str) -> str: def unpretty(s: str) -> str:
return "".join(map(str.strip, s.splitlines())).replace('', ' ').replace('', '\n') return "".join(map(str.strip, s.splitlines())).replace('', ' ').replace('', '\n')
@ -69,6 +74,78 @@ def test_xrefs() -> None:
c._render("[](#baz)") c._render("[](#baz)")
assert exc.value.args[0] == 'bad local reference, id #baz not known' assert exc.value.args[0] == 'bad local reference, id #baz not known'
def test_images() -> None:
c = Converter({}, {})
assert c._render("![*alt text*](foo \"title text\")") == unpretty("""
<p>
<div class="mediaobject">
<img src="foo" alt="*alt text*" title="title text" />
</div>
</p>
""")
def test_tables() -> None:
c = Converter({}, {})
assert c._render(textwrap.dedent("""
| d | l | m | r |
|---|:--|:-:|--:|
| a | b | c | d |
""")) == unpretty("""
<div class="informaltable">
<table class="informaltable" border="1">
<colgroup>
<col align="left" />
<col align="left" />
<col align="center" />
<col align="right" />
</colgroup>
<thead>
<tr>
<th align="left">d</th>
<th align="left">l</th>
<th align="center">m</th>
<th align="right">r</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left">a</td>
<td align="left">b</td>
<td align="center">c</td>
<td align="right">d</td>
</tr>
</tbody>
</table>
</div>
""")
def test_footnotes() -> None:
c = Converter({}, {
"bar": nrd.manual_structure.XrefTarget("bar", "", None, None, ""),
"bar.__back.0": nrd.manual_structure.XrefTarget("bar.__back.0", "", None, None, ""),
"bar.__back.1": nrd.manual_structure.XrefTarget("bar.__back.1", "", None, None, ""),
})
assert c._render(textwrap.dedent("""
foo [^bar] baz [^bar]
[^bar]: note
""")) == unpretty("""
<p>
foo <a href="#bar" class="footnote" id="bar.__back.0"><sup class="footnote">[1]</sup></a>
baz <a href="#bar" class="footnote" id="bar.__back.1"><sup class="footnote">[1]</sup></a>
</p>
<div class="footnotes">
<br />
<hr style="width:100; text-align:left;margin-left: 0" />
<div id="bar" class="footnote">
<p>
note<a href="#bar.__back.0" class="para"><sup class="para">[1]</sup></a>
<a href="#bar.__back.1" class="para"><sup class="para">[1]</sup></a>
</p>
</div>
</div>
""")
def test_full() -> None: def test_full() -> None:
c = Converter({ 'man(1)': 'http://example.org' }, {}) c = Converter({ 'man(1)': 'http://example.org' }, {})
assert c._render(sample1) == unpretty(""" assert c._render(sample1) == unpretty("""

View file

@ -501,3 +501,28 @@ def test_example() -> None:
with pytest.raises(RuntimeError) as exc: with pytest.raises(RuntimeError) as exc:
c._parse("::: {.example}\n### foo\n### bar\n:::") c._parse("::: {.example}\n### foo\n### bar\n:::")
assert exc.value.args[0] == 'unexpected non-title heading in example in line 3' assert exc.value.args[0] == 'unexpected non-title heading in example in line 3'
def test_footnotes() -> None:
c = Converter({})
assert c._parse("text [^foo]\n\n[^foo]: bar") == [
Token(type='paragraph_open', tag='p', nesting=1, map=[0, 1], block=True),
Token(type='inline', tag='', nesting=0, map=[0, 1], level=1, content='text [^foo]', block=True,
children=[
Token(type='text', tag='', nesting=0, content='text '),
Token(type='footnote_ref', tag='', nesting=0, attrs={'id': 'foo.__back.0'},
meta={'id': 0, 'subId': 0, 'label': 'foo', 'target': 'foo'})
]),
Token(type='paragraph_close', tag='p', nesting=-1, block=True),
Token(type='footnote_block_open', tag='', nesting=1),
Token(type='footnote_open', tag='', nesting=1, attrs={'id': 'foo'}, meta={'id': 0, 'label': 'foo'}),
Token(type='paragraph_open', tag='p', nesting=1, map=[2, 3], level=1, block=True, hidden=False),
Token(type='inline', tag='', nesting=0, map=[2, 3], level=2, content='bar', block=True,
children=[
Token(type='text', tag='', nesting=0, content='bar')
]),
Token(type='footnote_anchor', tag='', nesting=0,
meta={'id': 0, 'label': 'foo', 'subId': 0, 'target': 'foo.__back.0'}),
Token(type='paragraph_close', tag='p', nesting=-1, level=1, block=True),
Token(type='footnote_close', tag='', nesting=-1),
Token(type='footnote_block_close', tag='', nesting=-1),
]