Merge pull request #217342 from pennae/nrd-html-manual

nixos-render-docs: add manual html renderer, use it for the nixos manual
commit 45e44c56f1
Naïm Favier, 2023-03-04 12:58:38 +01:00, committed by GitHub
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
22 changed files with 1723 additions and 769 deletions

@@ -135,28 +135,32 @@ let
}
'';
prepareManualFromMD = ''
cp -r --no-preserve=all $inputs/* .
substituteInPlace ./manual.md \
--replace '@NIXOS_VERSION@' "${version}"
substituteInPlace ./configuration/configuration.md \
--replace \
'@MODULE_CHAPTERS@' \
${lib.escapeShellArg (lib.concatMapStringsSep "\n" (p: "${p.value}") config.meta.doc)}
substituteInPlace ./nixos-options.md \
--replace \
'@NIXOS_OPTIONS_JSON@' \
${optionsDoc.optionsJSON}/share/doc/nixos/options.json
substituteInPlace ./development/writing-nixos-tests.section.md \
--replace \
'@NIXOS_TEST_OPTIONS_JSON@' \
${testOptionsDoc.optionsJSON}/share/doc/nixos/options.json
'';
manual-combined = runCommand "nixos-manual-combined"
{ inputs = lib.sourceFilesBySuffices ./. [ ".xml" ".md" ];
nativeBuildInputs = [ pkgs.nixos-render-docs pkgs.libxml2.bin pkgs.libxslt.bin ];
meta.description = "The NixOS manual as plain docbook XML";
}
''
cp -r --no-preserve=all $inputs/* .
substituteInPlace ./manual.md \
--replace '@NIXOS_VERSION@' "${version}"
substituteInPlace ./configuration/configuration.md \
--replace \
'@MODULE_CHAPTERS@' \
${lib.escapeShellArg (lib.concatMapStringsSep "\n" (p: "${p.value}") config.meta.doc)}
substituteInPlace ./nixos-options.md \
--replace \
'@NIXOS_OPTIONS_JSON@' \
${optionsDoc.optionsJSON}/share/doc/nixos/options.json
substituteInPlace ./development/writing-nixos-tests.section.md \
--replace \
'@NIXOS_TEST_OPTIONS_JSON@' \
${testOptionsDoc.optionsJSON}/share/doc/nixos/options.json
${prepareManualFromMD}
nixos-render-docs -j $NIX_BUILD_CORES manual docbook \
--manpage-urls ${manpageUrls} \
@@ -193,7 +197,14 @@ in rec {
# Generate the NixOS manual.
manualHTML = runCommand "nixos-manual-html"
{ nativeBuildInputs = [ buildPackages.libxml2.bin buildPackages.libxslt.bin ];
{ nativeBuildInputs =
if allowDocBook then [
buildPackages.libxml2.bin
buildPackages.libxslt.bin
] else [
buildPackages.nixos-render-docs
];
inputs = lib.optionals (! allowDocBook) (lib.sourceFilesBySuffices ./. [ ".md" ]);
meta.description = "The NixOS manual in HTML format";
allowedReferences = ["out"];
}
@@ -201,23 +212,44 @@ in rec {
# Generate the HTML manual.
dst=$out/share/doc/nixos
mkdir -p $dst
xsltproc \
${manualXsltprocOptions} \
--stringparam id.warnings "1" \
--nonet --output $dst/ \
${docbook_xsl_ns}/xml/xsl/docbook/xhtml/chunktoc.xsl \
${manual-combined}/manual-combined.xml \
|& tee xsltproc.out
grep "^ID recommended on" xsltproc.out &>/dev/null && echo "error: some IDs are missing" && false
rm xsltproc.out
mkdir -p $dst/images/callouts
cp ${docbook_xsl_ns}/xml/xsl/docbook/images/callouts/*.svg $dst/images/callouts/
cp ${../../../doc/style.css} $dst/style.css
cp ${../../../doc/overrides.css} $dst/overrides.css
cp -r ${pkgs.documentation-highlighter} $dst/highlightjs
${if allowDocBook then ''
xsltproc \
${manualXsltprocOptions} \
--stringparam id.warnings "1" \
--nonet --output $dst/ \
${docbook_xsl_ns}/xml/xsl/docbook/xhtml/chunktoc.xsl \
${manual-combined}/manual-combined.xml \
|& tee xsltproc.out
grep "^ID recommended on" xsltproc.out &>/dev/null && echo "error: some IDs are missing" && false
rm xsltproc.out
mkdir -p $dst/images/callouts
cp ${docbook_xsl_ns}/xml/xsl/docbook/images/callouts/*.svg $dst/images/callouts/
'' else ''
${prepareManualFromMD}
# TODO generator is set like this because the docbook/md manual compare workflow will
# trigger if it's different
nixos-render-docs -j $NIX_BUILD_CORES manual html \
--manpage-urls ${manpageUrls} \
--revision ${lib.escapeShellArg revision} \
--generator "DocBook XSL Stylesheets V${docbook_xsl_ns.version}" \
--stylesheet style.css \
--stylesheet overrides.css \
--stylesheet highlightjs/mono-blue.css \
--script ./highlightjs/highlight.pack.js \
--script ./highlightjs/loader.js \
--toc-depth 1 \
--chunk-toc-depth 1 \
./manual.md \
$dst/index.html
''}
mkdir -p $out/nix-support
echo "nix-build out $out" >> $out/nix-support/hydra-build-products
echo "doc manual $dst" >> $out/nix-support/hydra-build-products

@@ -47,7 +47,10 @@ development/development.md
contributing-to-this-manual.chapter.md
```
```{=include=} appendix
```{=include=} appendix html:into-file=//options.html
nixos-options.md
```
```{=include=} appendix html:into-file=//release-notes.html
release-notes/release-notes.md
```

@@ -318,8 +318,8 @@ to make packages available in the chroot.
{option}`services.systemd.akkoma.serviceConfig.BindPaths` and
{option}`services.systemd.akkoma.serviceConfig.BindReadOnlyPaths` permit access to outside paths
through bind mounts. Refer to
[{manpage}`systemd.exec(5)`](https://www.freedesktop.org/software/systemd/man/systemd.exec.html#BindPaths=)
for details.
[`BindPaths=`](https://www.freedesktop.org/software/systemd/man/systemd.exec.html#BindPaths=)
of {manpage}`systemd.exec(5)` for details.
### Distributed deployment {#modules-services-akkoma-distributed-deployment}

@@ -1948,7 +1948,7 @@ in
Extra command-line arguments to pass to systemd-networkd-wait-online.
These also affect per-interface `systemd-network-wait-online@` services.
See [{manpage}`systemd-networkd-wait-online.service(8)`](https://www.freedesktop.org/software/systemd/man/systemd-networkd-wait-online.service.html) for all available options.
See {manpage}`systemd-networkd-wait-online.service(8)` for all available options.
'';
type = with types; listOf str;
default = [];
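The explicit link can be dropped because the renderers resolve {manpage} roles against the manpage-urls mapping themselves; the sketch below mirrors the manpage branch of myst_role in the new HTML renderer added later in this commit. The mapping entry reuses the URL removed above; whether that exact key exists in manpage-urls.json is an assumption.

```python
# Stand-alone sketch of the {manpage} role handling in the new HTML renderer;
# the manpage_urls entry is assumed to be present in manpage-urls.json.
from html import escape

manpage_urls = {
    "systemd-networkd-wait-online.service(8)":
        "https://www.freedesktop.org/software/systemd/man/systemd-networkd-wait-online.service.html",
}

def render_manpage_role(content: str) -> str:
    page, section = (s.strip() for s in content.rsplit('(', 1))
    section = section[:-1]  # drop the trailing ')'
    title = f'<span class="refentrytitle">{escape(page)}</span>'
    ref = f'<span class="citerefentry">{title}({escape(section)})</span>'
    if url := manpage_urls.get(content):
        return f'<a class="link" href="{escape(url, True)}" target="_top">{ref}</a>'
    return ref  # no known URL: emit the reference without a link

print(render_manpage_role("systemd-networkd-wait-online.service(8)"))
```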

@@ -1,13 +1,11 @@
from collections.abc import Mapping, MutableMapping, Sequence
from collections.abc import Mapping, Sequence
from dataclasses import dataclass
from typing import Any, cast, Optional
from urllib.parse import quote
from .md import Renderer
import markdown_it
from markdown_it.token import Token
from markdown_it.utils import OptionsDict
_asciidoc_escapes = {
# escape all dots, just in case one is pasted at SOL
@@ -59,8 +57,8 @@ class AsciiDocRenderer(Renderer):
_list_stack: list[List]
_attrspans: list[str]
def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None):
super().__init__(manpage_urls, parser)
def __init__(self, manpage_urls: Mapping[str, str]):
super().__init__(manpage_urls)
self._parstack = [ Par("\n\n", "====") ]
self._list_stack = []
self._attrspans = []
@@ -96,142 +94,103 @@ class AsciiDocRenderer(Renderer):
self._list_stack.pop()
return ""
def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return asciidoc_escape(token.content)
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._break()
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return " +\n"
def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f" "
def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return f"``{asciidoc_escape(token.content)}``"
def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
return self.fence(token, tokens, i, options, env)
def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self.fence(token, tokens, i)
def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return f"link:{quote(cast(str, token.attrs['href']), safe='/:')}["
def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "]"
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._enter_block(True)
# allow the next token to be a block or an inline.
return f'\n{self._list_stack[-1].head} {{empty}}'
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return "\n"
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._list_open(token, '*')
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._list_close()
def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "__"
def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "__"
def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "**"
def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "**"
def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
attrs = f"[source,{token.info}]\n" if token.info else ""
code = token.content
if code.endswith('\n'):
code = code[:-1]
return f"{self._break(True)}{attrs}----\n{code}\n----"
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
pbreak = self._break(True)
self._enter_block(False)
return f"{pbreak}[quote]\n{self._parstack[-2].block_delim}\n"
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return f"\n{self._parstack[-1].block_delim}"
def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("NOTE")
def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("CAUTION")
def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("IMPORTANT")
def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("TIP")
def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("WARNING")
def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"{self._break()}[]"
def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._break()
def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._enter_block(True)
return ":: {empty}"
def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return "\n"
def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
content = asciidoc_escape(token.content)
if token.meta['name'] == 'manpage' and (url := self._manpage_urls.get(token.content)):
return f"link:{quote(url, safe='/:')}[{content}]"
return f"[.{token.meta['name']}]``{asciidoc_escape(token.content)}``"
def inline_anchor(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def inline_anchor(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return f"[[{token.attrs['id']}]]"
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
(id_part, class_part) = ("", "")
if id := token.attrs.get('id'):
@@ -241,22 +200,17 @@ class AsciiDocRenderer(Renderer):
class_part = "kbd:["
self._attrspans.append("]")
else:
return super().attr_span_begin(token, tokens, i, options, env)
return super().attr_span_begin(token, tokens, i)
else:
self._attrspans.append("")
return id_part + class_part
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._attrspans.pop()
def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return token.markup.replace("#", "=") + " "
def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "\n"
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._list_open(token, '.')
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._list_close()
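The bulk of the changes above, and in the renderers that follow, is mechanical: every per-token hook drops the unused markdown-it `options`/`env` parameters. A minimal stand-in showing the new shape (`ExampleRenderer` is illustrative, not part of nixos-render-docs):

```python
# Sketch of the simplified hook signature this commit standardizes on.
from collections.abc import Sequence
from markdown_it.token import Token

class ExampleRenderer:
    # before: def text(self, token, tokens, i, options: OptionsDict,
    #                  env: MutableMapping[str, Any]) -> str: ...
    # after: hooks only receive the token, the full token stream, and the index.
    def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return token.content

    def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
        return "*"
```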

@@ -1,12 +1,10 @@
from collections.abc import Mapping, MutableMapping, Sequence
from collections.abc import Mapping, Sequence
from dataclasses import dataclass
from typing import Any, cast, Optional
from .md import md_escape, md_make_code, Renderer
import markdown_it
from markdown_it.token import Token
from markdown_it.utils import OptionsDict
@dataclass(kw_only=True)
class List:
@@ -26,8 +24,8 @@ class CommonMarkRenderer(Renderer):
_link_stack: list[str]
_list_stack: list[List]
def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None):
super().__init__(manpage_urls, parser)
def __init__(self, manpage_urls: Mapping[str, str]):
super().__init__(manpage_urls)
self._parstack = [ Par("") ]
self._link_stack = []
self._list_stack = []
@@ -58,39 +56,29 @@ class CommonMarkRenderer(Renderer):
return s
return f"\n{self._parstack[-1].indent}".join(s.splitlines())
def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return self._indent_raw(md_escape(token.content))
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._maybe_parbreak()
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f" {self._break()}"
def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._break()
def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return md_make_code(token.content)
def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
return self.fence(token, tokens, i, options, env)
def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self.fence(token, tokens, i)
def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
self._link_stack.append(cast(str, token.attrs['href']))
return "["
def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"]({md_escape(self._link_stack.pop())})"
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
lst = self._list_stack[-1]
lbreak = "" if not lst.first_item_seen else self._break() * (1 if lst.compact else 2)
lst.first_item_seen = True
@@ -100,132 +88,99 @@ class CommonMarkRenderer(Renderer):
lst.next_idx += 1
self._enter_block(" " * (len(head) + 1))
return f'{lbreak}{head} '
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return ""
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.append(List(compact=bool(token.meta['compact'])))
return self._maybe_parbreak()
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.pop()
return ""
def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "*"
def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "*"
def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "**"
def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "**"
def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
code = token.content
if code.endswith('\n'):
code = code[:-1]
pbreak = self._maybe_parbreak()
return pbreak + self._indent_raw(md_make_code(code, info=token.info, multiline=True))
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
pbreak = self._maybe_parbreak()
self._enter_block("> ")
return pbreak + "> "
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return ""
def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("Note")
def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("Caution")
def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("Important")
def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("Tip")
def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("Warning")
def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.append(List(compact=False))
return ""
def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.pop()
return ""
def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
pbreak = self._maybe_parbreak()
self._enter_block(" ")
# add an opening zero-width non-joiner to separate *our* emphasis from possible
# emphasis in the provided term
return f'{pbreak} - *{chr(0x200C)}'
def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"{chr(0x200C)}*"
def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
return ""
def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return ""
def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._parstack[-1].continuing = True
content = md_make_code(token.content)
if token.meta['name'] == 'manpage' and (url := self._manpage_urls.get(token.content)):
return f"[{content}]({url})"
return content # no roles in regular commonmark
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# there's no way we can emit attrspans correctly in all cases. we could use inline
# html for ids, but that would not round-trip. same holds for classes. since this
# renderer is only used for approximate options export and all of these things are
# not allowed in options we can ignore them for now.
return ""
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return token.markup + " "
def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "\n"
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.append(
List(next_idx = cast(int, token.attrs.get('start', 1)),
compact = bool(token.meta['compact'])))
return self._maybe_parbreak()
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.pop()
return ""

@@ -1,9 +1,8 @@
from collections.abc import Mapping, MutableMapping, Sequence
from collections.abc import Mapping, Sequence
from typing import Any, cast, Optional, NamedTuple
import markdown_it
from markdown_it.token import Token
from markdown_it.utils import OptionsDict
from xml.sax.saxutils import escape, quoteattr
from .md import Renderer
@@ -32,26 +31,23 @@ class Heading(NamedTuple):
partintro_closed: bool = False
class DocBookRenderer(Renderer):
__output__ = "docbook"
_link_tags: list[str]
_deflists: list[Deflist]
_headings: list[Heading]
_attrspans: list[str]
def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None):
super().__init__(manpage_urls, parser)
def __init__(self, manpage_urls: Mapping[str, str]):
super().__init__(manpage_urls)
self._link_tags = []
self._deflists = []
self._headings = []
self._attrspans = []
def render(self, tokens: Sequence[Token], options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
result = super().render(tokens, options, env)
result += self._close_headings(None, env)
def render(self, tokens: Sequence[Token]) -> str:
result = super().render(tokens)
result += self._close_headings(None)
return result
def renderInline(self, tokens: Sequence[Token], options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def renderInline(self, tokens: Sequence[Token]) -> str:
# HACK to support docbook links and xrefs. link handling is only necessary because the docbook
# manpage stylesheet converts - in urls to a mathematical minus, which may be somewhat incorrect.
for i, token in enumerate(tokens):
@@ -65,135 +61,98 @@ class DocBookRenderer(Renderer):
if tokens[i + 1].type == 'text' and tokens[i + 1].content == token.attrs['href']:
tokens[i + 1].content = ''
return super().renderInline(tokens, options, env)
return super().renderInline(tokens)
def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return escape(token.content)
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<para>"
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</para>"
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<literallayout>\n</literallayout>"
def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# should check options.breaks() and emit hard break if so
return "\n"
def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"<literal>{escape(token.content)}</literal>"
def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"<programlisting>{escape(token.content)}</programlisting>"
def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._link_tags.append(token.tag)
href = cast(str, token.attrs['href'])
(attr, start) = ('linkend', 1) if href[0] == '#' else ('xlink:href', 0)
return f"<{token.tag} {attr}={quoteattr(href[start:])}>"
def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"</{self._link_tags.pop()}>"
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<listitem>"
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</listitem>\n"
# HACK open and close para for docbook change size. remove soon.
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
spacing = ' spacing="compact"' if token.meta.get('compact', False) else ''
return f"<para><itemizedlist{spacing}>\n"
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "\n</itemizedlist></para>"
def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<emphasis>"
def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</emphasis>"
def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<emphasis role=\"strong\">"
def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</emphasis>"
def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
info = f" language={quoteattr(token.info)}" if token.info != "" else ""
return f"<programlisting{info}>{escape(token.content)}</programlisting>"
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<para><blockquote>"
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</blockquote></para>"
def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<para><note>"
def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</note></para>"
def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<para><caution>"
def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</caution></para>"
def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<para><important>"
def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</important></para>"
def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<para><tip>"
def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</tip></para>"
def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<para><warning>"
def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</warning></para>"
# markdown-it emits tokens based on the html syntax tree, but docbook is
# slightly different. html has <dl>{<dt/>{<dd/>}}</dl>,
# docbook has <variablelist>{<varlistentry><term/><listitem/></varlistentry>}<variablelist>
# we have to reject multiple definitions for the same term for time being.
def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._deflists.append(Deflist())
return "<para><variablelist>"
def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._deflists.pop()
return "</variablelist></para>"
def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._deflists[-1].has_dd = False
return "<varlistentry><term>"
def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</term>"
def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
if self._deflists[-1].has_dd:
raise Exception("multiple definitions per term not supported")
self._deflists[-1].has_dd = True
return "<listitem>"
def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</listitem></varlistentry>"
def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
if token.meta['name'] == 'command':
return f"<command>{escape(token.content)}</command>"
if token.meta['name'] == 'file':
@@ -216,8 +175,7 @@ class DocBookRenderer(Renderer):
else:
return ref
raise NotImplementedError("md node not supported yet", token)
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# we currently support *only* inline anchors and the special .keycap class to produce
# <keycap> docbook elements.
(id_part, class_part) = ("", "")
@@ -228,31 +186,26 @@ class DocBookRenderer(Renderer):
class_part = "<keycap>"
self._attrspans.append("</keycap>")
else:
return super().attr_span_begin(token, tokens, i, options, env)
return super().attr_span_begin(token, tokens, i)
else:
self._attrspans.append("")
return id_part + class_part
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._attrspans.pop()
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
start = f' startingnumber="{token.attrs["start"]}"' if 'start' in token.attrs else ""
spacing = ' spacing="compact"' if token.meta.get('compact', False) else ''
return f"<orderedlist{start}{spacing}>"
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"</orderedlist>"
def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
hlevel = int(token.tag[1:])
result = self._close_headings(hlevel, env)
(tag, attrs) = self._heading_tag(token, tokens, i, options, env)
result = self._close_headings(hlevel)
(tag, attrs) = self._heading_tag(token, tokens, i)
self._headings.append(Heading(tag, hlevel))
attrs_str = "".join([ f" {k}={quoteattr(v)}" for k, v in attrs.items() ])
return result + f'<{tag}{attrs_str}>\n<title>'
def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
heading = self._headings[-1]
result = '</title>'
if heading.container_tag == 'part':
@@ -264,16 +217,14 @@ class DocBookRenderer(Renderer):
maybe_id = " xml:id=" + quoteattr(id + "-intro")
result += f"<partintro{maybe_id}>"
return result
def example_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
if id := token.attrs.get('id'):
return f"<anchor xml:id={quoteattr(cast(str, id))} />"
return ""
def example_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def example_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
def _close_headings(self, level: Optional[int], env: MutableMapping[str, Any]) -> str:
def _close_headings(self, level: Optional[int]) -> str:
# we rely on markdown-it producing h{1..6} tags in token.tag for this to work
result = []
while len(self._headings):
@@ -286,8 +237,7 @@ class DocBookRenderer(Renderer):
break
return "\n".join(result)
def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> tuple[str, dict[str, str]]:
def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> tuple[str, dict[str, str]]:
attrs = {}
if id := token.attrs.get('id'):
attrs['xml:id'] = cast(str, id)

@@ -0,0 +1,245 @@
from collections.abc import Mapping, Sequence
from typing import cast, Optional, NamedTuple
from html import escape
from markdown_it.token import Token
from .manual_structure import XrefTarget
from .md import Renderer
class UnresolvedXrefError(Exception):
pass
class Heading(NamedTuple):
container_tag: str
level: int
html_tag: str
# special handling for part content: whether partinfo div was already closed from
# elsewhere or still needs closing.
partintro_closed: bool
# tocs are generated when the heading opens, but have to be emitted into the file
# after the heading titlepage (and maybe partinfo) has been closed.
toc_fragment: str
_bullet_list_styles = [ 'disc', 'circle', 'square' ]
_ordered_list_styles = [ '1', 'a', 'i', 'A', 'I' ]
class HTMLRenderer(Renderer):
_xref_targets: Mapping[str, XrefTarget]
_headings: list[Heading]
_attrspans: list[str]
_hlevel_offset: int = 0
_bullet_list_nesting: int = 0
_ordered_list_nesting: int = 0
def __init__(self, manpage_urls: Mapping[str, str], xref_targets: Mapping[str, XrefTarget]):
super().__init__(manpage_urls)
self._headings = []
self._attrspans = []
self._xref_targets = xref_targets
def render(self, tokens: Sequence[Token]) -> str:
result = super().render(tokens)
result += self._close_headings(None)
return result
def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return escape(token.content)
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<p>"
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</p>"
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<br />"
def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "\n"
def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f'<code class="literal">{escape(token.content)}</code>'
def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self.fence(token, tokens, i)
def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
href = escape(cast(str, token.attrs['href']), True)
tag, title, target, text = "link", "", 'target="_top"', ""
if href.startswith('#'):
if not (xref := self._xref_targets.get(href[1:])):
raise UnresolvedXrefError(f"bad local reference, id {href} not known")
if tokens[i + 1].type == 'link_close':
tag, text = "xref", xref.title_html
if xref.title:
title = f'title="{escape(xref.title, True)}"'
target, href = "", xref.href()
return f'<a class="{tag}" href="{href}" {title} {target}>{text}'
def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</a>"
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return '<li class="listitem">'
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</li>"
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
extra = 'compact' if token.meta.get('compact', False) else ''
style = _bullet_list_styles[self._bullet_list_nesting % len(_bullet_list_styles)]
self._bullet_list_nesting += 1
return f'<div class="itemizedlist"><ul class="itemizedlist {extra}" style="list-style-type: {style};">'
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._bullet_list_nesting -= 1
return "</ul></div>"
def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return '<span class="emphasis"><em>'
def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</em></span>"
def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return '<span class="strong"><strong>'
def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</strong></span>"
def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# TODO use token.info. docbook doesn't so we can't yet.
return f'<pre class="programlisting">\n{escape(token.content)}</pre>'
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return '<div class="blockquote"><blockquote class="blockquote">'
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</blockquote></div>"
def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return '<div class="note"><h3 class="title">Note</h3>'
def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</div>"
def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return '<div class="caution"><h3 class="title">Caution</h3>'
def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</div>"
def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return '<div class="important"><h3 class="title">Important</h3>'
def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</div>"
def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return '<div class="tip"><h3 class="title">Tip</h3>'
def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</div>"
def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return '<div class="warning"><h3 class="title">Warning</h3>'
def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</div>"
def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return '<div class="variablelist"><dl class="variablelist">'
def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</dl></div>"
def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return '<dt><span class="term">'
def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</span></dt>"
def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "<dd>"
def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "</dd>"
def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
if token.meta['name'] == 'command':
return f'<span class="command"><strong>{escape(token.content)}</strong></span>'
if token.meta['name'] == 'file':
return f'<code class="filename">{escape(token.content)}</code>'
if token.meta['name'] == 'var':
return f'<code class="varname">{escape(token.content)}</code>'
if token.meta['name'] == 'env':
return f'<code class="envar">{escape(token.content)}</code>'
if token.meta['name'] == 'option':
return f'<code class="option">{escape(token.content)}</code>'
if token.meta['name'] == 'manpage':
[page, section] = [ s.strip() for s in token.content.rsplit('(', 1) ]
section = section[:-1]
man = f"{page}({section})"
title = f'<span class="refentrytitle">{escape(page)}</span>'
vol = f"({escape(section)})"
ref = f'<span class="citerefentry">{title}{vol}</span>'
if man in self._manpage_urls:
return f'<a class="link" href="{escape(self._manpage_urls[man], True)}" target="_top">{ref}</a>'
else:
return ref
return super().myst_role(token, tokens, i)
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# we currently support *only* inline anchors and the special .keycap class to produce
# keycap-styled spans.
(id_part, class_part) = ("", "")
if s := token.attrs.get('id'):
id_part = f'<a id="{escape(cast(str, s), True)}" />'
if s := token.attrs.get('class'):
if s == 'keycap':
class_part = '<span class="keycap"><strong>'
self._attrspans.append("</strong></span>")
else:
return super().attr_span_begin(token, tokens, i)
else:
self._attrspans.append("")
return id_part + class_part
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._attrspans.pop()
def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
hlevel = int(token.tag[1:])
htag, hstyle = self._make_hN(hlevel)
if hstyle:
hstyle = f'style="{escape(hstyle, True)}"'
if anchor := cast(str, token.attrs.get('id', '')):
anchor = f'<a id="{escape(anchor, True)}"></a>'
result = self._close_headings(hlevel)
tag = self._heading_tag(token, tokens, i)
toc_fragment = self._build_toc(tokens, i)
self._headings.append(Heading(tag, hlevel, htag, tag != 'part', toc_fragment))
return (
f'{result}'
f'<div class="{tag}">'
f' <div class="titlepage">'
f' <div>'
f' <div>'
f' <{htag} class="title" {hstyle}>'
f' {anchor}'
)
def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
heading = self._headings[-1]
result = (
f' </{heading.html_tag}>'
f' </div>'
f' </div>'
f'</div>'
)
if heading.container_tag == 'part':
result += '<div class="partintro">'
else:
result += heading.toc_fragment
return result
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
extra = 'compact' if token.meta.get('compact', False) else ''
start = f'start="{token.attrs["start"]}"' if 'start' in token.attrs else ""
style = _ordered_list_styles[self._ordered_list_nesting % len(_ordered_list_styles)]
self._ordered_list_nesting += 1
return f'<div class="orderedlist"><ol class="orderedlist {extra}" {start} type="{style}">'
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._ordered_list_nesting -= 1
return "</ol></div>"
def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
if id := token.attrs.get('id'):
return f'<a id="{escape(cast(str, id), True)}" />'
return ""
def example_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
def _make_hN(self, level: int) -> tuple[str, str]:
return f"h{min(6, max(1, level + self._hlevel_offset))}", ""
def _maybe_close_partintro(self) -> str:
if self._headings:
heading = self._headings[-1]
if heading.container_tag == 'part' and not heading.partintro_closed:
self._headings[-1] = heading._replace(partintro_closed=True)
return heading.toc_fragment + "</div>"
return ""
def _close_headings(self, level: Optional[int]) -> str:
result = []
while len(self._headings) and (level is None or self._headings[-1].level >= level):
result.append(self._maybe_close_partintro())
result.append("</div>")
self._headings.pop()
return "\n".join(result)
def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "section"
def _build_toc(self, tokens: Sequence[Token], i: int) -> str:
return ""

@@ -1,4 +1,4 @@
from collections.abc import Mapping, MutableMapping, Sequence
from collections.abc import Mapping, Sequence
from dataclasses import dataclass
from typing import Any, cast, Iterable, Optional
@ -6,7 +6,6 @@ import re
import markdown_it
from markdown_it.token import Token
from markdown_it.utils import OptionsDict
from .md import Renderer
@ -75,8 +74,6 @@ class List:
# horizontal motion in a line) we do attempt to copy the style of mdoc(7) semantic requests
# as appropriate for each markup element.
class ManpageRenderer(Renderer):
__output__ = "man"
# whether to emit mdoc .Ql equivalents for inline code or just the contents. this is
# mainly used by the options manpage converter to not emit extra quotes in defaults
# and examples where it's already clear from context that the following text is code.
@ -90,9 +87,8 @@ class ManpageRenderer(Renderer):
_list_stack: list[List]
_font_stack: list[str]
def __init__(self, manpage_urls: Mapping[str, str], href_targets: dict[str, str],
parser: Optional[markdown_it.MarkdownIt] = None):
super().__init__(manpage_urls, parser)
def __init__(self, manpage_urls: Mapping[str, str], href_targets: dict[str, str]):
super().__init__(manpage_urls)
self._href_targets = href_targets
self._link_stack = []
self._do_parbreak_stack = []
@ -126,36 +122,27 @@ class ManpageRenderer(Renderer):
self._leave_block()
return ".RE"
def render(self, tokens: Sequence[Token], options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def render(self, tokens: Sequence[Token]) -> str:
self._do_parbreak_stack = [ False ]
self._font_stack = [ "\\fR" ]
return super().render(tokens, options, env)
return super().render(tokens)
def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return man_escape(token.content)
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._maybe_parbreak()
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ".br"
def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return " "
def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
s = _protect_spaces(man_escape(token.content))
return f"\\fR\\(oq{s}\\(cq\\fP" if self.inline_code_is_quoted else s
def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
return self.fence(token, tokens, i, options, env)
def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self.fence(token, tokens, i)
def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
href = cast(str, token.attrs['href'])
self._link_stack.append(href)
text = ""
@ -164,8 +151,7 @@ class ManpageRenderer(Renderer):
text = self._href_targets[href]
self._font_stack.append("\\fB")
return f"\\fB{text}\0 <"
def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
href = self._link_stack.pop()
text = ""
if self.link_footnotes is not None:
@ -177,8 +163,7 @@ class ManpageRenderer(Renderer):
text = "\\fR" + man_escape(f"[{idx}]")
self._font_stack.pop()
return f">\0 {text}{self._font_stack[-1]}"
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._enter_block()
lst = self._list_stack[-1]
maybe_space = '' if lst.compact or not lst.first_item_seen else '.sp\n'
@ -192,36 +177,28 @@ class ManpageRenderer(Renderer):
f'.RS {lst.width}\n'
f"\\h'-{len(head) + 1}'\\fB{man_escape(head)}\\fP\\h'1'\\c"
)
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return ".RE"
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.append(List(width=4, compact=bool(token.meta['compact'])))
return self._maybe_parbreak()
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.pop()
return ""
def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._font_stack.append("\\fI")
return "\\fI"
def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._font_stack.pop()
return self._font_stack[-1]
def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._font_stack.append("\\fB")
return "\\fB"
def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._font_stack.pop()
return self._font_stack[-1]
def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
s = man_escape(token.content).rstrip('\n')
return (
'.sp\n'
@ -231,8 +208,7 @@ class ManpageRenderer(Renderer):
'.fi\n'
'.RE'
)
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
maybe_par = self._maybe_parbreak("\n")
self._enter_block()
return (
@ -240,62 +216,44 @@ class ManpageRenderer(Renderer):
".RS 4\n"
f"\\h'-3'\\fI\\(lq\\(rq\\fP\\h'1'\\c"
)
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return ".RE"
def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open("Note")
def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open( "Caution")
def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open( "Important")
def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open( "Tip")
def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_open( "Warning")
def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonition_close()
def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ".RS 4"
def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ".RE"
def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ".PP"
def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._enter_block()
return ".RS 4"
def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._leave_block()
return ".RE"
def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
if token.meta['name'] in [ 'command', 'env', 'option' ]:
return f'\\fB{man_escape(token.content)}\\fP'
elif token.meta['name'] in [ 'file', 'var' ]:
@ -306,23 +264,18 @@ class ManpageRenderer(Renderer):
return f'\\fB{man_escape(page)}\\fP\\fR({man_escape(section)})\\fP'
else:
raise NotImplementedError("md node not supported yet", token)
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# mdoc knows no anchors so we can drop those, but classes must be rejected.
if 'class' in token.attrs:
return super().attr_span_begin(token, tokens, i, options, env)
return super().attr_span_begin(token, tokens, i)
return ""
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return ""
def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported in manpages", token)
def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported in manpages", token)
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# max item head width for a number, a dot, and one leading space and one trailing space
width = 3 + len(str(cast(int, token.meta['end'])))
self._list_stack.append(
@ -330,7 +283,6 @@ class ManpageRenderer(Renderer):
next_idx = cast(int, token.attrs.get('start', 1)),
compact = bool(token.meta['compact'])))
return self._maybe_parbreak()
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
self._list_stack.pop()
return ""


@ -1,160 +1,85 @@
import argparse
import html
import json
import re
import xml.sax.saxutils as xml
from abc import abstractmethod
from collections.abc import Mapping, MutableMapping, Sequence
from collections.abc import Mapping, Sequence
from pathlib import Path
from typing import Any, cast, NamedTuple, Optional, Union
from xml.sax.saxutils import escape, quoteattr
from typing import Any, cast, ClassVar, Generic, get_args, NamedTuple, Optional, Union
import markdown_it
from markdown_it.token import Token
from markdown_it.utils import OptionsDict
from . import options
from .docbook import DocBookRenderer, Heading
from .md import Converter
from . import md, options
from .docbook import DocBookRenderer, Heading, make_xml_id
from .html import HTMLRenderer, UnresolvedXrefError
from .manual_structure import check_structure, FragmentType, is_include, TocEntry, TocEntryType, XrefTarget
from .md import Converter, Renderer
from .utils import Freezeable
class ManualDocBookRenderer(DocBookRenderer):
_toplevel_tag: str
def __init__(self, toplevel_tag: str, manpage_urls: Mapping[str, str],
parser: Optional[markdown_it.MarkdownIt] = None):
super().__init__(manpage_urls, parser)
self._toplevel_tag = toplevel_tag
self.rules |= {
'included_sections': lambda *args: self._included_thing("section", *args),
'included_chapters': lambda *args: self._included_thing("chapter", *args),
'included_preface': lambda *args: self._included_thing("preface", *args),
'included_parts': lambda *args: self._included_thing("part", *args),
'included_appendix': lambda *args: self._included_thing("appendix", *args),
'included_options': self.included_options,
}
def render(self, tokens: Sequence[Token], options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
wanted = { 'h1': 'title' }
wanted |= { 'h2': 'subtitle' } if self._toplevel_tag == 'book' else {}
for (i, (tag, kind)) in enumerate(wanted.items()):
if len(tokens) < 3 * (i + 1):
raise RuntimeError(f"missing {kind} ({tag}) heading")
token = tokens[3 * i]
if token.type != 'heading_open' or token.tag != tag:
assert token.map
raise RuntimeError(f"expected {kind} ({tag}) heading in line {token.map[0] + 1}", token)
for t in tokens[3 * len(wanted):]:
if t.type != 'heading_open' or (info := wanted.get(t.tag)) is None:
continue
assert t.map
raise RuntimeError(
f"only one {info[0]} heading ({t.markup} [text...]) allowed per "
f"{self._toplevel_tag}, but found a second in lines [{t.map[0] + 1}..{t.map[1]}]. "
"please remove all such headings except the first or demote the subsequent headings.",
t)
# books get special handling because they have *two* title tags. doing this with
# generic code is more complicated than it's worth. the checks above have verified
# that both titles actually exist.
if self._toplevel_tag == 'book':
assert tokens[1].children
assert tokens[4].children
if (maybe_id := cast(str, tokens[0].attrs.get('id', ""))):
maybe_id = "xml:id=" + quoteattr(maybe_id)
return (f'<book xmlns="http://docbook.org/ns/docbook"'
f' xmlns:xlink="http://www.w3.org/1999/xlink"'
f' {maybe_id} version="5.0">'
f' <title>{self.renderInline(tokens[1].children, options, env)}</title>'
f' <subtitle>{self.renderInline(tokens[4].children, options, env)}</subtitle>'
f' {super().render(tokens[6:], options, env)}'
f'</book>')
return super().render(tokens, options, env)
def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> tuple[str, dict[str, str]]:
(tag, attrs) = super()._heading_tag(token, tokens, i, options, env)
# render() has already verified that we don't have supernumerary headings and since the
# book tag is handled specially we can leave the check this simple
if token.tag != 'h1':
return (tag, attrs)
return (self._toplevel_tag, attrs | {
'xmlns': "http://docbook.org/ns/docbook",
'xmlns:xlink': "http://www.w3.org/1999/xlink",
})
def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int,
options: OptionsDict, env: MutableMapping[str, Any]) -> str:
result = []
# close existing partintro. the generic render doesn't really need this because
# it doesn't have a concept of structure in the way the manual does.
if self._headings and self._headings[-1] == Heading('part', 1):
result.append("</partintro>")
self._headings[-1] = self._headings[-1]._replace(partintro_closed=True)
# must nest properly for structural includes. this requires saving at least
# the headings stack, but creating new renderers is cheap and much easier.
r = ManualDocBookRenderer(tag, self._manpage_urls, None)
for (included, path) in token.meta['included']:
try:
result.append(r.render(included, options, env))
except Exception as e:
raise RuntimeError(f"rendering {path}") from e
return "".join(result)
def included_options(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
return cast(str, token.meta['rendered-options'])
# TODO minimize docbook diffs with existing conversions. remove soon.
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
return super().paragraph_open(token, tokens, i, options, env) + "\n "
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
return "\n" + super().paragraph_close(token, tokens, i, options, env)
def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
return f"<programlisting>\n{escape(token.content)}</programlisting>"
def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
info = f" language={quoteattr(token.info)}" if token.info != "" else ""
return f"<programlisting{info}>\n{escape(token.content)}</programlisting>"
class DocBookConverter(Converter):
def __renderer__(self, manpage_urls: Mapping[str, str],
parser: Optional[markdown_it.MarkdownIt]) -> ManualDocBookRenderer:
return ManualDocBookRenderer('book', manpage_urls, parser)
class BaseConverter(Converter[md.TR], Generic[md.TR]):
# per-converter configuration for ns:arg=value arguments to include blocks, following
# the include type. html converters need something like this to support chunking, or
# another external method like the chunktocs docbook uses (but block options seem like
# a much nicer way of doing this).
INCLUDE_ARGS_NS: ClassVar[str]
INCLUDE_FRAGMENT_ALLOWED_ARGS: ClassVar[set[str]] = set()
INCLUDE_OPTIONS_ALLOWED_ARGS: ClassVar[set[str]] = set()
_base_paths: list[Path]
_revision: str
_current_type: list[TocEntryType]
def __init__(self, manpage_urls: Mapping[str, str], revision: str):
super().__init__(manpage_urls)
self._revision = revision
def convert(self, file: Path) -> str:
self._base_paths = [ file ]
def convert(self, infile: Path, outfile: Path) -> None:
self._base_paths = [ infile ]
self._current_type = ['book']
try:
with open(file, 'r') as f:
return self._render(f.read())
tokens = self._parse(infile.read_text())
self._postprocess(infile, outfile, tokens)
converted = self._renderer.render(tokens)
outfile.write_text(converted)
except Exception as e:
raise RuntimeError(f"failed to render manual {file}") from e
raise RuntimeError(f"failed to render manual {infile}") from e
def _parse(self, src: str, env: Optional[MutableMapping[str, Any]] = None) -> list[Token]:
tokens = super()._parse(src, env)
def _postprocess(self, infile: Path, outfile: Path, tokens: Sequence[Token]) -> None:
pass
def _parse(self, src: str) -> list[Token]:
tokens = super()._parse(src)
check_structure(self._current_type[-1], tokens)
for token in tokens:
if token.type != "fence" or not token.info.startswith("{=include=} "):
if not is_include(token):
continue
typ = token.info[12:].strip()
directive = token.info[12:].split()
if not directive:
continue
args = { k: v for k, _sep, v in map(lambda s: s.partition('='), directive[1:]) }
typ = directive[0]
if typ == 'options':
token.type = 'included_options'
self._parse_options(token)
elif typ in [ 'sections', 'chapters', 'preface', 'parts', 'appendix' ]:
token.type = 'included_' + typ
self._parse_included_blocks(token, env)
self._process_include_args(token, args, self.INCLUDE_OPTIONS_ALLOWED_ARGS)
self._parse_options(token, args)
else:
raise RuntimeError(f"unsupported structural include type '{typ}'")
fragment_type = typ.removesuffix('s')
if fragment_type not in get_args(FragmentType):
raise RuntimeError(f"unsupported structural include type '{typ}'")
self._current_type.append(cast(FragmentType, fragment_type))
token.type = 'included_' + typ
self._process_include_args(token, args, self.INCLUDE_FRAGMENT_ALLOWED_ARGS)
self._parse_included_blocks(token, args)
self._current_type.pop()
return tokens
def _parse_included_blocks(self, token: Token, env: Optional[MutableMapping[str, Any]]) -> None:
def _process_include_args(self, token: Token, args: dict[str, str], allowed: set[str]) -> None:
ns = self.INCLUDE_ARGS_NS + ":"
args = { k[len(ns):]: v for k, v in args.items() if k.startswith(ns) }
if unknown := set(args.keys()) - allowed:
assert token.map
raise RuntimeError(f"unrecognized include argument in line {token.map[0] + 1}", unknown)
token.meta['include-args'] = args
def _parse_included_blocks(self, token: Token, block_args: dict[str, str]) -> None:
assert token.map
included = token.meta['included'] = []
for (lnum, line) in enumerate(token.content.splitlines(), token.map[0] + 2):
@ -165,13 +90,13 @@ class DocBookConverter(Converter):
try:
self._base_paths.append(path)
with open(path, 'r') as f:
tokens = self._parse(f.read(), env)
tokens = self._parse(f.read())
included.append((tokens, path))
self._base_paths.pop()
except Exception as e:
raise RuntimeError(f"processing included file {path} from line {lnum}") from e
def _parse_options(self, token: Token) -> None:
def _parse_options(self, token: Token, block_args: dict[str, str]) -> None:
assert token.map
items = {}
@ -194,14 +119,479 @@ class DocBookConverter(Converter):
" ".join(items.keys()))
try:
conv = options.DocBookConverter(
self._manpage_urls, self._revision, False, 'fragment', varlist_id, id_prefix)
with open(self._base_paths[-1].parent / source, 'r') as f:
conv.add_options(json.load(f))
token.meta['rendered-options'] = conv.finalize(fragment=True)
token.meta['id-prefix'] = id_prefix
token.meta['list-id'] = varlist_id
token.meta['source'] = json.load(f)
except Exception as e:
raise RuntimeError(f"processing options block in line {token.map[0] + 1}") from e
class RendererMixin(Renderer):
_toplevel_tag: str
_revision: str
def __init__(self, toplevel_tag: str, revision: str, *args: Any, **kwargs: Any):
super().__init__(*args, **kwargs)
self._toplevel_tag = toplevel_tag
self._revision = revision
self.rules |= {
'included_sections': lambda *args: self._included_thing("section", *args),
'included_chapters': lambda *args: self._included_thing("chapter", *args),
'included_preface': lambda *args: self._included_thing("preface", *args),
'included_parts': lambda *args: self._included_thing("part", *args),
'included_appendix': lambda *args: self._included_thing("appendix", *args),
'included_options': self.included_options,
}
def render(self, tokens: Sequence[Token]) -> str:
# books get special handling because they have *two* title tags. doing this with
# generic code is more complicated than it's worth. the checks above have verified
# that both titles actually exist.
if self._toplevel_tag == 'book':
return self._render_book(tokens)
return super().render(tokens)
@abstractmethod
def _render_book(self, tokens: Sequence[Token]) -> str:
raise NotImplementedError()
@abstractmethod
def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int) -> str:
raise NotImplementedError()
@abstractmethod
def included_options(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise NotImplementedError()
class ManualDocBookRenderer(RendererMixin, DocBookRenderer):
def __init__(self, toplevel_tag: str, revision: str, manpage_urls: Mapping[str, str]):
super().__init__(toplevel_tag, revision, manpage_urls)
def _render_book(self, tokens: Sequence[Token]) -> str:
assert tokens[1].children
assert tokens[4].children
if (maybe_id := cast(str, tokens[0].attrs.get('id', ""))):
maybe_id = "xml:id=" + xml.quoteattr(maybe_id)
return (f'<book xmlns="http://docbook.org/ns/docbook"'
f' xmlns:xlink="http://www.w3.org/1999/xlink"'
f' {maybe_id} version="5.0">'
f' <title>{self.renderInline(tokens[1].children)}</title>'
f' <subtitle>{self.renderInline(tokens[4].children)}</subtitle>'
f' {super(DocBookRenderer, self).render(tokens[6:])}'
f'</book>')
def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> tuple[str, dict[str, str]]:
(tag, attrs) = super()._heading_tag(token, tokens, i)
# render() has already verified that we don't have supernumerary headings and since the
# book tag is handled specially we can leave the check this simple
if token.tag != 'h1':
return (tag, attrs)
return (self._toplevel_tag, attrs | {
'xmlns': "http://docbook.org/ns/docbook",
'xmlns:xlink': "http://www.w3.org/1999/xlink",
})
def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int) -> str:
result = []
# close existing partintro. the generic render doesn't really need this because
# it doesn't have a concept of structure in the way the manual does.
if self._headings and self._headings[-1] == Heading('part', 1):
result.append("</partintro>")
self._headings[-1] = self._headings[-1]._replace(partintro_closed=True)
# must nest properly for structural includes. this requires saving at least
# the headings stack, but creating new renderers is cheap and much easier.
r = ManualDocBookRenderer(tag, self._revision, self._manpage_urls)
for (included, path) in token.meta['included']:
try:
result.append(r.render(included))
except Exception as e:
raise RuntimeError(f"rendering {path}") from e
return "".join(result)
def included_options(self, token: Token, tokens: Sequence[Token], i: int) -> str:
conv = options.DocBookConverter(self._manpage_urls, self._revision, False, 'fragment',
token.meta['list-id'], token.meta['id-prefix'])
conv.add_options(token.meta['source'])
return conv.finalize(fragment=True)
# TODO minimize docbook diffs with existing conversions. remove soon.
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return super().paragraph_open(token, tokens, i) + "\n "
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return "\n" + super().paragraph_close(token, tokens, i)
def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return f"<programlisting>\n{xml.escape(token.content)}</programlisting>"
def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
info = f" language={xml.quoteattr(token.info)}" if token.info != "" else ""
return f"<programlisting{info}>\n{xml.escape(token.content)}</programlisting>"
class DocBookConverter(BaseConverter[ManualDocBookRenderer]):
INCLUDE_ARGS_NS = "docbook"
def __init__(self, manpage_urls: Mapping[str, str], revision: str):
super().__init__()
self._renderer = ManualDocBookRenderer('book', revision, manpage_urls)
class HTMLParameters(NamedTuple):
generator: str
stylesheets: Sequence[str]
scripts: Sequence[str]
toc_depth: int
chunk_toc_depth: int
class ManualHTMLRenderer(RendererMixin, HTMLRenderer):
_base_path: Path
_html_params: HTMLParameters
def __init__(self, toplevel_tag: str, revision: str, html_params: HTMLParameters,
manpage_urls: Mapping[str, str], xref_targets: dict[str, XrefTarget],
base_path: Path):
super().__init__(toplevel_tag, revision, manpage_urls, xref_targets)
self._base_path, self._html_params = base_path, html_params
def _push(self, tag: str, hlevel_offset: int) -> Any:
result = (self._toplevel_tag, self._headings, self._attrspans, self._hlevel_offset)
self._hlevel_offset += hlevel_offset
self._toplevel_tag, self._headings, self._attrspans = tag, [], []
return result
def _pop(self, state: Any) -> None:
(self._toplevel_tag, self._headings, self._attrspans, self._hlevel_offset) = state
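# _push/_pop snapshot and restore the per-chunk renderer state (toplevel tag, heading
# stack, pending attrspans, heading level offset) so that included files can be
# rendered in a nested context, or into a separate html:into-file chunk, without
# leaking state into the including document.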
def _render_book(self, tokens: Sequence[Token]) -> str:
assert tokens[4].children
title_id = cast(str, tokens[0].attrs.get('id', ""))
title = self._xref_targets[title_id].title
# subtitles don't have IDs, so we can't use xrefs to get them
subtitle = self.renderInline(tokens[4].children)
toc = TocEntry.of(tokens[0])
return "\n".join([
self._file_header(toc),
' <div class="book">',
' <div class="titlepage">',
' <div>',
f' <div><h1 class="title"><a id="{html.escape(title_id, True)}"></a>{title}</h1></div>',
f' <div><h2 class="subtitle">{subtitle}</h2></div>',
' </div>',
" <hr />",
' </div>',
self._build_toc(tokens, 0),
super(HTMLRenderer, self).render(tokens[6:]),
' </div>',
self._file_footer(toc),
])
def _file_header(self, toc: TocEntry) -> str:
prev_link, up_link, next_link = "", "", ""
prev_a, next_a, parent_title = "", "", "&nbsp;"
home = toc.root
if toc.prev:
prev_link = f'<link rel="prev" href="{toc.prev.target.href()}" title="{toc.prev.target.title}" />'
prev_a = f'<a accesskey="p" href="{toc.prev.target.href()}">Prev</a>'
if toc.parent:
up_link = (
f'<link rel="up" href="{toc.parent.target.href()}" '
f'title="{toc.parent.target.title}" />'
)
if (part := toc.parent) and part.kind != 'book':
assert part.target.title
parent_title = part.target.title
if toc.next:
next_link = f'<link rel="next" href="{toc.next.target.href()}" title="{toc.next.target.title}" />'
next_a = f'<a accesskey="n" href="{toc.next.target.href()}">Next</a>'
return "\n".join([
'<?xml version="1.0" encoding="utf-8" standalone="no"?>',
'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"',
' "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">',
'<html xmlns="http://www.w3.org/1999/xhtml">',
' <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />',
f' <title>{toc.target.title}</title>',
"".join((f'<link rel="stylesheet" type="text/css" href="{html.escape(style, True)}" />'
for style in self._html_params.stylesheets)),
"".join((f'<script src="{html.escape(script, True)}" type="text/javascript"></script>'
for script in self._html_params.scripts)),
f' <meta name="generator" content="{html.escape(self._html_params.generator, True)}" />',
f' <link rel="home" href="{home.target.href()}" title="{home.target.title}" />',
f' {up_link}{prev_link}{next_link}',
' </head>',
' <body>',
' <div class="navheader">',
' <table width="100%" summary="Navigation header">',
' <tr>',
f' <th colspan="3" align="center">{toc.target.title}</th>',
' </tr>',
' <tr>',
f' <td width="20%" align="left">{prev_a}&nbsp;</td>',
f' <th width="60%" align="center">{parent_title}</th>',
f' <td width="20%" align="right">&nbsp;{next_a}</td>',
' </tr>',
' </table>',
' <hr />',
' </div>',
])
def _file_footer(self, toc: TocEntry) -> str:
# prev, next = self._get_prev_and_next()
prev_a, up_a, home_a, next_a = "", "&nbsp;", "&nbsp;", ""
prev_text, up_text, next_text = "", "", ""
home = toc.root
if toc.prev:
prev_a = f'<a accesskey="p" href="{toc.prev.target.href()}">Prev</a>'
assert toc.prev.target.title
prev_text = toc.prev.target.title
if toc.parent:
home_a = f'<a accesskey="h" href="{home.target.href()}">Home</a>'
if toc.parent != home:
up_a = f'<a accesskey="u" href="{toc.parent.target.href()}">Up</a>'
if toc.next:
next_a = f'<a accesskey="n" href="{toc.next.target.href()}">Next</a>'
assert toc.next.target.title
next_text = toc.next.target.title
return "\n".join([
' <div class="navfooter">',
' <hr />',
' <table width="100%" summary="Navigation footer">',
' <tr>',
f' <td width="40%" align="left">{prev_a}&nbsp;</td>',
f' <td width="20%" align="center">{up_a}</td>',
f' <td width="40%" align="right">&nbsp;{next_a}</td>',
' </tr>',
' <tr>',
f' <td width="40%" align="left" valign="top">{prev_text}&nbsp;</td>',
f' <td width="20%" align="center">{home_a}</td>',
f' <td width="40%" align="right" valign="top">&nbsp;{next_text}</td>',
' </tr>',
' </table>',
' </div>',
' </body>',
'</html>',
])
def _heading_tag(self, token: Token, tokens: Sequence[Token], i: int) -> str:
if token.tag == 'h1':
return self._toplevel_tag
return super()._heading_tag(token, tokens, i)
def _build_toc(self, tokens: Sequence[Token], i: int) -> str:
toc = TocEntry.of(tokens[i])
if toc.kind == 'section':
return ""
def walk_and_emit(toc: TocEntry, depth: int) -> list[str]:
if depth <= 0:
return []
result = []
for child in toc.children:
result.append(
f'<dt>'
f' <span class="{html.escape(child.kind, True)}">'
f' <a href="{child.target.href()}">{child.target.toc_html}</a>'
f' </span>'
f'</dt>'
)
# we want to look straight through parts because docbook-xsl does too, but it
# also makes for more useful top-level tocs.
next_level = walk_and_emit(child, depth - (0 if child.kind == 'part' else 1))
if next_level:
result.append(f'<dd><dl>{"".join(next_level)}</dl></dd>')
return result
toc_depth = (
self._html_params.chunk_toc_depth
if toc.starts_new_chunk and toc.kind != 'book'
else self._html_params.toc_depth
)
if not (items := walk_and_emit(toc, toc_depth)):
return ""
return (
f'<div class="toc">'
f' <p><strong>Table of Contents</strong></p>'
f' <dl class="toc">'
f' {"".join(items)}'
f' </dl>'
f'</div>'
)
def _make_hN(self, level: int) -> tuple[str, str]:
# for some reason chapters don't increase the hN nesting count in docbook xslts. duplicate
# this for consistency.
if self._toplevel_tag == 'chapter':
level -= 1
# TODO docbook compat. these are never useful for us, but not having them breaks manual
# compare workflows while docbook is still allowed.
style = ""
if level + self._hlevel_offset < 3 \
and (self._toplevel_tag == 'section' or (self._toplevel_tag == 'chapter' and level > 0)):
style = "clear: both"
tag, hstyle = super()._make_hN(max(1, level))
return tag, style
def _included_thing(self, tag: str, token: Token, tokens: Sequence[Token], i: int) -> str:
outer, inner = [], []
# since books have no non-include content the toplevel book wrapper will not count
# towards nesting depth. other types will have at least a title+id heading which
# *does* count towards the nesting depth. chapters give a -1 to included sections
# mirroring the special handling in _make_hN. sigh.
hoffset = (
0 if not self._headings
else self._headings[-1].level - 1 if self._toplevel_tag == 'chapter'
else self._headings[-1].level
)
outer.append(self._maybe_close_partintro())
into = token.meta['include-args'].get('into-file')
fragments = token.meta['included']
state = self._push(tag, hoffset)
if into:
toc = TocEntry.of(fragments[0][0][0])
inner.append(self._file_header(toc))
# we do not set _hlevel_offset=0 because docbook doesn't either.
else:
inner = outer
for included, path in fragments:
try:
inner.append(self.render(included))
except Exception as e:
raise RuntimeError(f"rendering {path}") from e
if into:
inner.append(self._file_footer(toc))
(self._base_path / into).write_text("".join(inner))
self._pop(state)
return "".join(outer)
def included_options(self, token: Token, tokens: Sequence[Token], i: int) -> str:
conv = options.HTMLConverter(self._manpage_urls, self._revision, False,
token.meta['list-id'], token.meta['id-prefix'],
self._xref_targets)
conv.add_options(token.meta['source'])
return conv.finalize()
def _to_base26(n: int) -> str:
return (_to_base26(n // 26) if n > 26 else "") + chr(ord("A") + n % 26)
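# e.g. _to_base26(0) == "A", _to_base26(1) == "B", _to_base26(25) == "Z", so
# _next_appendix_id() below labels the first appendix "A", the second "B", and so on.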
class HTMLConverter(BaseConverter[ManualHTMLRenderer]):
INCLUDE_ARGS_NS = "html"
INCLUDE_FRAGMENT_ALLOWED_ARGS = { 'into-file' }
_revision: str
_html_params: HTMLParameters
_manpage_urls: Mapping[str, str]
_xref_targets: dict[str, XrefTarget]
_redirection_targets: set[str]
_appendix_count: int = 0
def _next_appendix_id(self) -> str:
self._appendix_count += 1
return _to_base26(self._appendix_count - 1)
def __init__(self, revision: str, html_params: HTMLParameters, manpage_urls: Mapping[str, str]):
super().__init__()
self._revision, self._html_params, self._manpage_urls = revision, html_params, manpage_urls
self._xref_targets = {}
self._redirection_targets = set()
# renderer not set on purpose since it has a dependency on the output path!
def convert(self, infile: Path, outfile: Path) -> None:
self._renderer = ManualHTMLRenderer('book', self._revision, self._html_params,
self._manpage_urls, self._xref_targets, outfile.parent)
super().convert(infile, outfile)
def _parse(self, src: str) -> list[Token]:
tokens = super()._parse(src)
for token in tokens:
if not token.type.startswith('included_') \
or not (into := token.meta['include-args'].get('into-file')):
continue
assert token.map
if len(token.meta['included']) == 0:
raise RuntimeError(f"redirection target {into} in line {token.map[0] + 1} is empty!")
# we use blender-style //path to denote paths relative to the origin file
# (usually index.html). this makes everything a lot easier and clearer.
if not into.startswith("//") or '/' in into[2:]:
raise RuntimeError(f"html:into-file must be a relative-to-origin //filename", into)
into = token.meta['include-args']['into-file'] = into[2:]
if into in self._redirection_targets:
raise RuntimeError(f"redirection target {into} in line {token.map[0] + 1} is already in use")
self._redirection_targets.add(into)
return tokens
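# a hedged illustration of the redirection syntax validated above (file names are
# placeholders): an include block such as
#
#   ```{=include=} chapters html:into-file=//configuration.html
#   configuration/configuration.md
#   ```
#
# renders its chapters into configuration.html next to the main output file instead
# of inlining them into the current chunk.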
# xref | (id, type, heading inlines, file, starts new file)
def _collect_ids(self, tokens: Sequence[Token], target_file: str, typ: str, file_changed: bool
) -> list[XrefTarget | tuple[str, str, Token, str, bool]]:
result: list[XrefTarget | tuple[str, str, Token, str, bool]] = []
# collect all IDs and their xref substitutions. headings are deferred until everything
# has been parsed so we can resolve links in headings. if that's even used anywhere.
for (i, bt) in enumerate(tokens):
if bt.type == 'heading_open' and (id := cast(str, bt.attrs.get('id', ''))):
result.append((id, typ if bt.tag == 'h1' else 'section', tokens[i + 1], target_file,
i == 0 and file_changed))
elif bt.type == 'included_options':
id_prefix = bt.meta['id-prefix']
for opt in bt.meta['source'].keys():
id = make_xml_id(f"{id_prefix}{opt}")
name = html.escape(opt)
result.append(XrefTarget(id, f'<code class="option">{name}</code>', name, None, target_file))
elif bt.type.startswith('included_'):
sub_file = bt.meta['include-args'].get('into-file', target_file)
subtyp = bt.type.removeprefix('included_').removesuffix('s')
for si, (sub, _path) in enumerate(bt.meta['included']):
result += self._collect_ids(sub, sub_file, subtyp, si == 0 and sub_file != target_file)
elif bt.type == 'inline':
assert bt.children
result += self._collect_ids(bt.children, target_file, typ, False)
elif id := cast(str, bt.attrs.get('id', '')):
# anchors and examples have no titles we could use, but we'll have to put
# *something* here to communicate that there's no title.
result.append(XrefTarget(id, "???", None, None, target_file))
return result
def _render_xref(self, id: str, typ: str, inlines: Token, path: str, drop_fragment: bool) -> XrefTarget:
assert inlines.children
title_html = self._renderer.renderInline(inlines.children)
if typ == 'appendix':
# NOTE the docbook compat is strong here
n = self._next_appendix_id()
prefix = f"Appendix\u00A0{n}.\u00A0"
# HACK for docbook compat: prefix the title inlines with appendix id if
# necessary. the alternative is to mess with titlepage rendering in headings,
# which seems just a lot worse than this
prefix_tokens = [Token(type='text', tag='', nesting=0, content=prefix)]
inlines.children = prefix_tokens + list(inlines.children)
title = prefix + title_html
toc_html = f"{n}. {title_html}"
title_html = f"Appendix&nbsp;{n}"
else:
toc_html, title = title_html, title_html
title_html = (
f"<em>{title_html}</em>"
if typ == 'chapter'
else title_html if typ in [ 'book', 'part' ]
else f'the section called “{title_html}”'
)
return XrefTarget(id, title_html, toc_html, re.sub('<.*?>', '', title), path, drop_fragment)
def _postprocess(self, infile: Path, outfile: Path, tokens: Sequence[Token]) -> None:
xref_queue = self._collect_ids(tokens, outfile.name, 'book', True)
failed = False
deferred = []
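# xref targets may depend on one another (a heading title can contain a link to a
# target that has not been resolved yet), so unresolved entries are deferred and
# retried; once a full pass makes no progress, `failed` is set and the next round
# re-raises the first UnresolvedXrefError instead of deferring it again.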
while xref_queue:
for item in xref_queue:
try:
target = item if isinstance(item, XrefTarget) else self._render_xref(*item)
except UnresolvedXrefError as e:
if failed:
raise
deferred.append(item)
continue
if target.id in self._xref_targets:
raise RuntimeError(f"found duplicate id #{target.id}")
self._xref_targets[target.id] = target
if len(deferred) == len(xref_queue):
failed = True # do another round and report the first error
xref_queue = deferred
TocEntry.collect_and_link(self._xref_targets, tokens)
def _build_cli_db(p: argparse.ArgumentParser) -> None:
@ -210,18 +600,40 @@ def _build_cli_db(p: argparse.ArgumentParser) -> None:
p.add_argument('infile', type=Path)
p.add_argument('outfile', type=Path)
def _build_cli_html(p: argparse.ArgumentParser) -> None:
p.add_argument('--manpage-urls', required=True)
p.add_argument('--revision', required=True)
p.add_argument('--generator', default='nixos-render-docs')
p.add_argument('--stylesheet', default=[], action='append')
p.add_argument('--script', default=[], action='append')
p.add_argument('--toc-depth', default=1, type=int)
p.add_argument('--chunk-toc-depth', default=1, type=int)
p.add_argument('infile', type=Path)
p.add_argument('outfile', type=Path)
def _run_cli_db(args: argparse.Namespace) -> None:
with open(args.manpage_urls, 'r') as manpage_urls:
md = DocBookConverter(json.load(manpage_urls), args.revision)
converted = md.convert(args.infile)
args.outfile.write_text(converted)
md.convert(args.infile, args.outfile)
def _run_cli_html(args: argparse.Namespace) -> None:
with open(args.manpage_urls, 'r') as manpage_urls:
md = HTMLConverter(
args.revision,
HTMLParameters(args.generator, args.stylesheet, args.script, args.toc_depth,
args.chunk_toc_depth),
json.load(manpage_urls))
md.convert(args.infile, args.outfile)
def build_cli(p: argparse.ArgumentParser) -> None:
formats = p.add_subparsers(dest='format', required=True)
_build_cli_db(formats.add_parser('docbook'))
_build_cli_html(formats.add_parser('html'))
def run_cli(args: argparse.Namespace) -> None:
if args.format == 'docbook':
_run_cli_db(args)
elif args.format == 'html':
_run_cli_html(args)
else:
raise RuntimeError('format not hooked up', args)
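# a hedged usage sketch, assuming this module is wired up as the `manual` subcommand
# of nixos-render-docs (paths and the revision value are placeholders):
#
#   nixos-render-docs manual html \
#     --manpage-urls ./manpage-urls.json \
#     --revision 23.05 \
#     --stylesheet style.css --script highlightjs/highlight.pack.js \
#     --toc-depth 1 --chunk-toc-depth 1 \
#     manual.md out/index.html
#
# the docbook format takes the same --manpage-urls, --revision, infile and outfile
# arguments, but none of the html-specific stylesheet, script or toc-depth flags.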


@ -0,0 +1,186 @@
from __future__ import annotations
import dataclasses as dc
import html
import itertools
from typing import cast, get_args, Iterable, Literal, Sequence
from markdown_it.token import Token
from .utils import Freezeable
# FragmentType is used to restrict structural include blocks.
FragmentType = Literal['preface', 'part', 'chapter', 'section', 'appendix']
# in the TOC all fragments are allowed, plus the all-encompassing book.
TocEntryType = Literal['book', 'preface', 'part', 'chapter', 'section', 'appendix']
def is_include(token: Token) -> bool:
return token.type == "fence" and token.info.startswith("{=include=} ")
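# for illustration: an include block in the manual sources such as
#
#   ```{=include=} chapters
#   configuration/configuration.md
#   ```
#
# is parsed into a fence token whose info string starts with "{=include=} ", which is
# exactly what is_include() matches; the fence body lists the files to splice in.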
# toplevel file must contain only the title headings and includes, anything else
# would cause strange rendering.
def _check_book_structure(tokens: Sequence[Token]) -> None:
for token in tokens[6:]:
if not is_include(token):
assert token.map
raise RuntimeError(f"unexpected content in line {token.map[0] + 1}, "
"expected structural include")
# much like books, parts may not contain headings other than their title heading.
# this is a limitation of the current renderers and TOC generators that do not handle
# this case well even though it is supported in docbook (and probably supportable
# anywhere else).
def _check_part_structure(tokens: Sequence[Token]) -> None:
_check_fragment_structure(tokens)
for token in tokens[3:]:
if token.type == 'heading_open':
assert token.map
raise RuntimeError(f"unexpected heading in line {token.map[0] + 1}")
# two include blocks must either be adjacent or separated by a heading, otherwise
# we cannot generate a correct TOC (since there'd be nothing to link to between
# the two includes).
def _check_fragment_structure(tokens: Sequence[Token]) -> None:
for i, token in enumerate(tokens):
if is_include(token) \
and i + 1 < len(tokens) \
and not (is_include(tokens[i + 1]) or tokens[i + 1].type == 'heading_open'):
assert token.map
raise RuntimeError(f"unexpected content in line {token.map[0] + 1}, "
"expected heading or structural include")
def check_structure(kind: TocEntryType, tokens: Sequence[Token]) -> None:
wanted = { 'h1': 'title' }
wanted |= { 'h2': 'subtitle' } if kind == 'book' else {}
for (i, (tag, role)) in enumerate(wanted.items()):
if len(tokens) < 3 * (i + 1):
raise RuntimeError(f"missing {role} ({tag}) heading")
token = tokens[3 * i]
if token.type != 'heading_open' or token.tag != tag:
assert token.map
raise RuntimeError(f"expected {role} ({tag}) heading in line {token.map[0] + 1}", token)
for t in tokens[3 * len(wanted):]:
if t.type != 'heading_open' or not (role := wanted.get(t.tag, '')):
continue
assert t.map
raise RuntimeError(
f"only one {role} heading ({t.markup} [text...]) allowed per "
f"{kind}, but found a second in line {t.map[0] + 1}. "
"please remove all such headings except the first or demote the subsequent headings.",
t)
last_heading_level = 0
for token in tokens:
if token.type != 'heading_open':
continue
# book subtitle headings do not need an id, only book title headings do.
# every other heading needs one too. we need this to build a TOC and to
# provide stable links if the manual changes shape.
if 'id' not in token.attrs and (kind != 'book' or token.tag != 'h2'):
assert token.map
raise RuntimeError(f"heading in line {token.map[0] + 1} does not have an id")
level = int(token.tag[1:]) # because tag = h1..h6
if level > last_heading_level + 1:
assert token.map
raise RuntimeError(f"heading in line {token.map[0] + 1} skips one or more heading levels, "
"which is currently not allowed")
last_heading_level = level
if kind == 'book':
_check_book_structure(tokens)
elif kind == 'part':
_check_part_structure(tokens)
else:
_check_fragment_structure(tokens)
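# a hedged sketch of the book structure enforced above (ids and file names are
# placeholders): the toplevel file carries only the title and subtitle headings
# followed by structural includes, e.g.
#
#   # NixOS Manual {#book-nixos-manual}
#   ## Version 23.05
#
#   ```{=include=} parts
#   installation/installation.md
#   configuration/configuration.md
#   ```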
@dc.dataclass(frozen=True)
class XrefTarget:
id: str
"""link label for `[](#local-references)`"""
title_html: str
"""toc label"""
toc_html: str | None
"""text for `<title>` tags and `title="..."` attributes"""
title: str | None
"""path to file that contains the anchor"""
path: str
"""whether to drop the `#anchor` from links when expanding xrefs"""
drop_fragment: bool = False
def href(self) -> str:
path = html.escape(self.path, True)
return path if self.drop_fragment else f"{path}#{html.escape(self.id, True)}"
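# e.g. (illustrative values) XrefTarget("ch-configuration", "Configuration",
# "Configuration", "Configuration", "configuration.html").href() yields
# "configuration.html#ch-configuration"; with drop_fragment=True only the path is
# returned, which is used for headings that open a new chunk.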
@dc.dataclass
class TocEntry(Freezeable):
kind: TocEntryType
target: XrefTarget
parent: TocEntry | None = None
prev: TocEntry | None = None
next: TocEntry | None = None
children: list[TocEntry] = dc.field(default_factory=list)
starts_new_chunk: bool = False
@property
def root(self) -> TocEntry:
return self.parent.root if self.parent else self
@classmethod
def of(cls, token: Token) -> TocEntry:
entry = token.meta.get('TocEntry')
if not isinstance(entry, TocEntry):
raise RuntimeError('requested toc entry, none found', token)
return entry
@classmethod
def collect_and_link(cls, xrefs: dict[str, XrefTarget], tokens: Sequence[Token]) -> TocEntry:
result = cls._collect_entries(xrefs, tokens, 'book')
def flatten_with_parent(this: TocEntry, parent: TocEntry | None) -> Iterable[TocEntry]:
this.parent = parent
return itertools.chain([this], *[ flatten_with_parent(c, this) for c in this.children ])
flat = list(flatten_with_parent(result, None))
prev = flat[0]
prev.starts_new_chunk = True
paths_seen = set([prev.target.path])
for c in flat[1:]:
if prev.target.path != c.target.path and c.target.path not in paths_seen:
c.starts_new_chunk = True
c.prev, prev.next = prev, c
prev = c
paths_seen.add(c.target.path)
for c in flat:
c.freeze()
return result
@classmethod
def _collect_entries(cls, xrefs: dict[str, XrefTarget], tokens: Sequence[Token],
kind: TocEntryType) -> TocEntry:
# we assume that check_structure has been run recursively over the entire input.
# list contains (tag, entry) pairs that will collapse to a single entry for
# the full sequence.
entries: list[tuple[str, TocEntry]] = []
for token in tokens:
if token.type.startswith('included_') and (included := token.meta.get('included')):
fragment_type_str = token.type[9:].removesuffix('s')
assert fragment_type_str in get_args(TocEntryType)
fragment_type = cast(TocEntryType, fragment_type_str)
for fragment, _path in included:
entries[-1][1].children.append(cls._collect_entries(xrefs, fragment, fragment_type))
elif token.type == 'heading_open' and (id := cast(str, token.attrs.get('id', ''))):
while len(entries) > 1 and entries[-1][0] >= token.tag:
entries[-2][1].children.append(entries.pop()[1])
entries.append((token.tag,
TocEntry(kind if token.tag == 'h1' else 'section', xrefs[id])))
token.meta['TocEntry'] = entries[-1][1]
while len(entries) > 1:
entries[-2][1].children.append(entries.pop()[1])
return entries[0][1]


@ -1,6 +1,6 @@
from abc import ABC
from collections.abc import Mapping, MutableMapping, Sequence
from typing import Any, Callable, cast, get_args, Iterable, Literal, NoReturn, Optional
from typing import Any, Callable, cast, Generic, get_args, Iterable, Literal, NoReturn, Optional, TypeVar
import dataclasses
import re
@ -44,11 +44,11 @@ AttrBlockKind = Literal['admonition', 'example']
AdmonitionKind = Literal["note", "caution", "tip", "important", "warning"]
class Renderer(markdown_it.renderer.RendererProtocol):
class Renderer:
_admonitions: dict[AdmonitionKind, tuple[RenderFn, RenderFn]]
_admonition_stack: list[AdmonitionKind]
def __init__(self, manpage_urls: Mapping[str, str], parser: Optional[markdown_it.MarkdownIt] = None):
def __init__(self, manpage_urls: Mapping[str, str]):
self._manpage_urls = manpage_urls
self.rules = {
'text': self.text,
@ -104,169 +104,120 @@ class Renderer(markdown_it.renderer.RendererProtocol):
def _join_inline(self, ls: Iterable[str]) -> str:
return "".join(ls)
def admonition_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def admonition_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
tag = token.meta['kind']
self._admonition_stack.append(tag)
return self._admonitions[tag][0](token, tokens, i, options, env)
def admonition_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
return self._admonitions[self._admonition_stack.pop()][1](token, tokens, i, options, env)
return self._admonitions[tag][0](token, tokens, i)
def admonition_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
return self._admonitions[self._admonition_stack.pop()][1](token, tokens, i)
def render(self, tokens: Sequence[Token], options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def render(self, tokens: Sequence[Token]) -> str:
def do_one(i: int, token: Token) -> str:
if token.type == "inline":
assert token.children is not None
return self.renderInline(token.children, options, env)
return self.renderInline(token.children)
elif token.type in self.rules:
return self.rules[token.type](tokens[i], tokens, i, options, env)
return self.rules[token.type](tokens[i], tokens, i)
else:
raise NotImplementedError("md token not supported yet", token)
return self._join_block(map(lambda arg: do_one(*arg), enumerate(tokens)))
def renderInline(self, tokens: Sequence[Token], options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def renderInline(self, tokens: Sequence[Token]) -> str:
def do_one(i: int, token: Token) -> str:
if token.type in self.rules:
return self.rules[token.type](tokens[i], tokens, i, options, env)
return self.rules[token.type](tokens[i], tokens, i)
else:
raise NotImplementedError("md token not supported yet", token)
return self._join_inline(map(lambda arg: do_one(*arg), enumerate(tokens)))
def text(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def text(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def paragraph_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def paragraph_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def hardbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def softbreak(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def softbreak(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def code_inline(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def code_inline(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def code_block(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def code_block(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def link_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def link_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def link_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def link_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def list_item_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def list_item_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def bullet_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def em_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def em_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def em_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def em_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def strong_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def strong_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def strong_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def strong_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def fence(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def blockquote_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def blockquote_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def note_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def note_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def note_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def note_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def caution_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def caution_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def caution_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def caution_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def important_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def important_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def important_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def important_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def tip_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def tip_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def tip_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def tip_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def warning_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def warning_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def warning_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def warning_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def dl_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dl_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def dl_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dl_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def dt_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dt_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def dt_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dt_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def dd_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dd_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def dd_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def dd_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def myst_role(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def myst_role(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def attr_span_end(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def ordered_list_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def example_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def example_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def example_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported", token)
def _is_escaped(src: str, pos: int) -> bool:
@@ -466,12 +417,26 @@ def _block_attr(md: markdown_it.MarkdownIt) -> None:
md.core.ruler.push("block_attr", block_attr)
class Converter(ABC):
__renderer__: Callable[[Mapping[str, str], markdown_it.MarkdownIt], Renderer]
TR = TypeVar('TR', bound='Renderer')
def __init__(self, manpage_urls: Mapping[str, str]):
self._manpage_urls = manpage_urls
class Converter(ABC, Generic[TR]):
# we explicitly disable markdown-it rendering support and use our own entirely.
# rendering is well separated from parsing and our renderers carry much more state than
# markdown-it easily acknowledges as 'good' (unless we used the untyped env args to
# shuttle that state around, which is very fragile)
class ForbiddenRenderer(markdown_it.renderer.RendererProtocol):
__output__ = "none"
def __init__(self, parser: Optional[markdown_it.MarkdownIt]):
pass
def render(self, tokens: Sequence[Token], options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
raise NotImplementedError("do not use Converter._md.renderer. 'tis a silly place")
_renderer: TR
def __init__(self) -> None:
self._md = markdown_it.MarkdownIt(
"commonmark",
{
@@ -479,7 +444,7 @@ class Converter(ABC):
'html': False, # not useful since we target many formats
'typographer': True, # required for smartquotes
},
renderer_cls=lambda parser: self.__renderer__(self._manpage_urls, parser)
renderer_cls=self.ForbiddenRenderer
)
self._md.use(
container_plugin,
@@ -496,10 +461,9 @@ class Converter(ABC):
self._md.use(_block_attr)
self._md.enable(["smartquotes", "replacements"])
def _parse(self, src: str, env: Optional[MutableMapping[str, Any]] = None) -> list[Token]:
return self._md.parse(src, env if env is not None else {})
def _parse(self, src: str) -> list[Token]:
return self._md.parse(src, {})
def _render(self, src: str, env: Optional[MutableMapping[str, Any]] = None) -> str:
env = {} if env is None else env
tokens = self._parse(src, env)
return self._md.renderer.render(tokens, self._md.options, env) # type: ignore[no-any-return]
def _render(self, src: str) -> str:
tokens = self._parse(src)
return self._renderer.render(tokens)
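For reference, a minimal sketch (hypothetical subclass name, not part of this commit) of how a Converter is wired up under the new scheme: the base class only configures the markdown-it parser, while each subclass constructs its renderer directly and _render() drives that renderer over the parsed tokens, exactly as the test converters further down do.

import nixos_render_docs as nrd

class ExampleConverter(nrd.md.Converter[nrd.docbook.DocBookRenderer]):
    def __init__(self, manpage_urls: dict[str, str]) -> None:
        super().__init__()  # parser setup only; markdown-it's own renderer stays disabled
        self._renderer = nrd.docbook.DocBookRenderer(manpage_urls)

# usage: ExampleConverter({})._render("some *markdown*") parses with self._md
# and renders via self._renderer instead of self._md.renderer.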

View file

@@ -1,23 +1,26 @@
from __future__ import annotations
import argparse
import html
import json
import xml.sax.saxutils as xml
from abc import abstractmethod
from collections.abc import Mapping, MutableMapping, Sequence
from markdown_it.utils import OptionsDict
from collections.abc import Mapping, Sequence
from markdown_it.token import Token
from typing import Any, Optional
from typing import Any, Generic, Optional
from urllib.parse import quote
from xml.sax.saxutils import escape, quoteattr
import markdown_it
from . import md
from . import parallel
from .asciidoc import AsciiDocRenderer, asciidoc_escape
from .commonmark import CommonMarkRenderer
from .docbook import DocBookRenderer, make_xml_id
from .html import HTMLRenderer
from .manpage import ManpageRenderer, man_escape
from .manual_structure import XrefTarget
from .md import Converter, md_escape, md_make_code
from .types import OptionLoc, Option, RenderedOption
@@ -30,15 +33,13 @@ def option_is(option: Option, key: str, typ: str) -> Optional[dict[str, str]]:
return None
return option[key] # type: ignore[return-value]
class BaseConverter(Converter):
class BaseConverter(Converter[md.TR], Generic[md.TR]):
__option_block_separator__: str
_options: dict[str, RenderedOption]
def __init__(self, manpage_urls: Mapping[str, str],
revision: str,
markdown_by_default: bool):
super().__init__(manpage_urls)
def __init__(self, revision: str, markdown_by_default: bool):
super().__init__()
self._options = {}
self._revision = revision
self._markdown_by_default = markdown_by_default
@@ -153,7 +154,7 @@ class BaseConverter(Converter):
# since it's good enough so far.
@classmethod
@abstractmethod
def _parallel_render_init_worker(cls, a: Any) -> BaseConverter: raise NotImplementedError()
def _parallel_render_init_worker(cls, a: Any) -> BaseConverter[md.TR]: raise NotImplementedError()
def _render_option(self, name: str, option: dict[str, Any]) -> RenderedOption:
try:
@@ -162,7 +163,7 @@ class BaseConverter(Converter):
raise Exception(f"Failed to render option {name}") from e
@classmethod
def _parallel_render_step(cls, s: BaseConverter, a: Any) -> RenderedOption:
def _parallel_render_step(cls, s: BaseConverter[md.TR], a: Any) -> RenderedOption:
return s._render_option(*a)
def add_options(self, options: dict[str, Any]) -> None:
@@ -175,32 +176,25 @@ class BaseConverter(Converter):
def finalize(self) -> str: raise NotImplementedError()
class OptionDocsRestrictions:
def heading_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def heading_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported in options doc", token)
def heading_close(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def heading_close(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported in options doc", token)
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def attr_span_begin(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported in options doc", token)
def example_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def example_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
raise RuntimeError("md token not supported in options doc", token)
class OptionsDocBookRenderer(OptionDocsRestrictions, DocBookRenderer):
# TODO keep optionsDocBook diff small. remove soon if rendering is still good.
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
token.meta['compact'] = False
return super().ordered_list_open(token, tokens, i, options, env)
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int, options: OptionsDict,
env: MutableMapping[str, Any]) -> str:
return super().ordered_list_open(token, tokens, i)
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
token.meta['compact'] = False
return super().bullet_list_open(token, tokens, i, options, env)
return super().bullet_list_open(token, tokens, i)
class DocBookConverter(BaseConverter):
__renderer__ = OptionsDocBookRenderer
class DocBookConverter(BaseConverter[OptionsDocBookRenderer]):
__option_block_separator__ = ""
def __init__(self, manpage_urls: Mapping[str, str],
@@ -209,13 +203,14 @@ class DocBookConverter(BaseConverter):
document_type: str,
varlist_id: str,
id_prefix: str):
super().__init__(manpage_urls, revision, markdown_by_default)
super().__init__(revision, markdown_by_default)
self._renderer = OptionsDocBookRenderer(manpage_urls)
self._document_type = document_type
self._varlist_id = varlist_id
self._id_prefix = id_prefix
def _parallel_render_prepare(self) -> Any:
return (self._manpage_urls, self._revision, self._markdown_by_default, self._document_type,
return (self._renderer._manpage_urls, self._revision, self._markdown_by_default, self._document_type,
self._varlist_id, self._id_prefix)
@classmethod
def _parallel_render_init_worker(cls, a: Any) -> DocBookConverter:
@@ -248,10 +243,10 @@ class DocBookConverter(BaseConverter):
def _decl_def_entry(self, href: Optional[str], name: str) -> list[str]:
if href is not None:
href = " xlink:href=" + quoteattr(href)
href = " xlink:href=" + xml.quoteattr(href)
return [
f"<member><filename{href}>",
escape(name),
xml.escape(name),
"</filename></member>"
]
@@ -281,8 +276,8 @@ class DocBookConverter(BaseConverter):
result += [
"<varlistentry>",
# NOTE adding extra spaces here introduces spaces into xref link expansions
(f"<term xlink:href={quoteattr('#' + id)} xml:id={quoteattr(id)}>" +
f"<option>{escape(name)}</option></term>"),
(f"<term xlink:href={xml.quoteattr('#' + id)} xml:id={xml.quoteattr(id)}>" +
f"<option>{xml.escape(name)}</option></term>"),
"<listitem>"
]
result += opt.lines
@@ -300,11 +295,7 @@ class DocBookConverter(BaseConverter):
class OptionsManpageRenderer(OptionDocsRestrictions, ManpageRenderer):
pass
class ManpageConverter(BaseConverter):
def __renderer__(self, manpage_urls: Mapping[str, str],
parser: Optional[markdown_it.MarkdownIt] = None) -> OptionsManpageRenderer:
return OptionsManpageRenderer(manpage_urls, self._options_by_id, parser)
class ManpageConverter(BaseConverter[OptionsManpageRenderer]):
__option_block_separator__ = ".sp"
_options_by_id: dict[str, str]
@@ -314,8 +305,9 @@ class ManpageConverter(BaseConverter):
*,
# only for parallel rendering
_options_by_id: Optional[dict[str, str]] = None):
super().__init__(revision, markdown_by_default)
self._options_by_id = _options_by_id or {}
super().__init__({}, revision, markdown_by_default)
self._renderer = OptionsManpageRenderer({}, self._options_by_id)
def _parallel_render_prepare(self) -> Any:
return ((self._revision, self._markdown_by_default), { '_options_by_id': self._options_by_id })
@@ -324,10 +316,9 @@ class ManpageConverter(BaseConverter):
return cls(*a[0], **a[1])
def _render_option(self, name: str, option: dict[str, Any]) -> RenderedOption:
assert isinstance(self._md.renderer, OptionsManpageRenderer)
links = self._md.renderer.link_footnotes = []
links = self._renderer.link_footnotes = []
result = super()._render_option(name, option)
self._md.renderer.link_footnotes = None
self._renderer.link_footnotes = None
return result._replace(links=links)
def add_options(self, options: dict[str, Any]) -> None:
@@ -339,12 +330,11 @@ class ManpageConverter(BaseConverter):
if lit := option_is(option, key, 'literalDocBook'):
raise RuntimeError("can't render manpages in the presence of docbook")
else:
assert isinstance(self._md.renderer, OptionsManpageRenderer)
try:
self._md.renderer.inline_code_is_quoted = False
self._renderer.inline_code_is_quoted = False
return super()._render_code(option, key)
finally:
self._md.renderer.inline_code_is_quoted = True
self._renderer.inline_code_is_quoted = True
def _render_description(self, desc: str | dict[str, Any]) -> list[str]:
if isinstance(desc, str) and not self._markdown_by_default:
@@ -428,12 +418,15 @@ class ManpageConverter(BaseConverter):
class OptionsCommonMarkRenderer(OptionDocsRestrictions, CommonMarkRenderer):
pass
class CommonMarkConverter(BaseConverter):
__renderer__ = OptionsCommonMarkRenderer
class CommonMarkConverter(BaseConverter[OptionsCommonMarkRenderer]):
__option_block_separator__ = ""
def __init__(self, manpage_urls: Mapping[str, str], revision: str, markdown_by_default: bool):
super().__init__(revision, markdown_by_default)
self._renderer = OptionsCommonMarkRenderer(manpage_urls)
def _parallel_render_prepare(self) -> Any:
return (self._manpage_urls, self._revision, self._markdown_by_default)
return (self._renderer._manpage_urls, self._revision, self._markdown_by_default)
@classmethod
def _parallel_render_init_worker(cls, a: Any) -> CommonMarkConverter:
return cls(*a)
@@ -481,12 +474,15 @@ class CommonMarkConverter(BaseConverter):
class OptionsAsciiDocRenderer(OptionDocsRestrictions, AsciiDocRenderer):
pass
class AsciiDocConverter(BaseConverter):
__renderer__ = AsciiDocRenderer
class AsciiDocConverter(BaseConverter[OptionsAsciiDocRenderer]):
__option_block_separator__ = ""
def __init__(self, manpage_urls: Mapping[str, str], revision: str, markdown_by_default: bool):
super().__init__(revision, markdown_by_default)
self._renderer = OptionsAsciiDocRenderer(manpage_urls)
def _parallel_render_prepare(self) -> Any:
return (self._manpage_urls, self._revision, self._markdown_by_default)
return (self._renderer._manpage_urls, self._revision, self._markdown_by_default)
@classmethod
def _parallel_render_init_worker(cls, a: Any) -> AsciiDocConverter:
return cls(*a)
@@ -531,6 +527,109 @@ class AsciiDocConverter(BaseConverter):
return "\n".join(result)
class OptionsHTMLRenderer(OptionDocsRestrictions, HTMLRenderer):
# TODO docbook compat. must be removed together with the matching docbook handlers.
def ordered_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
token.meta['compact'] = False
return super().ordered_list_open(token, tokens, i)
def bullet_list_open(self, token: Token, tokens: Sequence[Token], i: int) -> str:
token.meta['compact'] = False
return super().bullet_list_open(token, tokens, i)
def fence(self, token: Token, tokens: Sequence[Token], i: int) -> str:
# TODO use token.info. docbook doesn't so we can't yet.
return f'<pre class="programlisting">{html.escape(token.content)}</pre>'
class HTMLConverter(BaseConverter[OptionsHTMLRenderer]):
__option_block_separator__ = ""
def __init__(self, manpage_urls: Mapping[str, str], revision: str, markdown_by_default: bool,
varlist_id: str, id_prefix: str, xref_targets: Mapping[str, XrefTarget]):
super().__init__(revision, markdown_by_default)
self._xref_targets = xref_targets
self._varlist_id = varlist_id
self._id_prefix = id_prefix
self._renderer = OptionsHTMLRenderer(manpage_urls, self._xref_targets)
def _parallel_render_prepare(self) -> Any:
return (self._renderer._manpage_urls, self._revision, self._markdown_by_default,
self._varlist_id, self._id_prefix, self._xref_targets)
@classmethod
def _parallel_render_init_worker(cls, a: Any) -> HTMLConverter:
return cls(*a)
def _render_code(self, option: dict[str, Any], key: str) -> list[str]:
if lit := option_is(option, key, 'literalDocBook'):
raise RuntimeError("can't render html in the presence of docbook")
else:
return super()._render_code(option, key)
def _render_description(self, desc: str | dict[str, Any]) -> list[str]:
if isinstance(desc, str) and not self._markdown_by_default:
raise RuntimeError("can't render html in the presence of docbook")
else:
return super()._render_description(desc)
def _related_packages_header(self) -> list[str]:
return [
'<p><span class="emphasis"><em>Related packages:</em></span></p>',
]
def _decl_def_header(self, header: str) -> list[str]:
return [
f'<p><span class="emphasis"><em>{header}:</em></span></p>',
'<table border="0" summary="Simple list" class="simplelist">'
]
def _decl_def_entry(self, href: Optional[str], name: str) -> list[str]:
if href is not None:
href = f' href="{html.escape(href, True)}"'
return [
"<tr><td>",
f'<code class="filename"><a class="filename" {href} target="_top">',
f'{html.escape(name)}',
'</a></code>',
"</td></tr>"
]
def _decl_def_footer(self) -> list[str]:
return [ "</table>" ]
def finalize(self) -> str:
result = []
result += [
'<div class="variablelist">',
f'<a id="{html.escape(self._varlist_id, True)}"></a>',
' <dl class="variablelist">',
]
for (name, opt) in self._sorted_options():
id = make_xml_id(self._id_prefix + name)
target = self._xref_targets[id]
result += [
'<dt>',
' <span class="term">',
# docbook compat, these could be one tag
f' <a id="{html.escape(id, True)}"></a><a class="term" href="{target.href()}">'
# no spaces here (and string merging) for docbook output compat
f'<code class="option">{html.escape(name)}</code>',
' </a>',
' </span>',
'</dt>',
'<dd>',
]
result += opt.lines
result += [
"</dd>",
]
result += [
" </dl>",
"</div>"
]
return "\n".join(result)
def _build_cli_db(p: argparse.ArgumentParser) -> None:
p.add_argument('--manpage-urls', required=True)
p.add_argument('--revision', required=True)

View file

@@ -1,8 +1,7 @@
from collections.abc import Sequence, MutableMapping
from collections.abc import Sequence
from typing import Any, Callable, Optional, Tuple, NamedTuple
from markdown_it.token import Token
from markdown_it.utils import OptionsDict
OptionLoc = str | dict[str, str]
Option = dict[str, str | dict[str, str] | list[OptionLoc]]
@@ -12,4 +11,4 @@ class RenderedOption(NamedTuple):
lines: list[str]
links: Optional[list[str]] = None
RenderFn = Callable[[Token, Sequence[Token], int, OptionsDict, MutableMapping[str, Any]], str]
RenderFn = Callable[[Token, Sequence[Token], int], str]

View file

@@ -0,0 +1,21 @@
from typing import Any
_frozen_classes: dict[type, type] = {}
# make a derived class freezable (ie, disallow modifications).
# we do this by changing the class of an instance at runtime when freeze()
# is called, providing a derived class that is exactly the same except
# for a __setattr__ that raises an error when called. this beats having
# a field for frozenness and an unconditional __setattr__ that checks this
# field because it does not insert anything into the class dict.
class Freezeable:
def freeze(self) -> None:
cls = type(self)
if not (frozen := _frozen_classes.get(cls)):
def __setattr__(instance: Any, n: str, v: Any) -> None:
raise TypeError(f'{cls.__name__} is frozen')
frozen = type(cls.__name__, (cls,), {
'__setattr__': __setattr__,
})
_frozen_classes[cls] = frozen
self.__class__ = frozen
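A brief usage sketch (hypothetical subclass, not part of this commit) of the behaviour described in the comment above: once freeze() is called, the instance's class is swapped for the generated frozen variant, so any further attribute assignment fails.

class Point(Freezeable):
    def __init__(self, x: int, y: int) -> None:
        self.x = x
        self.y = y

p = Point(1, 2)
p.x = 3      # allowed: freeze() has not been called yet
p.freeze()
p.x = 4      # raises TypeError: Point is frozen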

View file

@@ -1,9 +1,11 @@
import nixos_render_docs
import nixos_render_docs as nrd
from sample_md import sample1
class Converter(nixos_render_docs.md.Converter):
__renderer__ = nixos_render_docs.asciidoc.AsciiDocRenderer
class Converter(nrd.md.Converter[nrd.asciidoc.AsciiDocRenderer]):
def __init__(self, manpage_urls: dict[str, str]):
super().__init__()
self._renderer = nrd.asciidoc.AsciiDocRenderer(manpage_urls)
def test_lists() -> None:
c = Converter({})

View file

@@ -1,4 +1,4 @@
import nixos_render_docs
import nixos_render_docs as nrd
from sample_md import sample1
@@ -6,8 +6,10 @@ from typing import Mapping, Optional
import markdown_it
class Converter(nixos_render_docs.md.Converter):
__renderer__ = nixos_render_docs.commonmark.CommonMarkRenderer
class Converter(nrd.md.Converter[nrd.commonmark.CommonMarkRenderer]):
def __init__(self, manpage_urls: Mapping[str, str]):
super().__init__()
self._renderer = nrd.commonmark.CommonMarkRenderer(manpage_urls)
# NOTE: in these tests we represent trailing spaces by ` ` and replace them with real space later,
# since a number of editors will strip trailing whitespace on save and that would break the tests.

View file

@@ -1,10 +1,12 @@
import nixos_render_docs
import nixos_render_docs as nrd
from markdown_it.token import Token
class Converter(nixos_render_docs.md.Converter):
class Converter(nrd.md.Converter[nrd.docbook.DocBookRenderer]):
# actual renderer doesn't matter, we're just parsing.
__renderer__ = nixos_render_docs.docbook.DocBookRenderer
def __init__(self, manpage_urls: dict[str, str]) -> None:
super().__init__()
self._renderer = nrd.docbook.DocBookRenderer(manpage_urls)
def test_heading_id_absent() -> None:
c = Converter({})

View file

@@ -0,0 +1,179 @@
import nixos_render_docs as nrd
import pytest
from sample_md import sample1
class Converter(nrd.md.Converter[nrd.html.HTMLRenderer]):
def __init__(self, manpage_urls: dict[str, str], xrefs: dict[str, nrd.manual_structure.XrefTarget]):
super().__init__()
self._renderer = nrd.html.HTMLRenderer(manpage_urls, xrefs)
def unpretty(s: str) -> str:
return "".join(map(str.strip, s.splitlines())).replace('', ' ').replace('', '\n')
def test_lists_styles() -> None:
# nested lists rotate through a number of list styles
c = Converter({}, {})
assert c._render("- - - - foo") == unpretty("""
<div class="itemizedlist"><ul class="itemizedlist compact" style="list-style-type: disc;">
<li class="listitem">
<div class="itemizedlist"><ul class="itemizedlist compact" style="list-style-type: circle;">
<li class="listitem">
<div class="itemizedlist"><ul class="itemizedlist compact" style="list-style-type: square;">
<li class="listitem">
<div class="itemizedlist"><ul class="itemizedlist compact" style="list-style-type: disc;">
<li class="listitem"><p>foo</p></li>
</ul></div>
</li>
</ul></div>
</li>
</ul></div>
</li>
</ul></div>
""")
assert c._render("1. 1. 1. 1. 1. 1. foo") == unpretty("""
<div class="orderedlist"><ol class="orderedlist compact" type="1">
<li class="listitem">
<div class="orderedlist"><ol class="orderedlist compact" type="a">
<li class="listitem">
<div class="orderedlist"><ol class="orderedlist compact" type="i">
<li class="listitem">
<div class="orderedlist"><ol class="orderedlist compact" type="A">
<li class="listitem">
<div class="orderedlist"><ol class="orderedlist compact" type="I">
<li class="listitem">
<div class="orderedlist"><ol class="orderedlist compact" type="1">
<li class="listitem"><p>foo</p></li>
</ol></div>
</li>
</ol></div>
</li>
</ol></div>
</li>
</ol></div>
</li>
</ol></div>
</li>
</ol></div>
""")
def test_xrefs() -> None:
# xrefs are resolved against the provided target mapping; unknown ids raise UnresolvedXrefError
c = Converter({}, {
'foo': nrd.manual_structure.XrefTarget('foo', '<hr/>', 'toc1', 'title1', 'index.html'),
'bar': nrd.manual_structure.XrefTarget('bar', '<br/>', 'toc2', 'title2', 'index.html', True),
})
assert c._render("[](#foo)") == '<p><a class="xref" href="index.html#foo" title="title1" ><hr/></a></p>'
assert c._render("[](#bar)") == '<p><a class="xref" href="index.html" title="title2" ><br/></a></p>'
with pytest.raises(nrd.html.UnresolvedXrefError) as exc:
c._render("[](#baz)")
assert exc.value.args[0] == 'bad local reference, id #baz not known'
def test_full() -> None:
c = Converter({ 'man(1)': 'http://example.org' }, {})
assert c._render(sample1) == unpretty("""
<div class="warning">
<h3 class="title">Warning</h3>
<p>foo</p>
<div class="note">
<h3 class="title">Note</h3>
<p>nested</p>
</div>
</div>
<p>
<a class="link" href="link" target="_top">
multiline
</a>
</p>
<p>
<a class="link" href="http://example.org" target="_top">
<span class="citerefentry"><span class="refentrytitle">man</span>(1)</span>
</a> reference
</p>
<p><a id="b" />some <a id="a" />nested anchors</p>
<p>
<span class="emphasis"><em>emph</em></span>
<span class="strong"><strong>strong</strong></span>
<span class="emphasis"><em>nesting emph <span class="strong"><strong>and strong</strong></span>
and <code class="literal">code</code></em></span>
</p>
<div class="itemizedlist">
<ul class="itemizedlist " style="list-style-type: disc;">
<li class="listitem"><p>wide bullet</p></li>
<li class="listitem"><p>list</p></li>
</ul>
</div>
<div class="orderedlist">
<ol class="orderedlist " type="1">
<li class="listitem"><p>wide ordered</p></li>
<li class="listitem"><p>list</p></li>
</ol>
</div>
<div class="itemizedlist">
<ul class="itemizedlist compact" style="list-style-type: disc;">
<li class="listitem"><p>narrow bullet</p></li>
<li class="listitem"><p>list</p></li>
</ul>
</div>
<div class="orderedlist">
<ol class="orderedlist compact" type="1">
<li class="listitem"><p>narrow ordered</p></li>
<li class="listitem"><p>list</p></li>
</ol>
</div>
<div class="blockquote">
<blockquote class="blockquote">
<p>quotes</p>
<div class="blockquote">
<blockquote class="blockquote">
<p>with <span class="emphasis"><em>nesting</em></span></p>
<pre class="programlisting">
nested code block
</pre>
</blockquote>
</div>
<div class="itemizedlist">
<ul class="itemizedlist compact" style="list-style-type: disc;">
<li class="listitem"><p>and lists</p></li>
<li class="listitem">
<pre class="programlisting">
containing code
</pre>
</li>
</ul>
</div>
<p>and more quote</p>
</blockquote>
</div>
<div class="orderedlist">
<ol class="orderedlist compact" start="100" type="1">
<li class="listitem"><p>list starting at 100</p></li>
<li class="listitem"><p>goes on</p></li>
</ol>
</div>
<div class="variablelist">
<dl class="variablelist">
<dt><span class="term">deflist</span></dt>
<dd>
<div class="blockquote">
<blockquote class="blockquote">
<p>
with a quote
and stuff
</p>
</blockquote>
</div>
<pre class="programlisting">
code block
</pre>
<pre class="programlisting">
fenced block
</pre>
<p>text</p>
</dd>
<dt><span class="term">more stuff in same deflist</span></dt>
<dd>
<p>foo</p>
</dd>
</dl>
</div>""")

View file

@@ -1,11 +1,13 @@
import nixos_render_docs
import nixos_render_docs as nrd
import pytest
from markdown_it.token import Token
class Converter(nixos_render_docs.md.Converter):
class Converter(nrd.md.Converter[nrd.docbook.DocBookRenderer]):
# actual renderer doesn't matter, we're just parsing.
__renderer__ = nixos_render_docs.docbook.DocBookRenderer
def __init__(self, manpage_urls: dict[str, str]) -> None:
super().__init__()
self._renderer = nrd.docbook.DocBookRenderer(manpage_urls)
@pytest.mark.parametrize("ordered", [True, False])
def test_list_wide(ordered: bool) -> None:

View file

@@ -1,4 +1,4 @@
import nixos_render_docs
import nixos_render_docs as nrd
from sample_md import sample1
@@ -6,15 +6,10 @@ from typing import Mapping, Optional
import markdown_it
class Converter(nixos_render_docs.md.Converter):
def __renderer__(self, manpage_urls: Mapping[str, str],
parser: Optional[markdown_it.MarkdownIt] = None
) -> nixos_render_docs.manpage.ManpageRenderer:
return nixos_render_docs.manpage.ManpageRenderer(manpage_urls, self.options_by_id, parser)
class Converter(nrd.md.Converter[nrd.manpage.ManpageRenderer]):
def __init__(self, manpage_urls: Mapping[str, str], options_by_id: dict[str, str] = {}):
self.options_by_id = options_by_id
super().__init__(manpage_urls)
super().__init__()
self._renderer = nrd.manpage.ManpageRenderer(manpage_urls, options_by_id)
def test_inline_code() -> None:
c = Converter({})
@@ -32,17 +27,15 @@ def test_expand_link_targets() -> None:
def test_collect_links() -> None:
c = Converter({}, { '#foo': "bar" })
assert isinstance(c._md.renderer, nixos_render_docs.manpage.ManpageRenderer)
c._md.renderer.link_footnotes = []
c._renderer.link_footnotes = []
assert c._render("[a](link1) [b](link2)") == "\\fBa\\fR[1]\\fR \\fBb\\fR[2]\\fR"
assert c._md.renderer.link_footnotes == ['link1', 'link2']
assert c._renderer.link_footnotes == ['link1', 'link2']
def test_dedup_links() -> None:
c = Converter({}, { '#foo': "bar" })
assert isinstance(c._md.renderer, nixos_render_docs.manpage.ManpageRenderer)
c._md.renderer.link_footnotes = []
c._renderer.link_footnotes = []
assert c._render("[a](link) [b](link)") == "\\fBa\\fR[1]\\fR \\fBb\\fR[1]\\fR"
assert c._md.renderer.link_footnotes == ['link']
assert c._renderer.link_footnotes == ['link']
def test_full() -> None:
c = Converter({ 'man(1)': 'http://example.org' })

View file

@@ -1,10 +1,12 @@
import nixos_render_docs
import nixos_render_docs as nrd
from markdown_it.token import Token
class Converter(nixos_render_docs.md.Converter):
class Converter(nrd.md.Converter[nrd.docbook.DocBookRenderer]):
# actual renderer doesn't matter, we're just parsing.
__renderer__ = nixos_render_docs.docbook.DocBookRenderer
def __init__(self, manpage_urls: dict[str, str]) -> None:
super().__init__()
self._renderer = nrd.docbook.DocBookRenderer(manpage_urls)
def test_attr_span_parsing() -> None:
c = Converter({})