Skip to content

Commit

Permalink
Adds `block_args` to support auto-generated heading ids for content included from trustworthy sources
Browse files Browse the repository at this point in the history
  • Loading branch information
hsjobeki authored and pennae committed Jan 29, 2024
1 parent 24d9778 commit 48a2178
Show file tree
Hide file tree
Showing 3 changed files with 135 additions and 6 deletions.
2 changes: 1 addition & 1 deletion doc/doc-support/lib-function-docs.nix
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ stdenv.mkDerivation {
mkdir -p "$out"
cat > "$out/index.md" << 'EOF'
```{=include=} sections
```{=include=} sections auto-id-prefix=auto-generated
EOF
${lib.concatMapStrings ({ name, baseName ? name, description }: ''
Expand Down
47 changes: 42 additions & 5 deletions pkgs/tools/nix/nixos-render-docs/src/nixos_render_docs/manual.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from abc import abstractmethod
from collections.abc import Mapping, Sequence
from pathlib import Path
from typing import Any, cast, ClassVar, Generic, get_args, NamedTuple
from typing import Any, Callable, cast, ClassVar, Generic, get_args, NamedTuple

from markdown_it.token import Token

Expand Down Expand Up @@ -44,8 +44,40 @@ def convert(self, infile: Path, outfile: Path) -> None:
def _postprocess(self, infile: Path, outfile: Path, tokens: Sequence[Token]) -> None:
pass

def _parse(self, src: str) -> list[Token]:
def _handle_headings(self, tokens: list[Token], *, on_heading: Callable[[Token,str],None]) -> None:
    """Visit every heading_open token in document order and invoke
    ``on_heading(token, ident)`` with a dotted, globally numbered
    position ident (e.g. "2.1.3" for the third h3 under the first h2
    of the second h1).

    Raises RuntimeError for any heading tag outside h1..h6.
    """
    # counters[d] holds the running heading count at depth d (h1 -> 0).
    counters: list[int] = []
    for tok in tokens:
        if tok.type != "heading_open":
            continue
        if tok.tag not in ("h1", "h2", "h3", "h4", "h5", "h6"):
            raise RuntimeError(f"Got invalid heading tag {tok.tag} in line {tok.map[0] + 1 if tok.map else 'NOT FOUND'}. Only h1 to h6 headings are allowed.")

        depth = int(tok.tag[1:]) - 1

        # First time we see a heading this deep: grow the counter list.
        while len(counters) <= depth:
            counters.append(0)

        # Entering a heading resets all deeper levels and bumps this one.
        del counters[depth + 1:]
        counters[depth] += 1

        on_heading(tok, ".".join(str(c) for c in counters))

def _parse(self, src: str, *, auto_id_prefix: None | str = None) -> list[Token]:
    # Parse ``src`` into markdown-it tokens. When ``auto_id_prefix`` is
    # given, headings that carry no explicit id get a generated one of the
    # form "<prefix>-<dotted position>" (e.g. "auto-generated-1.2").
    tokens = super()._parse(src)
    if auto_id_prefix:
        def set_token_ident(token: Token, ident: str) -> None:
            # An explicit id written in the source always wins over the
            # auto-generated one.
            if "id" not in token.attrs:
                token.attrs["id"] = f"{auto_id_prefix}-{ident}"

        self._handle_headings(tokens, on_heading=set_token_ident)


    check_structure(self._current_type[-1], tokens)
    # NOTE(review): the remainder of this loop is hidden by the diff fold
    # ("Expand Down") — only the guard over include tokens is visible here.
    for token in tokens:
        if not is_include(token):
Expand Down Expand Up @@ -89,7 +121,12 @@ def _parse_included_blocks(self, token: Token, block_args: dict[str, str]) -> No
try:
self._base_paths.append(path)
with open(path, 'r') as f:
tokens = self._parse(f.read())
prefix = None
if "auto-id-prefix" in block_args:
# include the current file number to prevent duplicate ids within include blocks
prefix = f"{block_args.get('auto-id-prefix')}-{lnum}"

tokens = self._parse(f.read(), auto_id_prefix=prefix)
included.append((tokens, path))
self._base_paths.pop()
except Exception as e:
Expand Down Expand Up @@ -554,8 +591,8 @@ def convert(self, infile: Path, outfile: Path) -> None:
infile.parent, outfile.parent)
super().convert(infile, outfile)

# NOTE(review): this is a unified-diff view; the first signature below is the
# pre-change version, the second is its replacement in this commit.
def _parse(self, src: str) -> list[Token]:
    tokens = super()._parse(src)
# Replacement: accept auto_id_prefix keyword-only and thread it through to the
# base parser so headings in included files can receive generated ids.
def _parse(self, src: str, *, auto_id_prefix: None | str = None) -> list[Token]:
    tokens = super()._parse(src,auto_id_prefix=auto_id_prefix)
    # Body continues past the diff fold: scans for included_* tokens that
    # redirect output via an 'into-file' include argument.
    for token in tokens:
        if not token.type.startswith('included_') \
           or not (into := token.meta['include-args'].get('into-file')):
Expand Down
92 changes: 92 additions & 0 deletions pkgs/tools/nix/nixos-render-docs/src/tests/test_auto_id_prefix.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
from pathlib import Path

from markdown_it.token import Token
from nixos_render_docs.manual import HTMLConverter, HTMLParameters
from nixos_render_docs.md import Converter

# Prefix shared by every test in this module when stamping heading ids.
auto_id_prefix = "TEST_PREFIX"


def set_prefix(token: Token, ident: str) -> None:
    """on_heading callback: assign ``<prefix>-<ident>`` as the token's id."""
    token.attrs["id"] = "-".join((auto_id_prefix, ident))


def test_auto_id_prefix_simple() -> None:
    """A single h1/h2 pair gets dotted position ids 1 and 1.1."""
    md = HTMLConverter("1.0.0", HTMLParameters("", [], [], 2, 2, 2, Path("")), {})

    # Plain string literal: the original had an f-prefix with no
    # placeholders (ruff F541).
    src = """
# title
## subtitle
"""
    tokens = Converter()._parse(src)
    md._handle_headings(tokens, on_heading=set_prefix)

    assert [
        {**token.attrs, "tag": token.tag}
        for token in tokens
        if token.type == "heading_open"
    ] == [
        {"id": "TEST_PREFIX-1", "tag": "h1"},
        {"id": "TEST_PREFIX-1.1", "tag": "h2"},
    ]


def test_auto_id_prefix_repeated() -> None:
    """A second h1 resets the h2 counter: ids 1, 1.1, 2, 2.1."""
    md = HTMLConverter("1.0.0", HTMLParameters("", [], [], 2, 2, 2, Path("")), {})

    # Plain string literal: the original had an f-prefix with no
    # placeholders (ruff F541).
    src = """
# title
## subtitle
# title2
## subtitle2
"""
    tokens = Converter()._parse(src)
    md._handle_headings(tokens, on_heading=set_prefix)

    assert [
        {**token.attrs, "tag": token.tag}
        for token in tokens
        if token.type == "heading_open"
    ] == [
        {"id": "TEST_PREFIX-1", "tag": "h1"},
        {"id": "TEST_PREFIX-1.1", "tag": "h2"},
        {"id": "TEST_PREFIX-2", "tag": "h1"},
        {"id": "TEST_PREFIX-2.1", "tag": "h2"},
    ]

def test_auto_id_prefix_maximum_nested() -> None:
    """h1 through h6 nest to 1.1.1.1.1.1; a following h2 becomes 1.2."""
    md = HTMLConverter("1.0.0", HTMLParameters("", [], [], 2, 2, 2, Path("")), {})

    # Plain string literal: the original had an f-prefix with no
    # placeholders (ruff F541).
    src = """
# h1
## h2
### h3
#### h4
##### h5
###### h6
## h2.2
"""
    tokens = Converter()._parse(src)
    md._handle_headings(tokens, on_heading=set_prefix)

    assert [
        {**token.attrs, "tag": token.tag}
        for token in tokens
        if token.type == "heading_open"
    ] == [
        {"id": "TEST_PREFIX-1", "tag": "h1"},
        {"id": "TEST_PREFIX-1.1", "tag": "h2"},
        {"id": "TEST_PREFIX-1.1.1", "tag": "h3"},
        {"id": "TEST_PREFIX-1.1.1.1", "tag": "h4"},
        {"id": "TEST_PREFIX-1.1.1.1.1", "tag": "h5"},
        {"id": "TEST_PREFIX-1.1.1.1.1.1", "tag": "h6"},
        {"id": "TEST_PREFIX-1.2", "tag": "h2"},
    ]

0 comments on commit 48a2178

Please sign in to comment.