diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index e08f2e61..b3dd2508 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -24,27 +24,12 @@ repos:
       - id: mixed-line-ending
       - id: trailing-whitespace
 
-  - repo: https://github.com/asottile/pyupgrade
-    rev: v3.15.0
-    hooks:
-      - id: pyupgrade
-        args: [--py38-plus]
-
-  - repo: https://github.com/PyCQA/isort
-    rev: 5.12.0
-    hooks:
-      - id: isort
-
-  - repo: https://github.com/psf/black
-    rev: 23.10.1
-    hooks:
-      - id: black
-
   - repo: https://github.com/astral-sh/ruff-pre-commit
     rev: v0.1.4
     hooks:
       - id: ruff
         args: ["--fix", "--show-fixes"]
+      - id: ruff-format
 
   - repo: https://github.com/pre-commit/mirrors-mypy
     rev: v1.6.1
diff --git a/docs/conf.py b/docs/conf.py
index 0f954923..4a10dcd6 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -191,9 +191,7 @@ class MystNbConfigDirective(_ConfigBase):
     required_arguments = 1
     option_spec = {
         "sphinx": directives.flag,
-        "section": lambda x: directives.choice(
-            x, ["config", "read", "execute", "render"]
-        ),
+        "section": lambda x: directives.choice(x, ["config", "read", "execute", "render"]),
     }
 
     def run(self):
diff --git a/myst_nb/cli.py b/myst_nb/cli.py
index 58637d03..456170d8 100644
--- a/myst_nb/cli.py
+++ b/myst_nb/cli.py
@@ -29,9 +29,7 @@ def quickstart(args: list[str] | None = None):
     # write conf.py
     (path / "conf.py").write_text(generate_conf_py(), encoding="utf-8")
     # write index.md
-    (path / "index.md").write_text(
-        generate_index(["notebook1", "notebook2"]), encoding="utf-8"
-    )
+    (path / "index.md").write_text(generate_index(["notebook1", "notebook2"]), encoding="utf-8")
     # write notebook1.ipynb
     (path / "notebook1.ipynb").write_text(generate_jupyter_notebook(), encoding="utf-8")
     # write notebook2.md
@@ -42,12 +40,8 @@ def quickstart(args: list[str] | None = None):
 
 def create_quickstart_cli():
     cli = argparse.ArgumentParser(description="Create a basic myst_nb project.")
-    cli.add_argument(
-        "path", metavar="PATH", type=str, help="Directory to output the project."
-    )
-    cli.add_argument(
-        "-o", "--overwrite", action="store_true", help="Overwrite existing files."
-    )
+    cli.add_argument("path", metavar="PATH", type=str, help="Directory to output the project.")
+    cli.add_argument("-o", "--overwrite", action="store_true", help="Overwrite existing files.")
     cli.add_argument("-v", "--verbose", action="store_true", help="Increase verbosity.")
     return cli
 
@@ -166,9 +160,7 @@ def create_md_to_nb_cli():
     cli = argparse.ArgumentParser(
         description="Convert a text-based notebook to a Jupyter notebook."
     )
-    cli.add_argument(
-        "inpath", metavar="PATH_IN", type=str, help="Path to Markdown file."
-    )
+    cli.add_argument("inpath", metavar="PATH_IN", type=str, help="Path to Markdown file.")
     cli.add_argument(
         "outpath",
         metavar="PATH_OUT",
@@ -176,8 +168,6 @@
         type=str,
         help="Path to output to.",
     )
-    cli.add_argument(
-        "-o", "--overwrite", action="store_true", help="Overwrite existing files."
-    )
+    cli.add_argument("-o", "--overwrite", action="store_true", help="Overwrite existing files.")
     cli.add_argument("-v", "--verbose", action="store_true", help="Increase verbosity.")
     return cli
diff --git a/myst_nb/core/config.py b/myst_nb/core/config.py
index d3b5ad9d..a9a71e06 100644
--- a/myst_nb/core/config.py
+++ b/myst_nb/core/config.py
@@ -27,9 +27,7 @@ def custom_formats_converter(value: dict) -> Dict[str, Tuple[str, dict, bool]]:
         if isinstance(reader, str):
             output[suffix] = (reader, {}, False)
         elif not isinstance(reader, Sequence):
-            raise TypeError(
-                f"`nb_custom_formats` values must be a string or sequence: {reader}"
-            )
+            raise TypeError(f"`nb_custom_formats` values must be a string or sequence: {reader}")
         elif len(reader) == 2:
             output[suffix] = (reader[0], reader[1], False)
         elif len(reader) == 3:
@@ -40,18 +38,12 @@ def custom_formats_converter(value: dict) -> Dict[str, Tuple[str, dict, bool]]:
                 f"2 or 3: {reader}"
             )
         if not isinstance(output[suffix][0], str):
-            raise TypeError(
-                f"`nb_custom_formats` values[0] must be a string: {output[suffix][0]}"
-            )
+            raise TypeError(f"`nb_custom_formats` values[0] must be a string: {output[suffix][0]}")
         # TODO check can be loaded as a python object?
         if not isinstance(output[suffix][1], dict):
-            raise TypeError(
-                f"`nb_custom_formats` values[1] must be a dict: {output[suffix][1]}"
-            )
+            raise TypeError(f"`nb_custom_formats` values[1] must be a dict: {output[suffix][1]}")
         if not isinstance(output[suffix][2], bool):
-            raise TypeError(
-                f"`nb_custom_formats` values[2] must be a bool: {output[suffix][2]}"
-            )
+            raise TypeError(f"`nb_custom_formats` values[2] must be a bool: {output[suffix][2]}")
     return output
 
 
@@ -264,8 +256,7 @@ def __post_init__(self):
         default=False,
         metadata={
             "validator": instance_of(bool),
-            "help": "Raise an exception on failed execution, "
-            "rather than emitting a warning",
+            "help": "Raise an exception on failed execution, " "rather than emitting a warning",
             "sections": (Section.global_lvl, Section.file_lvl, Section.execute),
         },
     )
@@ -389,9 +380,7 @@ def __post_init__(self):
         default=(),
         metadata={
             "validator": deep_iterable(
-                has_items(
-                    instance_of(str), instance_of(str), optional(instance_of(int))
-                ),
+                has_items(instance_of(str), instance_of(str), optional(instance_of(int))),
             ),
             "help": "Overrides for the base render priority of mime types: "
             "list of (builder name, mime type, priority)",
@@ -401,9 +390,7 @@ def __post_init__(self):
         },
         repr=False,
     )
-    output_stderr: Literal[
-        "show", "remove", "remove-warn", "warn", "error", "severe"
-    ] = dc.field(
+    output_stderr: Literal["show", "remove", "remove-warn", "warn", "error", "severe"] = dc.field(
         default="show",
         metadata={
             "validator": in_(
diff --git a/myst_nb/core/execute/base.py b/myst_nb/core/execute/base.py
index befb6d29..3e1a3d2a 100644
--- a/myst_nb/core/execute/base.py
+++ b/myst_nb/core/execute/base.py
@@ -78,9 +78,7 @@ def __enter__(self) -> NotebookClientBase:
         """Enter the context manager."""
         self.start_client()
         # extract glue data from the notebook
-        self._glue_data = extract_glue_data(
-            self.notebook, self._source_map, self.logger
-        )
+        self._glue_data = extract_glue_data(self.notebook, self._source_map, self.logger)
         return self
 
     @final
@@ -154,9 +152,7 @@ def nb_source_code_lexer(self) -> str | None:
         lexer = (metadata.get("kernelspec") or {}).get("language", None)
         return lexer
 
-    def code_cell_outputs(
-        self, cell_index: int
-    ) -> tuple[int | None, list[NotebookNode]]:
+    def code_cell_outputs(self, cell_index: int) -> tuple[int | None, list[NotebookNode]]:
         """Get the outputs of a cell.
 
         :returns: a tuple of the execution_count and the outputs
diff --git a/myst_nb/core/execute/cache.py b/myst_nb/core/execute/cache.py
index f7a26101..a9b04f59 100644
--- a/myst_nb/core/execute/cache.py
+++ b/myst_nb/core/execute/cache.py
@@ -45,9 +45,7 @@ def start_client(self):
             return
 
         if self.path is None:
-            raise ValueError(
-                "Input source must exist as file, if execution_mode is 'cache'"
-            )
+            raise ValueError("Input source must exist as file, if execution_mode is 'cache'")
 
         # attempt to execute the notebook
         read_fmt = self._kwargs.get("read_fmt", None)
diff --git a/myst_nb/core/execute/direct.py b/myst_nb/core/execute/direct.py
index a8e3e55f..d8b37e3e 100644
--- a/myst_nb/core/execute/direct.py
+++ b/myst_nb/core/execute/direct.py
@@ -24,9 +24,7 @@ def start_client(self):
             cwd_context = TemporaryDirectory()
         else:
             if self.path is None:
-                raise ValueError(
-                    "Input source must exist as file, if execution_in_temp=False"
-                )
+                raise ValueError("Input source must exist as file, if execution_in_temp=False")
             cwd_context = nullcontext(str(self.path.parent))
 
         # execute in the context of the current working directory
diff --git a/myst_nb/core/execute/inline.py b/myst_nb/core/execute/inline.py
index 13d20786..64ca9b0c 100644
--- a/myst_nb/core/execute/inline.py
+++ b/myst_nb/core/execute/inline.py
@@ -41,9 +41,7 @@ def start_client(self):
             resources = {"metadata": {"path": self._tmp_path}}
         else:
             if self.path is None:
-                raise ValueError(
-                    "Input source must exist as file, if execution_in_temp=False"
-                )
+                raise ValueError("Input source must exist as file, if execution_in_temp=False")
             resources = {"metadata": {"path": str(self.path.parent)}}
 
         self.logger.info("Starting inline execution client")
@@ -68,9 +66,7 @@ def start_client(self):
         msg_id = self._client.kc.kernel_info()
         info_msg = self._client.wait_for_reply(msg_id)
         if info_msg is not None and "language_info" in info_msg["content"]:
-            self.notebook.metadata["language_info"] = info_msg["content"][
-                "language_info"
-            ]
+            self.notebook.metadata["language_info"] = info_msg["content"]["language_info"]
         else:
             self.logger.warning("Failed to retrieve language info from kernel")
 
@@ -96,9 +92,7 @@ def close_client(self, exc_type, exc_val, exc_tb):
             "runtime": _exec_time,
             "method": self.nb_config.execution_mode,
             "succeeded": False if self._cell_error else True,
-            "error": f"{self._cell_error.__class__.__name__}"
-            if self._cell_error
-            else None,
+            "error": f"{self._cell_error.__class__.__name__}" if self._cell_error else None,
             "traceback": self._exc_string,
         }
         if not self._cell_error:
@@ -111,9 +105,7 @@ def close_client(self, exc_type, exc_val, exc_tb):
         if self._tmp_path:
             shutil.rmtree(self._tmp_path, ignore_errors=True)
 
-    def code_cell_outputs(
-        self, cell_index: int
-    ) -> tuple[int | None, list[NotebookNode]]:
+    def code_cell_outputs(self, cell_index: int) -> tuple[int | None, list[NotebookNode]]:
         cells = self.notebook.get("cells", [])
 
         # ensure all cells up to and including the requested cell have been executed
diff --git a/myst_nb/core/lexers.py b/myst_nb/core/lexers.py
index 2d4978b6..e2b49d38 100644
--- a/myst_nb/core/lexers.py
+++ b/myst_nb/core/lexers.py
@@ -20,9 +20,7 @@
 }
 
 
-def _token_from_lexer_state(
-    bold: bool, faint: bool, fg_color: str | None, bg_color: str | None
-):
+def _token_from_lexer_state(bold: bool, faint: bool, fg_color: str | None, bg_color: str | None):
     """Construct a token given the current lexer state.
 
     We can only emit one token even though we have a multiple-tuple state.
diff --git a/myst_nb/core/loggers.py b/myst_nb/core/loggers.py
index d898d364..16673b92 100644
--- a/myst_nb/core/loggers.py
+++ b/myst_nb/core/loggers.py
@@ -80,9 +80,7 @@ class DocutilsDocLogger(logging.LoggerAdapter):
     ]
 
     def __init__(self, document: nodes.document, type_name: str = DEFAULT_LOG_TYPE):
-        self.logger: logging.Logger = logging.getLogger(
-            f"{type_name}-{document.source}"
-        )
+        self.logger: logging.Logger = logging.getLogger(f"{type_name}-{document.source}")
         # docutils handles the level of output logging
         self.logger.setLevel(logging.DEBUG)
         if not self.logger.handlers:
diff --git a/myst_nb/core/read.py b/myst_nb/core/read.py
index 2b3dae4b..8821c9ff 100644
--- a/myst_nb/core/read.py
+++ b/myst_nb/core/read.py
@@ -137,9 +137,7 @@ def is_myst_markdown_notebook(text: str | Iterator[str]) -> bool:
     if "file_format" in metadata and metadata["file_format"] == "mystnb":
         return True
     if (
-        metadata.get("jupytext", {})
-        .get("text_representation", {})
-        .get("format_name", None)
+        metadata.get("jupytext", {}).get("text_representation", {}).get("format_name", None)
         != "myst"
     ):
         return False
@@ -164,9 +162,7 @@ def myst_nb_reader_plugin(uri: str) -> nbf.NotebookNode:
 
     Used as plugin for jupyter-cache.
     """
-    return read_myst_markdown_notebook(
-        Path(uri).read_text("utf8"), add_source_map=True, path=uri
-    )
+    return read_myst_markdown_notebook(Path(uri).read_text("utf8"), add_source_map=True, path=uri)
 
 
 def read_myst_markdown_notebook(
@@ -193,9 +189,7 @@ def read_myst_markdown_notebook(
     """
     config = config or MdParserConfig()
     # parse markdown file up to the block level (i.e. don't worry about inline text)
-    inline_config = dc.replace(
-        config, disable_syntax=(list(config.disable_syntax) + ["inline"])
-    )
+    inline_config = dc.replace(config, disable_syntax=(list(config.disable_syntax) + ["inline"]))
     parser = create_md_parser(inline_config, RendererHTML)
     tokens = parser.parse(text + "\n")
     lines = text.splitlines()
@@ -233,9 +227,7 @@ def _flush_markdown(start_line, token, md_metadata):
        meta = nbf.from_dict(md_metadata)
        if md_source:
            source_map.append(start_line)
-            notebook.cells.append(
-                nbf_version.new_markdown_cell(source=md_source, metadata=meta)
-            )
+            notebook.cells.append(nbf_version.new_markdown_cell(source=md_source, metadata=meta))
 
     # iterate through the tokens to identify notebook cells
     nesting_level = 0
@@ -255,9 +247,7 @@ def _flush_markdown(start_line, token, md_metadata):
             options, body_lines = _read_fenced_cell(token, len(notebook.cells), "Code")
             # Parse :load: or load: tags and populate body with contents of file
             if "load" in options:
-                body_lines = _load_code_from_file(
-                    path, options["load"], token, body_lines
-                )
+                body_lines = _load_code_from_file(path, options["load"], token, body_lines)
             meta = nbf.from_dict(options)
             source_map.append(token_map[0] + 1)
             notebook.cells.append(
@@ -343,17 +333,13 @@ def _read_cell_metadata(token, cell_index):
             )
         if not isinstance(metadata, dict):
             raise MystMetadataParsingError(
-                "Markdown cell {} at line {} is not a dict".format(
-                    cell_index, token.map[0] + 1
-                )
+                "Markdown cell {} at line {} is not a dict".format(cell_index, token.map[0] + 1)
             )
 
     return metadata
 
 
-def _load_code_from_file(
-    nb_path: None | str | Path, file_name: str, token, body_lines: list[str]
-):
+def _load_code_from_file(nb_path: None | str | Path, file_name: str, token, body_lines: list[str]):
     """load source code from a file."""
     if nb_path is None:
         raise _LoadFileParsingError("path to notebook not supplied for :load:")
diff --git a/myst_nb/core/render.py b/myst_nb/core/render.py
index 4e2a0ba3..0856f19f 100644
--- a/myst_nb/core/render.py
+++ b/myst_nb/core/render.py
@@ -140,9 +140,7 @@ def render_nb_cell_code(self: SelfType, token: SyntaxTreeNode) -> None:
         # TODO do we need this -/_ duplication of tag names, or can we deprecate one?
         hide_cell = "hide-cell" in tags
         remove_input = (
-            self.get_cell_level_config(
-                "remove_code_source", token.meta["metadata"], line=cell_line
-            )
+            self.get_cell_level_config("remove_code_source", token.meta["metadata"], line=cell_line)
             or ("remove_input" in tags)
             or ("remove-input" in tags)
         )
@@ -194,9 +192,7 @@ def render_nb_cell_code(self: SelfType, token: SyntaxTreeNode) -> None:
         with self.current_node_context(cell_container, append=True):
             # render the code source code
             if not remove_input:
-                cell_input = nodes.container(
-                    nb_element="cell_code_source", classes=["cell_input"]
-                )
+                cell_input = nodes.container(nb_element="cell_code_source", classes=["cell_input"])
                 self.add_line_and_source_path(cell_input, token)
                 with self.current_node_context(cell_input, append=True):
                     self._render_nb_cell_code_source(token)
@@ -324,9 +320,7 @@ def string(self) -> str:
 class NbElementRenderer:
     """A class for rendering notebook elements."""
 
-    def __init__(
-        self, renderer: DocutilsNbRenderer | SphinxNbRenderer, logger: LoggerType
-    ) -> None:
+    def __init__(self, renderer: DocutilsNbRenderer | SphinxNbRenderer, logger: LoggerType) -> None:
         """Initialize the renderer.
 
         :params output_folder: the folder path for external outputs (like images)
@@ -360,9 +354,7 @@ def source(self):
         """The source of the notebook."""
         return self.renderer.document["source"]
 
-    def write_file(
-        self, path: list[str], content: bytes, overwrite=False, exists_ok=False
-    ) -> str:
+    def write_file(self, path: list[str], content: bytes, overwrite=False, exists_ok=False) -> str:
         """Write a file to the external output folder.
 
         :param path: the path to write the file to, relative to the output folder
@@ -446,9 +438,7 @@ def render_raw_cell(
             # skip without warning, since e.g. jupytext saves raw cells with no format
             return []
         return self.render_mime_type(
-            MimeData(
-                mime_type, content, metadata, cell_index=cell_index, line=source_line
-            )
+            MimeData(mime_type, content, metadata, cell_index=cell_index, line=source_line)
         )
 
     def render_stdout(
@@ -609,9 +599,7 @@ def render_text_plain(self, data: MimeData) -> list[nodes.Element]:
 
     def render_text_html(self, data: MimeData) -> list[nodes.Element]:
         """Render a notebook text/html mime data output."""
-        return [
-            nodes.raw(text=data.string, format="html", classes=["output", "text_html"])
-        ]
+        return [nodes.raw(text=data.string, format="html", classes=["output", "text_html"])]
 
     def render_text_latex(self, data: MimeData) -> list[nodes.Element]:
         """Render a notebook text/latex mime data output."""
@@ -643,9 +631,7 @@ def render_image(self, data: MimeData) -> list[nodes.Element]:
         # ensure correct line separator
         data_bytes = os.linesep.join(data.content.splitlines()).encode("utf-8")
         # create filename
-        extension = (
-            guess_extension(data.mime_type) or "." + data.mime_type.rsplit("/")[-1]
-        )
+        extension = guess_extension(data.mime_type) or "." + data.mime_type.rsplit("/")[-1]
         # latex does not recognize the '.jpe' extension
         extension = ".jpeg" if extension == ".jpe" else extension
         # ensure de-duplication of outputs by using hash as filename
@@ -752,9 +738,7 @@ def render_markdown_inline(self, data: MimeData) -> list[nodes.Element]:
     def render_text_plain_inline(self, data: MimeData) -> list[nodes.Element]:
         """Render a notebook text/plain mime data output."""
         content = data.string
-        if data.output_metadata.get("strip_text_quotes", False) and _QUOTED_RE.match(
-            content
-        ):
+        if data.output_metadata.get("strip_text_quotes", False) and _QUOTED_RE.match(content):
             content = content[1:-1]
         node = nodes.inline(data.string, content, classes=["output", "text_plain"])
         return [node]
@@ -1207,9 +1191,7 @@ def base_render_priority() -> dict[str, dict[str, int | None]]:
     }
 
 
-def get_mime_priority(
-    builder: str, overrides: Sequence[tuple[str, str, int | None]]
-) -> list[str]:
+def get_mime_priority(builder: str, overrides: Sequence[tuple[str, str, int | None]]) -> list[str]:
     """Return the priority list for the builder.
 
     Takes the base priority list, overrides from the config,
@@ -1222,7 +1204,5 @@ def get_mime_priority(
     for override in overrides:
         if override[0] == "*" or override[0] == builder:
             base[override[1]] = override[2]
-    sort = sorted(
-        ((k, p) for k, p in base.items() if p is not None), key=lambda x: x[1]
-    )
+    sort = sorted(((k, p) for k, p in base.items() if p is not None), key=lambda x: x[1])
     return [k for k, _ in sort]
diff --git a/myst_nb/core/variables.py b/myst_nb/core/variables.py
index 0cfffa5f..4e751968 100644
--- a/myst_nb/core/variables.py
+++ b/myst_nb/core/variables.py
@@ -148,8 +148,7 @@ def _render_output_docutils(
     if output.data:
         return [
             create_warning(
-                "No output mime type found from render_priority "
-                f"(output<{output.index}>)",
+                "No output mime type found from render_priority " f"(output<{output.index}>)",
                 document,
                 line,
                 output.vtype,
diff --git a/myst_nb/docutils_.py b/myst_nb/docutils_.py
index 7731f0b5..ea1b2d75 100644
--- a/myst_nb/docutils_.py
+++ b/myst_nb/docutils_.py
@@ -142,9 +142,7 @@ def _parse(self, inputstring: str, document: nodes.document) -> None:
         warning = lambda wtype, msg: create_warning(  # noqa: E731
             document, msg, line=1, append_to=document, subtype=wtype
         )
-        nb_reader.md_config = merge_file_level(
-            nb_reader.md_config, notebook.metadata, warning
-        )
+        nb_reader.md_config = merge_file_level(nb_reader.md_config, notebook.metadata, warning)
 
         # Update mystnb configuration with notebook level metadata
         if nb_config.metadata_key in notebook.metadata:
@@ -157,9 +155,7 @@ def _parse(self, inputstring: str, document: nodes.document) -> None:
                 subtype="config",
             )
         else:
-            logger.debug(
-                "Updated configuration with notebook metadata", subtype="config"
-            )
+            logger.debug("Updated configuration with notebook metadata", subtype="config")
 
         # Setup the markdown parser
         mdit_parser = create_md_parser(nb_reader.md_config, DocutilsNbRenderer)
@@ -171,9 +167,7 @@ def _parse(self, inputstring: str, document: nodes.document) -> None:
         # load notebook element renderer class from entry-point name
         # this is separate from DocutilsNbRenderer, so that users can override it
         renderer_name = nb_config.render_plugin
-        nb_renderer: NbElementRenderer = load_renderer(renderer_name)(
-            mdit_renderer, logger
-        )
+        nb_renderer: NbElementRenderer = load_renderer(renderer_name)(mdit_renderer, logger)
         # we temporarily store nb_renderer on the document,
         # so that roles/directives can access it
         document.attributes["nb_renderer"] = nb_renderer
@@ -271,23 +265,17 @@ def _render_nb_cell_code_outputs(
         for output_index, output in enumerate(outputs):
             if output.output_type == "stream":
                 if output.name == "stdout":
-                    _nodes = self.nb_renderer.render_stdout(
-                        output, metadata, cell_index, line
-                    )
+                    _nodes = self.nb_renderer.render_stdout(output, metadata, cell_index, line)
                     self.add_line_and_source_path_r(_nodes, token)
                     self.current_node.extend(_nodes)
                 elif output.name == "stderr":
-                    _nodes = self.nb_renderer.render_stderr(
-                        output, metadata, cell_index, line
-                    )
+                    _nodes = self.nb_renderer.render_stderr(output, metadata, cell_index, line)
                     self.add_line_and_source_path_r(_nodes, token)
                     self.current_node.extend(_nodes)
                 else:
                     pass  # TODO warning
             elif output.output_type == "error":
-                _nodes = self.nb_renderer.render_error(
-                    output, metadata, cell_index, line
-                )
+                _nodes = self.nb_renderer.render_error(output, metadata, cell_index, line)
                 self.add_line_and_source_path_r(_nodes, token)
                 self.current_node.extend(_nodes)
             elif output.output_type in ("display_data", "execute_result"):
@@ -311,9 +299,7 @@ def _render_nb_cell_code_outputs(
                     )
                 else:
                     figure_options = (
-                        self.get_cell_level_config(
-                            "render_figure_options", metadata, line=line
-                        )
+                        self.get_cell_level_config("render_figure_options", metadata, line=line)
                         or None
                     )
 
@@ -342,9 +328,7 @@ def _render_nb_cell_code_outputs(
     )
 
 
-def _run_cli(
-    writer_name: str, builder_name: str, writer_description: str, argv: list[str] | None
-):
+def _run_cli(writer_name: str, builder_name: str, writer_description: str, argv: list[str] | None):
     """Run the command line interface for a particular writer."""
     publish_cmdline(
         parser=Parser(),
diff --git a/myst_nb/ext/download.py b/myst_nb/ext/download.py
index c4bf8409..45d8cccf 100644
--- a/myst_nb/ext/download.py
+++ b/myst_nb/ext/download.py
@@ -3,9 +3,10 @@
 
 from docutils import nodes
 from sphinx.addnodes import download_reference
-from sphinx.environment import BuildEnvironment
 from sphinx.util.docutils import ReferenceRole
 
+from myst_nb.sphinx_ import SphinxEnvType
+
 
 class NbDownloadRole(ReferenceRole):
     """Role to download an executed notebook."""
@@ -13,8 +14,8 @@ class NbDownloadRole(ReferenceRole):
     def run(self):
         """Run the role."""
         # get a path relative to the current document
-        self.env: BuildEnvironment
-        path = Path(self.env.mystnb_config.output_folder).joinpath(  # type: ignore
+        self.env: SphinxEnvType
+        path = Path(self.env.mystnb_config.output_folder).joinpath(
            *(self.env.docname.split("/")[:-1] + self.target.split("/"))
         )
         reftarget = (
@@ -25,7 +26,5 @@ def run(self):
         node = download_reference(self.rawtext, reftarget=reftarget)
         self.set_source_info(node)
         title = self.title if self.has_explicit_title else self.target
-        node += nodes.literal(
-            self.rawtext, title, classes=["xref", "download", "myst-nb"]
-        )
+        node += nodes.literal(self.rawtext, title, classes=["xref", "download", "myst-nb"])
         return [node], []
diff --git a/myst_nb/ext/eval/__init__.py b/myst_nb/ext/eval/__init__.py
index 52c04cd2..10d28f34 100644
--- a/myst_nb/ext/eval/__init__.py
+++ b/myst_nb/ext/eval/__init__.py
@@ -161,9 +161,7 @@ def run(self):
         render: dict[str, Any] = {}
         for key in ("alt", "height", "width", "scale", "class"):
             if key in self.options:
-                render.setdefault("image", {})[
-                    key.replace("classes", "class")
-                ] = self.options[key]
+                render.setdefault("image", {})[key.replace("classes", "class")] = self.options[key]
         mime_nodes = render_variable_outputs(
             data, self.document, self.line, self.source, render=render
         )
diff --git a/myst_nb/ext/execution_tables.py b/myst_nb/ext/execution_tables.py
index f349c401..47b02e2a 100644
--- a/myst_nb/ext/execution_tables.py
+++ b/myst_nb/ext/execution_tables.py
@@ -62,9 +62,7 @@ def update_exec_tables(app: Sphinx, env: SphinxEnvType):
         if data.get(METADATA_KEY)
     ]
     if to_update:
-        SPHINX_LOGGER.info(
-            f"Updating {len(to_update)} file(s) with execution table [mystnb]"
-        )
+        SPHINX_LOGGER.info(f"Updating {len(to_update)} file(s) with execution table [mystnb]")
     return to_update
 
 
@@ -78,9 +76,7 @@ def run(self, **kwargs) -> None:
         self.env: SphinxEnvType
         for node in self.document.traverse(ExecutionStatsNode):
             node.replace_self(
-                make_stat_table(
-                    self.env.docname, NbMetadataCollector.get_doc_data(self.env)
-                )
+                make_stat_table(self.env.docname, NbMetadataCollector.get_doc_data(self.env))
             )
 
 
@@ -92,18 +88,14 @@ def run(self, **kwargs) -> None:
 }
 
 _key2transform: dict[str, Callable[[Any], str]] = {
-    "mtime": lambda x: datetime.fromtimestamp(x).strftime("%Y-%m-%d %H:%M")
-    if x
-    else "",
+    "mtime": lambda x: datetime.fromtimestamp(x).strftime("%Y-%m-%d %H:%M") if x else "",
     "method": str,
     "runtime": lambda x: "-" if x is None else str(round(x, 2)),
     "succeeded": lambda x: "✅" if x is True else "❌",
 }
 
 
-def make_stat_table(
-    parent_docname: str, metadata: DefaultDict[str, dict]
-) -> nodes.table:
+def make_stat_table(parent_docname: str, metadata: DefaultDict[str, dict]) -> nodes.table:
     """Create a table of statistics on executed notebooks."""
 
     # top-level element
diff --git a/myst_nb/ext/glue/__init__.py b/myst_nb/ext/glue/__init__.py
index 8f35284a..b599e801 100644
--- a/myst_nb/ext/glue/__init__.py
+++ b/myst_nb/ext/glue/__init__.py
@@ -78,9 +78,7 @@ def glue(name: str, variable: Any, display: bool = True) -> None:
     mimebundle, metadata = IPython.core.formatters.format_display_data(variable)
     mime_prefix = "" if display else GLUE_PREFIX
     metadata["scrapbook"] = dict(name=name, mime_prefix=mime_prefix)
-    ipy_display(
-        {mime_prefix + k: v for k, v in mimebundle.items()}, raw=True, metadata=metadata
-    )
+    ipy_display({mime_prefix + k: v for k, v in mimebundle.items()}, raw=True, metadata=metadata)
 
 
 def extract_glue_data(
diff --git a/myst_nb/ext/glue/crossref.py b/myst_nb/ext/glue/crossref.py
index f2f0c76c..a2e9ba7f 100644
--- a/myst_nb/ext/glue/crossref.py
+++ b/myst_nb/ext/glue/crossref.py
@@ -48,9 +48,7 @@ def apply(self, **kwargs):
         """Apply the transform."""
         cache_folder = self.env.mystnb_config.output_folder  # type: ignore
         bname = self.app.builder.name
-        priority_list = get_mime_priority(
-            bname, self.config["nb_mime_priority_overrides"]
-        )
+        priority_list = get_mime_priority(bname, self.config["nb_mime_priority_overrides"])
         node: PendingGlueReference
         for node in list(findall(self.document)(PendingGlueReference)):
             data = read_glue_cache(cache_folder, node.refdoc)
@@ -100,14 +98,8 @@ def generate_any_nodes(
     else:
         return [nodes.literal_block(data[mime_type], data[mime_type])]
     if mime_type == "text/html":
-        return [
-            nodes.raw(
-                text=data[mime_type], format="html", classes=["output", "text_html"]
-            )
-        ]
-    ref_warning(
-        f"No allowed mime type found in {node.key!r}: {list(output['data'])}", node
-    )
+        return [nodes.raw(text=data[mime_type], format="html", classes=["output", "text_html"])]
+    ref_warning(f"No allowed mime type found in {node.key!r}: {list(output['data'])}", node)
     return []
diff --git a/myst_nb/ext/glue/directives.py b/myst_nb/ext/glue/directives.py
index 4d42c15a..58016aba 100644
--- a/myst_nb/ext/glue/directives.py
+++ b/myst_nb/ext/glue/directives.py
@@ -151,9 +151,7 @@ def run(self):
         render: Dict[str, Any] = {}
         for key in ("alt", "height", "width", "scale", "class"):
             if key in self.options:
-                render.setdefault("image", {})[
-                    key.replace("classes", "class")
-                ] = self.options[key]
+                render.setdefault("image", {})[key.replace("classes", "class")] = self.options[key]
         paste_nodes = render_variable_outputs(
             [data], self.document, self.line, self.source, render=render
         )
diff --git a/myst_nb/ext/glue/roles.py b/myst_nb/ext/glue/roles.py
index ee8a5b20..05fc70c6 100644
--- a/myst_nb/ext/glue/roles.py
+++ b/myst_nb/ext/glue/roles.py
@@ -121,18 +121,14 @@ def run(self) -> tuple[list[nodes.Node], list[nodes.system_message]]:
             ]
         if "text/plain" not in result.data:
             return [], [
-                glue_warning(
-                    f"No text/plain found in {key!r} data", self.document, self.line
-                )
+                glue_warning(f"No text/plain found in {key!r} data", self.document, self.line)
             ]
 
         try:
             text = format_plain_text(result.data["text/plain"], fmt_spec)
         except Exception as exc:
             return [], [
-                glue_warning(
-                    f"Failed to format text/plain data: {exc}", self.document, self.line
-                )
+                glue_warning(f"Failed to format text/plain data: {exc}", self.document, self.line)
             ]
         node = nodes.inline(text, text, classes=["pasted-text"])
         self.set_source_info(node)
diff --git a/myst_nb/ext/glue/utils.py b/myst_nb/ext/glue/utils.py
index a61b373a..84136953 100644
--- a/myst_nb/ext/glue/utils.py
+++ b/myst_nb/ext/glue/utils.py
@@ -60,19 +60,13 @@ def create_pending_glue_ref(
 ) -> PendingGlueReference:
     """Create a pending glue reference."""
     if not is_sphinx(document):
-        raise PendingGlueReferenceError(
-            "Pending glue references are only supported in sphinx."
-        )
+        raise PendingGlueReferenceError("Pending glue references are only supported in sphinx.")
     env: BuildEnvironment = document.settings.env
     _, filepath = env.relfn2path(rel_doc, env.docname)
     refdoc = env.path2doc(filepath)
     if refdoc is None:
-        raise PendingGlueReferenceError(
-            f"Pending glue reference document not found: {filepath!r}."
-        )
-    ref = PendingGlueReference(
-        refdoc=refdoc, key=key, inline=inline, gtype=gtype, **kwargs
-    )
+        raise PendingGlueReferenceError(f"Pending glue reference document not found: {filepath!r}.")
+    ref = PendingGlueReference(refdoc=refdoc, key=key, inline=inline, gtype=gtype, **kwargs)
     ref.source = source
     ref.line = line
     return ref
diff --git a/myst_nb/sphinx_.py b/myst_nb/sphinx_.py
index 4885d9ff..f07988ae 100644
--- a/myst_nb/sphinx_.py
+++ b/myst_nb/sphinx_.py
@@ -92,9 +92,7 @@ def parse(self, inputstring: str, document: nodes.document) -> None:
         warning = lambda wtype, msg: create_warning(  # noqa: E731
             document, msg, line=1, append_to=document, subtype=wtype
         )
-        nb_reader.md_config = merge_file_level(
-            nb_reader.md_config, notebook.metadata, warning
-        )
+        nb_reader.md_config = merge_file_level(nb_reader.md_config, notebook.metadata, warning)
 
         # potentially replace kernel name with alias
         kernel_name = notebook.metadata.get("kernelspec", {}).get("name", None)
@@ -120,9 +118,7 @@ def parse(self, inputstring: str, document: nodes.document) -> None:
                 subtype="config",
             )
         else:
-            logger.debug(
-                "Updated configuration with notebook metadata", subtype="config"
-            )
+            logger.debug("Updated configuration with notebook metadata", subtype="config")
 
         # Setup the parser
         mdit_parser = create_md_parser(nb_reader.md_config, SphinxNbRenderer)
@@ -134,9 +130,7 @@ def parse(self, inputstring: str, document: nodes.document) -> None:
         # load notebook element renderer class from entry-point name
         # this is separate from SphinxNbRenderer, so that users can override it
         renderer_name = nb_config.render_plugin
-        nb_renderer: NbElementRenderer = load_renderer(renderer_name)(
-            mdit_renderer, logger
-        )
+        nb_renderer: NbElementRenderer = load_renderer(renderer_name)(mdit_renderer, logger)
         # we temporarily store nb_renderer on the document,
         # so that roles/directives can access it
         document.attributes["nb_renderer"] = nb_renderer
@@ -158,18 +152,14 @@ def parse(self, inputstring: str, document: nodes.document) -> None:
 
         # save final execution data
         if nb_client.exec_metadata:
-            NbMetadataCollector.set_exec_data(
-                self.env, self.env.docname, nb_client.exec_metadata
-            )
+            NbMetadataCollector.set_exec_data(self.env, self.env.docname, nb_client.exec_metadata)
             if nb_client.exec_metadata["traceback"]:
                 # store error traceback in outdir and log its path
                 reports_file = Path(self.env.app.outdir).joinpath(
                     "reports", *(self.env.docname + ".err.log").split("/")
                 )
                 reports_file.parent.mkdir(parents=True, exist_ok=True)
-                reports_file.write_text(
-                    nb_client.exec_metadata["traceback"], encoding="utf8"
-                )
+                reports_file.write_text(nb_client.exec_metadata["traceback"], encoding="utf8")
                 logger.warning(
                     f"Notebook exception traceback saved in: {reports_file}",
                     subtype="exec",
@@ -199,9 +189,7 @@ def parse(self, inputstring: str, document: nodes.document) -> None:
         # so that we can later read it from the environment,
         # rather than having to load the whole doctree
         for key, (uri, kwargs) in document.attributes.pop("nb_js_files", {}).items():
-            NbMetadataCollector.add_js_file(
-                self.env, self.env.docname, key, uri, kwargs
-            )
+            NbMetadataCollector.add_js_file(self.env, self.env.docname, key, uri, kwargs)
 
         # remove temporary state
         document.attributes.pop("nb_renderer")
@@ -244,23 +232,17 @@ def _render_nb_cell_code_outputs(
         for output_index, output in enumerate(outputs):
             if output.output_type == "stream":
                 if output.name == "stdout":
-                    _nodes = self.nb_renderer.render_stdout(
-                        output, metadata, cell_index, line
-                    )
+                    _nodes = self.nb_renderer.render_stdout(output, metadata, cell_index, line)
                     self.add_line_and_source_path_r(_nodes, token)
                     self.current_node.extend(_nodes)
                 elif output.name == "stderr":
-                    _nodes = self.nb_renderer.render_stderr(
-                        output, metadata, cell_index, line
-                    )
+                    _nodes = self.nb_renderer.render_stderr(output, metadata, cell_index, line)
                     self.add_line_and_source_path_r(_nodes, token)
                     self.current_node.extend(_nodes)
                 else:
                     pass  # TODO warning
             elif output.output_type == "error":
-                _nodes = self.nb_renderer.render_error(
-                    output, metadata, cell_index, line
-                )
+                _nodes = self.nb_renderer.render_error(output, metadata, cell_index, line)
                 self.add_line_and_source_path_r(_nodes, token)
                 self.current_node.extend(_nodes)
             elif output.output_type in ("display_data", "execute_result"):
@@ -273,10 +255,7 @@ def _render_nb_cell_code_outputs(
                 # in a post-transform (run per output format on the cached AST)
 
                 figure_options = (
-                    self.get_cell_level_config(
-                        "render_figure_options", metadata, line=line
-                    )
-                    or None
+                    self.get_cell_level_config("render_figure_options", metadata, line=line) or None
                 )
 
                 with create_figure_context(self, figure_options, line):
@@ -325,9 +304,7 @@ def run(self, **kwargs: Any) -> None:
         # get priority list for this builder
         # TODO allow for per-notebook/cell priority dicts?
         bname = self.app.builder.name
-        priority_list = get_mime_priority(
-            bname, self.config["nb_mime_priority_overrides"]
-        )
+        priority_list = get_mime_priority(bname, self.config["nb_mime_priority_overrides"])
 
         def condition(node):
             return (
@@ -385,9 +362,7 @@ def get_doc_data(env: SphinxEnvType) -> DefaultDict[str, dict]:
         return env.nb_metadata
 
     @classmethod
-    def set_exec_data(
-        cls, env: SphinxEnvType, docname: str, value: ExecutionResult
-    ) -> None:
+    def set_exec_data(cls, env: SphinxEnvType, docname: str, value: ExecutionResult) -> None:
         """Add nb metadata for a docname to the environment."""
         cls.set_doc_data(env, docname, "exec_data", value)
         # TODO this does not take account of cache data
@@ -510,11 +485,7 @@ class HideInputCells(SphinxPostTransform):
 
     def run(self, **kwargs):
         for node in findall(self.document)(nodes.container):
-            if (
-                node.get("nb_element") == "cell_code"
-                and node.get("hide_mode")
-                and node.children
-            ):
+            if node.get("nb_element") == "cell_code" and node.get("hide_mode") and node.children:
                 hide_mode = node.get("hide_mode")
                 has_input = node.children[0].get("nb_element") == "cell_code_source"
                 has_output = node.children[-1].get("nb_element") == "cell_code_output"
diff --git a/myst_nb/sphinx_ext.py b/myst_nb/sphinx_ext.py
index 780fbce5..408791e8 100644
--- a/myst_nb/sphinx_ext.py
+++ b/myst_nb/sphinx_ext.py
@@ -54,9 +54,7 @@ def sphinx_setup(app: Sphinx):
         # TODO add types?
         app.add_config_value(f"nb_{name}", default, "env", Any)
         if "legacy_name" in field.metadata:
-            app.add_config_value(
-                f"{field.metadata['legacy_name']}", _UNSET, "env", Any
-            )
+            app.add_config_value(f"{field.metadata['legacy_name']}", _UNSET, "env", Any)
 
     # Handle non-standard deprecation
     app.add_config_value("nb_render_priority", _UNSET, "env", Any)
@@ -157,9 +155,7 @@ def create_mystnb_config(app):
 
     try:
         app.env.mystnb_config = NbParserConfig(**values)
-        SPHINX_LOGGER.info(
-            bold("myst-nb v%s:") + " %s", __version__, app.env.mystnb_config
-        )
+        SPHINX_LOGGER.info(bold("myst-nb v%s:") + " %s", __version__, app.env.mystnb_config)
     except (TypeError, ValueError) as error:
         SPHINX_LOGGER.critical("myst-nb configuration invalid: %s", error.args[0])
         raise
@@ -194,9 +190,7 @@ def _import_resources_path(package: ModuleType, resource: str) -> Iterator[Path]
         with import_resources.path(package, resource) as path:
             yield path
     else:
-        with import_resources.as_file(
-            import_resources.files(package).joinpath(resource)
-        ) as path:
+        with import_resources.as_file(import_resources.files(package).joinpath(resource)) as path:
             yield path
 
 
@@ -213,15 +207,11 @@ def add_global_html_resources(app: Sphinx, exception):
     if app.builder is not None and app.builder.format == "html" and not exception:
         with _import_resources_path(static, "mystnb.css") as source_path:
             hash = _get_file_hash(source_path)
-            destination = os.path.join(
-                app.builder.outdir, "_static", f"mystnb.{hash}.css"
-            )
+            destination = os.path.join(app.builder.outdir, "_static", f"mystnb.{hash}.css")
             copy_asset_file(str(source_path), destination)
 
 
-def add_per_page_html_resources(
-    app: Sphinx, pagename: str, *args: Any, **kwargs: Any
-) -> None:
+def add_per_page_html_resources(app: Sphinx, pagename: str, *args: Any, **kwargs: Any) -> None:
     """Add JS files for this page, identified from the parsing of the notebook."""
     if app.env is None or app.builder is None or app.builder.format != "html":
         return
diff --git a/myst_nb/warnings_.py b/myst_nb/warnings_.py
index de8e66ce..165d0362 100644
--- a/myst_nb/warnings_.py
+++ b/myst_nb/warnings_.py
@@ -35,9 +35,7 @@ class MystNBWarnings(Enum):
     """Issue with a cell's configuration or metadata."""
 
 
-def _is_suppressed_warning(
-    type: str, subtype: str, suppress_warnings: Sequence[str]
-) -> bool:
+def _is_suppressed_warning(type: str, subtype: str, suppress_warnings: Sequence[str]) -> bool:
     """Check whether the warning is suppressed or not.
 
     Mirrors:
diff --git a/pyproject.toml b/pyproject.toml
index 57c16f0e..ac554918 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -144,13 +144,13 @@ follow_imports = "skip"
 module = ["docutils.*", "nbformat.*", "jupyter_cache.*", "IPython.*", "pygments.*"]
 ignore_missing_imports = true
 
-[tool.isort]
-profile = "black"
-src_paths = ["myst_nb", "tests"]
-force_sort_within_sections = true
-
 [tool.ruff]
-extend-ignore = [
+line-length = 100
+
+[tool.ruff.lint]
+ignore = [
   "E203",  # Whitespace before punctuation
 ]
-line-length = 100
+
+[tool.ruff.lint.isort]
+force-sort-within-sections = true
diff --git a/tests/conftest.py b/tests/conftest.py
index 1a50b8b3..c4340457 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -171,9 +171,7 @@ def sphinx_run(sphinx_params, make_app, tmp_path):
     os.chdir(base_dir)
     (srcdir / "conf.py").write_text(
         "# conf overrides (passed directly to sphinx):\n"
-        + "\n".join(
-            ["# " + ll for ll in json.dumps(confoverrides, indent=2).splitlines()]
-        )
+        + "\n".join(["# " + ll for ll in json.dumps(confoverrides, indent=2).splitlines()])
         + "\n"
     )
 
@@ -181,9 +179,7 @@ def sphinx_run(sphinx_params, make_app, tmp_path):
         nb_path = TEST_FILE_DIR.joinpath(nb_file)
         assert nb_path.exists(), nb_path
         (srcdir / nb_file).parent.mkdir(exist_ok=True)
-        (srcdir / nb_file).write_text(
-            nb_path.read_text(encoding="utf-8"), encoding="utf-8"
-        )
+        (srcdir / nb_file).write_text(nb_path.read_text(encoding="utf-8"), encoding="utf-8")
 
     nocolor()
 
@@ -195,9 +191,7 @@ def sphinx_run(sphinx_params, make_app, tmp_path):
         from sphinx.testing.path import path
 
         app_srcdir = path(os.fspath(srcdir))
-    app = make_app(
-        buildername=buildername, srcdir=app_srcdir, confoverrides=confoverrides
-    )
+    app = make_app(buildername=buildername, srcdir=app_srcdir, confoverrides=confoverrides)
     yield SphinxFixture(app, sphinx_params["files"])
 
 
@@ -212,8 +206,7 @@ def empty_non_deterministic_outputs(cell):
             item.data["image/png"] = ""
         if "filenames" in item.get("metadata", {}):
             item["metadata"]["filenames"] = {
-                k: os.path.basename(v)
-                for k, v in item["metadata"]["filenames"].items()
+                k: os.path.basename(v) for k, v in item["metadata"]["filenames"].items()
             }
         if "traceback" in item:
             item["traceback"] = [strip_ansi(line) for line in item["traceback"]]
@@ -233,13 +226,9 @@ def _check_nbs(obtained_filename, expected_filename):
                 empty_non_deterministic_outputs(cell)
                 cell.id = "none"
         diff = diff_notebooks(obtained_nb, expect_nb)
-        filename_without_path = str(expected_filename)[
-            str(expected_filename).rfind("/") + 1 :
-        ]
+        filename_without_path = str(expected_filename)[str(expected_filename).rfind("/") + 1 :]
         if diff:
-            raise AssertionError(
-                pretty_print_diff(obtained_nb, diff, str(filename_without_path))
-            )
+            raise AssertionError(pretty_print_diff(obtained_nb, diff, str(filename_without_path)))
 
     return _check_nbs
 
@@ -250,15 +239,11 @@ def _func(doctree):
         if os.name == "nt":  # on Windows file paths are absolute
             for node in doctree.traverse(image_node):  # type: image_node
                 if "candidates" in node:
-                    node["candidates"][
-                        "*"
-                    ] = "_build/jupyter_execute/" + os.path.basename(
+                    node["candidates"]["*"] = "_build/jupyter_execute/" + os.path.basename(
                         node["candidates"]["*"]
                     )
                 if "uri" in node:
-                    node["uri"] = "_build/jupyter_execute/" + os.path.basename(
-                        node["uri"]
-                    )
+                    node["uri"] = "_build/jupyter_execute/" + os.path.basename(node["uri"])
 
         return doctree
 
     return _func
diff --git a/tests/test_codecell_file.py b/tests/test_codecell_file.py
index 1af9ce8f..366c0a55 100644
--- a/tests/test_codecell_file.py
+++ b/tests/test_codecell_file.py
@@ -66,9 +66,7 @@ def test_codecell_file_warnings(sphinx_run, file_regression, check_nbs, get_test
     assert set(sphinx_run.env.nb_metadata["mystnb_codecell_file_warnings"].keys()) == {
         "exec_data",
     }
-    assert (
-        sphinx_run.env.metadata["mystnb_codecell_file_warnings"]["author"] == "Aakash"
-    )
+    assert sphinx_run.env.metadata["mystnb_codecell_file_warnings"]["author"] == "Aakash"
     assert sphinx_run.env.metadata["mystnb_codecell_file_warnings"]["kernelspec"] == {
         "display_name": "Python 3",
         "language": "python",
diff --git a/tests/test_execute.py b/tests/test_execute.py
index f39d1df0..fe3ac932 100644
--- a/tests/test_execute.py
+++ b/tests/test_execute.py
@@ -114,9 +114,7 @@ def test_exclude_path(sphinx_run, file_regression):
     sphinx_run.build()
     assert not NbMetadataCollector.new_exec_data(sphinx_run.env)
     assert "Executing" not in sphinx_run.status(), sphinx_run.status()
-    file_regression.check(
-        sphinx_run.get_doctree().pformat(), extension=".xml", encoding="utf-8"
-    )
+    file_regression.check(sphinx_run.get_doctree().pformat(), extension=".xml", encoding="utf-8")
 
 
 @pytest.mark.skipif(ipy_version[0] < 8, reason="Error message changes for ipython v8")
@@ -206,9 +204,7 @@ def test_raise_on_error_cache(sphinx_run):
     sphinx_run.build()
 
 
-@pytest.mark.sphinx_params(
-    "complex_outputs_unrun.ipynb", conf={"nb_execution_mode": "cache"}
-)
+@pytest.mark.sphinx_params("complex_outputs_unrun.ipynb", conf={"nb_execution_mode": "cache"})
 def test_complex_outputs_unrun_cache(sphinx_run, file_regression, check_nbs):
     sphinx_run.build()
     # print(sphinx_run.status())
@@ -218,18 +214,14 @@ def test_complex_outputs_unrun_cache(sphinx_run, file_regression, check_nbs):
     # Widget view and widget state should make it into the HTML
     scripts = sphinx_run.get_html().select("script")
     assert any(
-        "application/vnd.jupyter.widget-view+json" in script.get("type", "")
-        for script in scripts
+        "application/vnd.jupyter.widget-view+json" in script.get("type", "") for script in scripts
     )
     assert any(
-        "application/vnd.jupyter.widget-state+json" in script.get("type", "")
-        for script in scripts
+        "application/vnd.jupyter.widget-state+json" in script.get("type", "") for script in scripts
     )
 
 
-@pytest.mark.sphinx_params(
-    "complex_outputs_unrun.ipynb", conf={"nb_execution_mode": "auto"}
-)
+@pytest.mark.sphinx_params("complex_outputs_unrun.ipynb", conf={"nb_execution_mode": "auto"})
 def test_complex_outputs_unrun_auto(sphinx_run, file_regression, check_nbs):
     sphinx_run.build()
     # print(sphinx_run.status())
@@ -239,12 +231,10 @@ def test_complex_outputs_unrun_auto(sphinx_run, file_regression, check_nbs):
     # Widget view and widget state should make it into the HTML
     scripts = sphinx_run.get_html().select("script")
     assert any(
-        "application/vnd.jupyter.widget-view+json" in script.get("type", "")
-        for script in scripts
+        "application/vnd.jupyter.widget-view+json" in script.get("type", "") for script in scripts
     )
     assert any(
-        "application/vnd.jupyter.widget-state+json" in script.get("type", "")
-        for script in scripts
+        "application/vnd.jupyter.widget-state+json" in script.get("type", "") for script in scripts
     )
 
@@ -319,9 +309,7 @@ def test_nb_exec_table(sphinx_run, file_regression):
     sphinx_run.build()
     # print(sphinx_run.status())
     assert not sphinx_run.warnings()
-    file_regression.check(
-        sphinx_run.get_doctree().pformat(), extension=".xml", encoding="utf-8"
-    )
+    file_regression.check(sphinx_run.get_doctree().pformat(), extension=".xml", encoding="utf-8")
     # print(sphinx_run.get_html())
     rows = sphinx_run.get_html().select("table.docutils tr")
     assert any("nb_exec_table" in row.text for row in rows)
diff --git a/tests/test_parser.py b/tests/test_parser.py
index 37f510b8..ccc78af0 100644
--- a/tests/test_parser.py
+++ b/tests/test_parser.py
@@ -23,15 +23,11 @@ def test_basic_run(sphinx_run, file_regression):
         "language": "python",
         "name": "python3",
     }
-    file_regression.check(
-        sphinx_run.get_doctree().pformat(), extension=".xml", encoding="utf-8"
-    )
+    file_regression.check(sphinx_run.get_doctree().pformat(), extension=".xml", encoding="utf-8")
 
     filenames = {
         p.name
-        for p in Path(
-            os.fspath(sphinx_run.app.srcdir / "_build" / "jupyter_execute")
-        ).iterdir()
+        for p in Path(os.fspath(sphinx_run.app.srcdir / "_build" / "jupyter_execute")).iterdir()
     }
     assert filenames == {"basic_run.ipynb"}
 
@@ -64,16 +60,12 @@ def test_complex_outputs(sphinx_run, file_regression):
     }
     doctree_string = sphinx_run.get_doctree().pformat()
     if os.name == "nt":  # on Windows image file paths are absolute
-        doctree_string = doctree_string.replace(
-            Path(sphinx_run.app.srcdir).as_posix() + "/", ""
-        )
+        doctree_string = doctree_string.replace(Path(sphinx_run.app.srcdir).as_posix() + "/", "")
     file_regression.check(doctree_string, extension=".xml", encoding="utf-8")
 
     filenames = {
         p.name.replace(".jpeg", ".jpg")
-        for p in Path(
-            os.fspath(sphinx_run.app.srcdir / "_build" / "jupyter_execute")
-        ).iterdir()
+        for p in Path(os.fspath(sphinx_run.app.srcdir / "_build" / "jupyter_execute")).iterdir()
     }
     # print(filenames)
     assert filenames == {
@@ -94,9 +86,7 @@ def test_toctree_in_ipynb(sphinx_run, file_regression):
     sphinx_run.build()
     print(sphinx_run.status())
     print(sphinx_run.warnings())
-    file_regression.check(
-        sphinx_run.get_doctree("latex_build/other").pformat(), extension=".xml"
-    )
+    file_regression.check(sphinx_run.get_doctree("latex_build/other").pformat(), extension=".xml")
     assert sphinx_run.warnings() == ""
diff --git a/tests/test_text_based.py b/tests/test_text_based.py
index 5140de47..2ac4a12f 100644
--- a/tests/test_text_based.py
+++ b/tests/test_text_based.py
@@ -29,9 +29,7 @@ def test_basic_run(sphinx_run, file_regression, check_nbs):
     file_regression.check(
         sphinx_run.get_nb(), check_fn=check_nbs, extension=".ipynb", encoding="utf-8"
     )
-    file_regression.check(
-        sphinx_run.get_doctree().pformat(), extension=".xml", encoding="utf-8"
-    )
+    file_regression.check(sphinx_run.get_doctree().pformat(), extension=".xml", encoding="utf-8")
 
 
 @pytest.mark.sphinx_params(
@@ -54,9 +52,7 @@ def test_basic_run_exec_off(sphinx_run, file_regression, check_nbs):
     file_regression.check(
         sphinx_run.get_nb(), check_fn=check_nbs, extension=".ipynb", encoding="utf-8"
    )
-    file_regression.check(
-        sphinx_run.get_doctree().pformat(), extension=".xml", encoding="utf-8"
-    )
+    file_regression.check(sphinx_run.get_doctree().pformat(), extension=".xml", encoding="utf-8")
 
 
 @pytest.mark.sphinx_params(