diff --git a/.JuliaFormatter.toml b/.JuliaFormatter.toml
new file mode 100644
index 0000000000..0661727bba
--- /dev/null
+++ b/.JuliaFormatter.toml
@@ -0,0 +1,20 @@
+margin = 92
+indent = 4
+remove_extra_newlines = false
+always_use_return = false
+short_to_long_function_def = false
+always_for_in = false
+import_to_using = false
+pipe_to_function_call = false
+whitespace_ops_in_indices = false
+whitespace_in_kwargs = false
+whitespace_typedefs = false
+annotate_untyped_fields_with_any = false
+format_docstrings = false
+conditional_to_if = true
+normalize_line_endings = "unix"
+align_assignment = true
+align_struct_field = true
+align_pair_arrow = true
+align_matrix = true
+trailing_comma = "nothing"
diff --git a/docs/DocumenterShowcase.jl b/docs/DocumenterShowcase.jl
index 8f0d29e1a3..f4a8a77b67 100644
--- a/docs/DocumenterShowcase.jl
+++ b/docs/DocumenterShowcase.jl
@@ -89,17 +89,20 @@ function hello(who)
end
struct SVGCircle
- stroke :: String
- fill :: String
+ stroke::String
+ fill::String
end
function Base.show(io, ::MIME"image/svg+xml", c::SVGCircle)
- write(io, """
-
- """)
+ write(
+ io,
+ """
+
+"""
+ )
end
"The type definition."
@@ -109,6 +112,6 @@ struct Foo{T,S} end
Foo() = Foo{Nothing,Nothing}()
"Constructor `Foo{T}()` with one parametric argument."
-Foo{T}() where T = Foo{T,Nothing}()
+Foo{T}() where {T} = Foo{T,Nothing}()
end # module
diff --git a/docs/changelog.jl b/docs/changelog.jl
index 4e83a1de94..a91b7f253c 100644
--- a/docs/changelog.jl
+++ b/docs/changelog.jl
@@ -3,5 +3,5 @@ using Changelog
Changelog.generate(
Changelog.CommonMark(),
joinpath(@__DIR__, "..", "CHANGELOG.md");
- repo = "JuliaDocs/Documenter.jl",
+ repo="JuliaDocs/Documenter.jl",
)
diff --git a/docs/instantiate.jl b/docs/instantiate.jl
index f5d0508941..e0b676f4aa 100644
--- a/docs/instantiate.jl
+++ b/docs/instantiate.jl
@@ -11,12 +11,14 @@ cd(project_directory) do
@info "DocumenterTools already cloned to dev/DocumenterTools"
run(`git -C dev/DocumenterTools fetch origin`)
else
- run(`git clone -n https://github.com/JuliaDocs/DocumenterTools.jl.git dev/DocumenterTools`)
+ run(
+ `git clone -n https://github.com/JuliaDocs/DocumenterTools.jl.git dev/DocumenterTools`
+ )
end
run(`git -C dev/DocumenterTools checkout documenter-v0.1.17+1.0.0`)
Pkg.develop([
- PackageSpec(path = documenter_directory),
- PackageSpec(path = "dev/DocumenterTools"),
+ PackageSpec(path=documenter_directory),
+ PackageSpec(path="dev/DocumenterTools"),
])
Pkg.instantiate()
end
diff --git a/docs/make.jl b/docs/make.jl
index 856d62b7dc..d49d05074f 100644
--- a/docs/make.jl
+++ b/docs/make.jl
@@ -15,59 +15,63 @@ Changelog.generate(
Changelog.Documenter(),
joinpath(@__DIR__, "..", "CHANGELOG.md"),
joinpath(@__DIR__, "src", "release-notes.md");
- repo = "JuliaDocs/Documenter.jl",
+ repo="JuliaDocs/Documenter.jl",
)
makedocs(
- modules = [Documenter, DocumenterTools, DocumenterShowcase],
- format = if "pdf" in ARGS
- Documenter.LaTeX(platform = "docker")
+ modules=[Documenter, DocumenterTools, DocumenterShowcase],
+ format=if "pdf" in ARGS
+ Documenter.LaTeX(platform="docker")
else
Documenter.HTML(
# Use clean URLs, unless built as a "local" build
- prettyurls = !("local" in ARGS),
- canonical = "https://documenter.juliadocs.org/stable/",
- assets = ["assets/favicon.ico"],
- analytics = "UA-136089579-2",
- highlights = ["yaml"],
- ansicolor = true,
- size_threshold_ignore = ["release-notes.md"],
+ prettyurls=!("local" in ARGS),
+ canonical="https://documenter.juliadocs.org/stable/",
+ assets=["assets/favicon.ico"],
+ analytics="UA-136089579-2",
+ highlights=["yaml"],
+ ansicolor=true,
+ size_threshold_ignore=["release-notes.md"],
)
end,
- build = ("pdf" in ARGS) ? "build-pdf" : "build",
- debug = ("pdf" in ARGS),
- sitename = "Documenter.jl",
- authors = "Michael Hatherly, Morten Piibeleht, and contributors.",
- linkcheck = "linkcheck" in ARGS,
- linkcheck_ignore = [
+ build=("pdf" in ARGS) ? "build-pdf" : "build",
+ debug=("pdf" in ARGS),
+ sitename="Documenter.jl",
+ authors="Michael Hatherly, Morten Piibeleht, and contributors.",
+ linkcheck="linkcheck" in ARGS,
+ linkcheck_ignore=[
# We'll ignore links that point to GitHub's edit pages, as they redirect to the
# login screen and cause a warning:
r"https://github.com/([A-Za-z0-9_.-]+)/([A-Za-z0-9_.-]+)/edit(.*)",
"https://nvd.nist.gov/vuln/detail/CVE-2018-16487",
- ] ∪ (get(ENV, "GITHUB_ACTIONS", nothing) == "true" ? [
- # Extra ones we ignore only on CI.
- #
- # It seems that CTAN blocks GitHub Actions?
- "https://ctan.org/pkg/minted",
- ] : []),
- pages = [
+ ] ∪ (
+ if get(ENV, "GITHUB_ACTIONS", nothing) == "true"
+ [
+ # Extra ones we ignore only on CI.
+ #
+ # It seems that CTAN blocks GitHub Actions?
+ "https://ctan.org/pkg/minted",
+ ]
+ else
+ []
+ end
+ ),
+ pages=[
"Home" => "index.md",
"Manual" => Any[
- "Guide" => "man/guide.md",
+ "Guide"=>"man/guide.md",
"man/examples.md",
"man/syntax.md",
"man/doctests.md",
"man/latex.md",
- hide("man/hosting.md", [
- "man/hosting/walkthrough.md"
- ]),
+ hide("man/hosting.md", ["man/hosting/walkthrough.md"]),
"man/other-formats.md",
],
"showcase.md",
"Reference" => Any[
- "Public API" => "lib/public.md",
+ "Public API"=>"lib/public.md",
"lib/remote-links.md",
- "Semantic versioning" => "lib/semver.md",
+ "Semantic versioning"=>"lib/semver.md",
],
"Developers" => [
"contributing.md",
@@ -79,8 +83,8 @@ makedocs(
],
"release-notes.md",
],
- warnonly = ("strict=false" in ARGS),
- doctest = ("doctest=only" in ARGS) ? :only : true,
+ warnonly=("strict=false" in ARGS),
+ doctest=("doctest=only" in ARGS) ? :only : true,
)
if "pdf" in ARGS
@@ -89,21 +93,23 @@ if "pdf" in ARGS
let files = readdir(joinpath(@__DIR__, "build-pdf"))
for f in files
if startswith(f, "Documenter.jl") && endswith(f, ".pdf")
- mv(joinpath(@__DIR__, "build-pdf", f),
- joinpath(@__DIR__, "build-pdf", "commit", f))
+ mv(
+ joinpath(@__DIR__, "build-pdf", f),
+ joinpath(@__DIR__, "build-pdf", "commit", f)
+ )
end
end
end
deploydocs(
- repo = "github.com/JuliaDocs/Documenter.jl.git",
- target = "pdf/build-pdf/commit",
- branch = "gh-pages-pdf",
- forcepush = true,
+ repo="github.com/JuliaDocs/Documenter.jl.git",
+ target="pdf/build-pdf/commit",
+ branch="gh-pages-pdf",
+ forcepush=true,
)
else
deploydocs(
- repo = "github.com/JuliaDocs/Documenter.jl.git",
- target = "build",
- push_preview = true,
+ repo="github.com/JuliaDocs/Documenter.jl.git",
+ target="build",
+ push_preview=true,
)
end
diff --git a/src/DocMeta.jl b/src/DocMeta.jl
index fedf1a70b8..a5f63bdfe6 100644
--- a/src/DocMeta.jl
+++ b/src/DocMeta.jl
@@ -68,8 +68,16 @@ If `recursive` is set to `true`, it sets the same metadata value for all the sub
If `warn` is `true`, it prints a warning when `key` already exists and it gets rewritten.
"""
function setdocmeta!(m::Module, key::Symbol, value; warn=true, recursive=false)
- key in keys(VALIDMETA) || throw(ArgumentError("Invalid metadata key\nValid keys are: $(join(keys(VALIDMETA), ", "))"))
- isa(value, VALIDMETA[key]) || throw(ArgumentError("Bad value type ($(typeof(value))) for metadata key $(key). Must be <: $(VALIDMETA[key])"))
+ key in keys(VALIDMETA) || throw(
+ ArgumentError(
+ "Invalid metadata key\nValid keys are: $(join(keys(VALIDMETA), ", "))"
+ )
+ )
+ isa(value, VALIDMETA[key]) || throw(
+ ArgumentError(
+ "Bad value type ($(typeof(value))) for metadata key $(key). Must be <: $(VALIDMETA[key])"
+ )
+ )
if recursive
for mod in Documenter.submodules(m)
setdocmeta!(mod, key, value; warn=warn, recursive=false)
diff --git a/src/DocSystem.jl b/src/DocSystem.jl
index 0124d35870..468a4cfdf5 100644
--- a/src/DocSystem.jl
+++ b/src/DocSystem.jl
@@ -42,8 +42,10 @@ binding(f::Function) = binding(parentmodule(f), nameof(f))
#
# Note that `IntrinsicFunction` is exported from `Base` in `0.4`, but not in `0.5`.
#
-let INTRINSICS = Dict(map(s -> getfield(Core.Intrinsics, s) => s, names(Core.Intrinsics, all=true)))
- global binding(i::Core.IntrinsicFunction) = binding(Core.Intrinsics, INTRINSICS[i]::Symbol)
+let INTRINSICS =
+ Dict(map(s -> getfield(Core.Intrinsics, s) => s, names(Core.Intrinsics, all=true)))
+ global binding(i::Core.IntrinsicFunction) =
+ binding(Core.Intrinsics, INTRINSICS[i]::Symbol)
end
#
@@ -59,11 +61,15 @@ end
#
# Pseudo-eval of `Expr`s to find their equivalent `Binding`.
#
-binding(m::Module, x::Expr) =
- Meta.isexpr(x, :.) ? binding(getmod(m, x.args[1]), x.args[2].value) :
- Meta.isexpr(x, [:call, :macrocall, :curly]) ? binding(m, x.args[1]) :
- Meta.isexpr(x, :where) ? binding(m, x.args[1].args[1]) :
- error("`binding` cannot understand expression `$x`.")
+binding(m::Module, x::Expr) = if Meta.isexpr(x, :.)
+ binding(getmod(m, x.args[1]), x.args[2].value)
+elseif Meta.isexpr(x, [:call, :macrocall, :curly])
+ binding(m, x.args[1])
+elseif Meta.isexpr(x, :where)
+ binding(m, x.args[1].args[1])
+else
+ error("`binding` cannot understand expression `$x`.")
+end
# Helper methods for the above `binding` method.
getmod(m::Module, x::Expr) = getfield(getmod(m, x.args[1]), x.args[2].value)
@@ -114,7 +120,7 @@ The optional keyword arguments are used to add new data to the `DocStr`'s
`.data` dictionary.
"""
function docstr(md::Markdown.MD; kws...)
- data = Dict{Symbol, Any}(
+ data = Dict{Symbol,Any}(
:path => md.meta[:path],
:module => md.meta[:module],
:linenumber => 0,
@@ -146,7 +152,7 @@ function convertmeta(meta::IdDict{Any,Any})
if !haskey(CACHED, meta)
docs = IdDict{Any,Any}()
for (k, v) in meta
- if !isa(k, Union{Number, AbstractString, IdDict{Any,Any}})
+ if !isa(k, Union{Number,AbstractString,IdDict{Any,Any}})
docs[binding(k)] = multidoc(v)
end
end
@@ -171,11 +177,11 @@ Find all `DocStr` objects that match the provided arguments exactly.
Return a `Vector{DocStr}` ordered by definition order.
"""
function getspecificdocs(
- binding::Docs.Binding,
- typesig::Type = Union{},
- compare = (==),
- modules = Docs.modules,
- )
+ binding::Docs.Binding,
+ typesig::Type=Union{},
+ compare=(==),
+ modules=Docs.modules,
+)
# Fall back to searching all modules if user provides no modules.
modules = isempty(modules) ? Docs.modules : modules
# Keywords are special-cased within the docsystem. Handle those first.
@@ -208,12 +214,12 @@ That is, if [`getspecificdocs`](@ref) fails, get docs for aliases of
try getting docs for `<:`.
"""
function getdocs(
- binding::Docs.Binding,
- typesig::Type = Union{};
- compare = (==),
- modules = Docs.modules,
- aliases = true,
- )
+ binding::Docs.Binding,
+ typesig::Type=Union{};
+ compare=(==),
+ modules=Docs.modules,
+ aliases=true,
+)
# First, we try to find the docs that _exactly_ match the binding. If you
# have aliases, you can have a separate docstring attached to the alias.
results = getspecificdocs(binding, typesig, compare, modules)
@@ -244,7 +250,7 @@ searching for the `Binding` in the docsystem.
Note that when conversion fails this method returns an empty `Vector{DocStr}`.
"""
-function getdocs(object::Any, typesig::Type = Union{}; kws...)
+function getdocs(object::Any, typesig::Type=Union{}; kws...)
binding = aliasof(object, object)
binding === object ? DocStr[] : getdocs(binding, typesig; kws...)
end
@@ -287,7 +293,7 @@ in case `Base.Docs.parsedoc` fails with an exception.
"""
function parsedoc(docstr::DocStr)
md = try
- Base.Docs.parsedoc(docstr) :: Markdown.MD
+ Base.Docs.parsedoc(docstr)::Markdown.MD
catch exception
@error """
parsedoc failed to parse a docstring into Markdown. This indicates a problem with the docstring.
diff --git a/src/Documenter.jl b/src/Documenter.jl
index 3c9e066714..0c9d56ebf7 100644
--- a/src/Documenter.jl
+++ b/src/Documenter.jl
@@ -38,15 +38,24 @@ const DOCUMENTER_VERSION = let
end
# Potentially sensitive variables to be removed from environment when not needed
-const NO_KEY_ENV = Dict(
- "DOCUMENTER_KEY" => nothing,
- "DOCUMENTER_KEY_PREVIEWS" => nothing,
-)
+const NO_KEY_ENV = Dict("DOCUMENTER_KEY" => nothing, "DOCUMENTER_KEY_PREVIEWS" => nothing)
# Names of possible internal errors
-const ERROR_NAMES = [:autodocs_block, :cross_references, :docs_block, :doctest,
- :eval_block, :example_block, :footnote, :linkcheck_remotes, :linkcheck,
- :meta_block, :missing_docs, :parse_error, :setup_block]
+const ERROR_NAMES = [
+ :autodocs_block,
+ :cross_references,
+ :docs_block,
+ :doctest,
+ :eval_block,
+ :example_block,
+ :footnote,
+ :linkcheck_remotes,
+ :linkcheck,
+ :meta_block,
+ :missing_docs,
+ :parse_error,
+ :setup_block
+]
"""
abstract type Plugin end
@@ -97,8 +106,8 @@ import .LaTeXWriter: LaTeX
# User Interface.
# ---------------
-export makedocs, deploydocs, hide, doctest, DocMeta, asset, Remotes,
- KaTeX, MathJax, MathJax2, MathJax3
+export makedocs,
+ deploydocs, hide, doctest, DocMeta, asset, Remotes, KaTeX, MathJax, MathJax2, MathJax3
include("makedocs.jl")
include("deployconfig.jl")
diff --git a/src/anchors.jl b/src/anchors.jl
index 35fe92797f..573e1c1a3b 100644
--- a/src/anchors.jl
+++ b/src/anchors.jl
@@ -24,7 +24,7 @@ mutable struct Anchor
nth :: Int
# Reverse-lookup of .object for MarkdownAST trees. This is intentionally
# uninitialized until set in Documenter.markdownast()
- node :: MarkdownAST.Node{Nothing}
+ node::MarkdownAST.Node{Nothing}
Anchor(object) = new(object, 0, "", "", 1)
end
@@ -38,8 +38,8 @@ Tree structure representing anchors in a document and their relationships with e
Each `id` maps to a `file` which in turn maps to a vector of `Anchor` objects.
"""
mutable struct AnchorMap
- map :: Dict{String, Dict{String, Vector{Anchor}}}
- count :: Int
+ map::Dict{String,Dict{String,Vector{Anchor}}}
+ count::Int
AnchorMap() = new(Dict(), 0)
end
@@ -55,7 +55,7 @@ Either an actual [`Anchor`](@ref) object may be provided or any other object whi
automatically wrapped in an [`Anchor`](@ref) before being added to the [`AnchorMap`](@ref).
"""
function anchor_add!(m::AnchorMap, anchor::Anchor, id, file)
- filemap = get!(m.map, id, Dict{String, Vector{Anchor}}())
+ filemap = get!(m.map, id, Dict{String,Vector{Anchor}}())
anchors = get!(filemap, file, Anchor[])
push!(anchors, anchor)
anchor.order = m.count += 1
@@ -75,9 +75,10 @@ $(SIGNATURES)
Does the given `id` exist within the [`AnchorMap`](@ref)? A `file` and integer `n` may also
be provided to narrow the search for existence.
"""
-anchor_exists(m::AnchorMap, id, file, n) = anchor_exists(m, id, file) && 1 ≤ n ≤ length(m.map[id][file])
-anchor_exists(m::AnchorMap, id, file) = anchor_exists(m, id) && haskey(m.map[id], file)
-anchor_exists(m::AnchorMap, id) = haskey(m.map, id)
+anchor_exists(m::AnchorMap, id, file, n) =
+ anchor_exists(m, id, file) && 1 ≤ n ≤ length(m.map[id][file])
+anchor_exists(m::AnchorMap, id, file) = anchor_exists(m, id) && haskey(m.map[id], file)
+anchor_exists(m::AnchorMap, id) = haskey(m.map, id)
# Anchor uniqueness.
# ------------------
@@ -89,12 +90,11 @@ Is the `id` unique within the given [`AnchorMap`](@ref)? May also specify the `f
"""
function anchor_isunique(m::AnchorMap, id)
anchor_exists(m, id) &&
- length(m.map[id]) === 1 &&
- anchor_isunique(m, id, first(first(m.map[id])))
+ length(m.map[id]) === 1 &&
+ anchor_isunique(m, id, first(first(m.map[id])))
end
function anchor_isunique(m::AnchorMap, id, file)
- anchor_exists(m, id, file) &&
- length(m.map[id][file]) === 1
+ anchor_exists(m, id, file) && length(m.map[id][file]) === 1
end
# Get anchor.
@@ -107,19 +107,13 @@ Returns the [`Anchor`](@ref) object matching `id`. `file` and `n` may also be pr
`Anchor` is returned, or `nothing` in case of no match.
"""
function anchor(m::AnchorMap, id)
- anchor_isunique(m, id) ?
- anchor(m, id, first(first(m.map[id])), 1) :
- nothing
+ anchor_isunique(m, id) ? anchor(m, id, first(first(m.map[id])), 1) : nothing
end
function anchor(m::AnchorMap, id, file)
- anchor_isunique(m, id, file) ?
- anchor(m, id, file, 1) :
- nothing
+ anchor_isunique(m, id, file) ? anchor(m, id, file, 1) : nothing
end
function anchor(m::AnchorMap, id, file, n)
- anchor_exists(m, id, file, n) ?
- m.map[id][file][n] :
- nothing
+ anchor_exists(m, id, file, n) ? m.map[id][file][n] : nothing
end
"""
diff --git a/src/builder_pipeline.jl b/src/builder_pipeline.jl
index af98526ae3..7bcd0e8f8f 100644
--- a/src/builder_pipeline.jl
+++ b/src/builder_pipeline.jl
@@ -6,57 +6,57 @@ These actions may involve creating directory structures, expanding templates, ru
doctests, etc.
"""
module Builder
- import ..Documenter
- import ..Documenter.Selectors
-
- """
- The default document processing "pipeline", which consists of the following actions:
-
- - [`SetupBuildDirectory`](@ref)
- - [`Doctest`](@ref)
- - [`ExpandTemplates`](@ref)
- - [`CheckDocument`](@ref)
- - [`Populate`](@ref)
- - [`RenderDocument`](@ref)
-
- """
- abstract type DocumentPipeline <: Selectors.AbstractSelector end
-
- """
- Creates the correct directory layout within the `build` folder and parses markdown files.
- """
- abstract type SetupBuildDirectory <: DocumentPipeline end
-
- """
- Runs all the doctests in all docstrings and Markdown files.
- """
- abstract type Doctest <: DocumentPipeline end
-
- """
- Executes a sequence of actions on each node of the parsed markdown files in turn.
- """
- abstract type ExpandTemplates <: DocumentPipeline end
-
- """
- Finds and sets URLs for each `@ref` link in the document to the correct destinations.
- """
- abstract type CrossReferences <: DocumentPipeline end
-
- """
- Checks that all documented objects are included in the document and runs doctests on all
- valid Julia code blocks.
- """
- abstract type CheckDocument <: DocumentPipeline end
-
- """
- Populates the `ContentsNode`s and `IndexNode`s with links.
- """
- abstract type Populate <: DocumentPipeline end
-
- """
- Writes the document tree to the `build` directory.
- """
- abstract type RenderDocument <: DocumentPipeline end
+import ..Documenter
+import ..Documenter.Selectors
+
+"""
+The default document processing "pipeline", which consists of the following actions:
+
+- [`SetupBuildDirectory`](@ref)
+- [`Doctest`](@ref)
+- [`ExpandTemplates`](@ref)
+- [`CheckDocument`](@ref)
+- [`Populate`](@ref)
+- [`RenderDocument`](@ref)
+
+"""
+abstract type DocumentPipeline <: Selectors.AbstractSelector end
+
+"""
+Creates the correct directory layout within the `build` folder and parses markdown files.
+"""
+abstract type SetupBuildDirectory <: DocumentPipeline end
+
+"""
+Runs all the doctests in all docstrings and Markdown files.
+"""
+abstract type Doctest <: DocumentPipeline end
+
+"""
+Executes a sequence of actions on each node of the parsed markdown files in turn.
+"""
+abstract type ExpandTemplates <: DocumentPipeline end
+
+"""
+Finds and sets URLs for each `@ref` link in the document to the correct destinations.
+"""
+abstract type CrossReferences <: DocumentPipeline end
+
+"""
+Checks that all documented objects are included in the document and runs doctests on all
+valid Julia code blocks.
+"""
+abstract type CheckDocument <: DocumentPipeline end
+
+"""
+Populates the `ContentsNode`s and `IndexNode`s with links.
+"""
+abstract type Populate <: DocumentPipeline end
+
+"""
+Writes the document tree to the `build` directory.
+"""
+abstract type RenderDocument <: DocumentPipeline end
end
Selectors.order(::Type{Builder.SetupBuildDirectory}) = 1.0
@@ -67,15 +67,16 @@ Selectors.order(::Type{Builder.CheckDocument}) = 4.0
Selectors.order(::Type{Builder.Populate}) = 5.0
Selectors.order(::Type{Builder.RenderDocument}) = 6.0
-Selectors.matcher(::Type{T}, doc::Documenter.Document) where {T <: Builder.DocumentPipeline} = true
+Selectors.matcher(::Type{T}, doc::Documenter.Document) where {T<:Builder.DocumentPipeline} =
+ true
-Selectors.strict(::Type{T}) where {T <: Builder.DocumentPipeline} = false
+Selectors.strict(::Type{T}) where {T<:Builder.DocumentPipeline} = false
function Selectors.runner(::Type{Builder.SetupBuildDirectory}, doc::Documenter.Document)
@info "SetupBuildDirectory: setting up build directory."
# Frequently used fields.
- build = doc.user.build
+ build = doc.user.build
source = doc.user.source
workdir = doc.user.workdir
@@ -84,7 +85,7 @@ function Selectors.runner(::Type{Builder.SetupBuildDirectory}, doc::Documenter.D
# We create the .user.build directory.
# If .user.clean is set, we first clean the existing directory.
- doc.user.clean && isdir(build) && rm(build; recursive = true)
+ doc.user.clean && isdir(build) && rm(build; recursive=true)
isdir(build) || mkpath(build)
# We'll walk over all the files in the .user.source directory.
@@ -117,7 +118,7 @@ function Selectors.runner(::Type{Builder.SetupBuildDirectory}, doc::Documenter.D
push!(mdpages, Documenter.srcpath(source, root, file))
Documenter.addpage!(doc, src, dst, wd)
else
- cp(src, dst; force = true)
+ cp(src, dst; force=true)
end
end
end
@@ -136,7 +137,7 @@ function Selectors.runner(::Type{Builder.SetupBuildDirectory}, doc::Documenter.D
# Finally we populate the .next and .prev fields of the navnodes that point
# to actual pages.
- local prev::Union{Documenter.NavNode, Nothing} = nothing
+ local prev::Union{Documenter.NavNode,Nothing} = nothing
for navnode in doc.internal.navlist
navnode.prev = prev
if prev !== nothing
@@ -163,8 +164,8 @@ string sorting, except for prioritizing `index.md` (i.e. `index.md` always comes
"""
function lt_page(a, b)
# note: length("index.md") == 8
- a = endswith(a, "index.md") ? chop(a; tail = 8) : a
- b = endswith(b, "index.md") ? chop(b; tail = 8) : b
+ a = endswith(a, "index.md") ? chop(a; tail=8) : a
+ b = endswith(b, "index.md") ? chop(b; tail=8) : b
return a < b
end
@@ -196,11 +197,14 @@ function walk_navpages(hps::Tuple, parent, doc)
walk_navpages(hps..., parent, doc)
end
-walk_navpages(title::String, children::Vector, parent, doc) = walk_navpages(true, title, nothing, children, parent, doc)
-walk_navpages(title::String, page, parent, doc) = walk_navpages(true, title, page, [], parent, doc)
+walk_navpages(title::String, children::Vector, parent, doc) =
+ walk_navpages(true, title, nothing, children, parent, doc)
+walk_navpages(title::String, page, parent, doc) =
+ walk_navpages(true, title, page, [], parent, doc)
walk_navpages(p::Pair, parent, doc) = walk_navpages(p.first, p.second, parent, doc)
-walk_navpages(ps::Vector, parent, doc) = [walk_navpages(p, parent, doc)::Documenter.NavNode for p in ps]
+walk_navpages(ps::Vector, parent, doc) =
+ [walk_navpages(p, parent, doc)::Documenter.NavNode for p in ps]
walk_navpages(src::String, parent, doc) = walk_navpages(true, nothing, src, [], parent, doc)
function Selectors.runner(::Type{Builder.Doctest}, doc::Documenter.Document)
@@ -209,7 +213,9 @@ function Selectors.runner(::Type{Builder.Doctest}, doc::Documenter.Document)
_doctest(doc.blueprint, doc)
num_errors = length(doc.internal.errors)
if (doc.user.doctest === :only || is_strict(doc, :doctest)) && num_errors > 0
- error("`makedocs` encountered $(num_errors > 1 ? "$(num_errors) doctest errors" : "a doctest error"). Terminating build")
+ error(
+ "`makedocs` encountered $(num_errors > 1 ? "$(num_errors) doctest errors" : "a doctest error"). Terminating build"
+ )
end
else
@info "Doctest: skipped."
@@ -250,9 +256,11 @@ function Selectors.runner(::Type{Builder.RenderDocument}, doc::Documenter.Docume
fatal_errors = filter(is_strict(doc), doc.internal.errors)
c = length(fatal_errors)
if c > 0
- error("`makedocs` encountered $(c > 1 ? "errors" : "an error") ["
- * join(Ref(":") .* string.(fatal_errors), ", ")
- * "] -- terminating build before rendering.")
+ error(
+ "`makedocs` encountered $(c > 1 ? "errors" : "an error") [" *
+ join(Ref(":") .* string.(fatal_errors), ", ") *
+ "] -- terminating build before rendering."
+ )
else
@info "RenderDocument: rendering document."
Documenter.render(doc)
diff --git a/src/cross_references.jl b/src/cross_references.jl
index e31b0d6284..683840e732 100644
--- a/src/cross_references.jl
+++ b/src/cross_references.jl
@@ -41,7 +41,7 @@ function crossref(doc::Documenter.Document, page, mdast::MarkdownAST.Node)
end
function local_links!(node::MarkdownAST.Node, meta, page, doc)
- @assert node.element isa Union{MarkdownAST.Link, MarkdownAST.Image}
+ @assert node.element isa Union{MarkdownAST.Link,MarkdownAST.Image}
link_url = node.element.destination
# If the link is an absolute URL, then there's nothing we need to do. We'll just
# keep the Link object as is, and it should become an external hyperlink in the writer.
@@ -59,9 +59,10 @@ function local_links!(node::MarkdownAST.Node, meta, page, doc)
if isempty(path)
if node.element isa MarkdownAST.Image
@docerror(
- doc, :cross_references,
+ doc,
+ :cross_references,
"invalid local image: path missing in $(Documenter.locrepr(page.source))",
- link=node
+ link = node
)
return
end
@@ -69,9 +70,10 @@ function local_links!(node::MarkdownAST.Node, meta, page, doc)
return
elseif Sys.iswindows() && ':' in path
@docerror(
- doc, :cross_references,
+ doc,
+ :cross_references,
"invalid local link/image: colons not allowed in paths on Windows in $(Documenter.locrepr(page.source))",
- link=node
+ link = node
)
return
end
@@ -80,9 +82,10 @@ function local_links!(node::MarkdownAST.Node, meta, page, doc)
path = normpath(joinpath(dirname(Documenter.pagekey(doc, page)), path))
if startswith(path, "..")
@docerror(
- doc, :cross_references,
+ doc,
+ :cross_references,
"invalid local link/image: path pointing to a file outside of build directory in $(Documenter.locrepr(page.source))",
- link=node
+ link = node
)
return
elseif path in keys(doc.blueprint.pages)
@@ -95,9 +98,10 @@ function local_links!(node::MarkdownAST.Node, meta, page, doc)
if node.element isa MarkdownAST.Image
if !isempty(fragment)
@docerror(
- doc, :cross_references,
+ doc,
+ :cross_references,
"invalid local image: path contains a fragment in $(Documenter.locrepr(page.source))",
- link=node
+ link = node
)
end
node.element = Documenter.LocalImage(path)
@@ -107,9 +111,10 @@ function local_links!(node::MarkdownAST.Node, meta, page, doc)
return
else
@docerror(
- doc, :cross_references,
+ doc,
+ :cross_references,
"invalid local link/image: file does not exist in $(Documenter.locrepr(page.source))",
- link=node
+ link = node
)
return
end
@@ -160,7 +165,7 @@ function xref(node::MarkdownAST.Node, meta, page, doc)
# so that we wouldn't have to duplicate the @docerror call
namedxref(node, slug, meta, page, doc)
else
- docsxref(node, slug, meta, page, doc; docref = docref)
+ docsxref(node, slug, meta, page, doc; docref=docref)
end
end
@@ -189,7 +194,8 @@ function basicxref(node::MarkdownAST.Node, meta, page, doc)
end
end
md = convert(Markdown.MD, ast)
- text = strip(sprint(Markdown.plain, Markdown.Paragraph(md.content[1].content[1].text)))
+ text =
+ strip(sprint(Markdown.plain, Markdown.Paragraph(md.content[1].content[1].text)))
if occursin(r"#[0-9]+", text)
issue_xref(node, lstrip(text, '#'), meta, page, doc)
else
@@ -217,17 +223,32 @@ function namedxref(node::MarkdownAST.Node, slug, meta, page, doc)
page = doc.blueprint.pages[pagekey]
node.element = Documenter.PageLink(page, anchor_label(anchor))
else
- @docerror(doc, :cross_references, "'$slug' is not unique in $(Documenter.locrepr(page.source)).")
+ @docerror(
+ doc,
+ :cross_references,
+ "'$slug' is not unique in $(Documenter.locrepr(page.source))."
+ )
end
else
- @docerror(doc, :cross_references, "reference for '$slug' could not be found in $(Documenter.locrepr(page.source)).")
+ @docerror(
+ doc,
+ :cross_references,
+ "reference for '$slug' could not be found in $(Documenter.locrepr(page.source))."
+ )
end
end
# Cross referencing docstrings.
# -----------------------------
-function docsxref(node::MarkdownAST.Node, code, meta, page, doc; docref = find_docref(code, meta, page))
+function docsxref(
+ node::MarkdownAST.Node,
+ code,
+ meta,
+ page,
+ doc;
+ docref=find_docref(code, meta, page)
+)
@assert node.element isa MarkdownAST.Link
# Add the link to list of local uncheck links.
doc.internal.locallinks[node.element] = node.element.destination
@@ -249,7 +270,11 @@ function docsxref(node::MarkdownAST.Node, code, meta, page, doc; docref = find_d
page = doc.blueprint.pages[pagekey]
node.element = Documenter.PageLink(page, slug)
else
- @docerror(doc, :cross_references, "no doc found for reference '[`$code`](@ref)' in $(Documenter.locrepr(page.source)).")
+ @docerror(
+ doc,
+ :cross_references,
+ "no doc found for reference '[`$code`](@ref)' in $(Documenter.locrepr(page.source))."
+ )
end
end
@@ -264,7 +289,10 @@ function find_docref(code, meta, page)
ex = Meta.parse(code)
catch err
isa(err, Meta.ParseError) || rethrow(err)
- return (error = "unable to parse the reference '[`$code`](@ref)' in $(Documenter.locrepr(page.source)).", exception = nothing)
+ return (
+ error="unable to parse the reference '[`$code`](@ref)' in $(Documenter.locrepr(page.source)).",
+ exception=nothing
+ )
end
end
mod = get(meta, :CurrentModule, Main)
@@ -275,8 +303,8 @@ function find_docref(code, meta, page)
binding = Documenter.DocSystem.binding(mod, ex)
catch err
return (
- error = "unable to get the binding for '[`$code`](@ref)' in $(Documenter.locrepr(page.source)) from expression '$(repr(ex))' in module $(mod)",
- exception = (err, catch_backtrace()),
+ error="unable to get the binding for '[`$code`](@ref)' in $(Documenter.locrepr(page.source)) from expression '$(repr(ex))' in module $(mod)",
+ exception=(err, catch_backtrace()),
)
return
end
@@ -286,13 +314,13 @@ function find_docref(code, meta, page)
typesig = Core.eval(mod, Documenter.DocSystem.signature(ex, rstrip(code)))
catch err
return (
- error = "unable to evaluate the type signature for '[`$code`](@ref)' in $(Documenter.locrepr(page.source)) from expression '$(repr(ex))' in module $(mod)",
- exception = (err, catch_backtrace()),
+ error="unable to evaluate the type signature for '[`$code`](@ref)' in $(Documenter.locrepr(page.source)) from expression '$(repr(ex))' in module $(mod)",
+ exception=(err, catch_backtrace()),
)
return
end
- return (binding = binding, typesig = typesig)
+ return (binding=binding, typesig=typesig)
end
"""
@@ -335,7 +363,7 @@ function find_object(binding, typesig)
return Documenter.Object(binding, typesig)
end
end
-function find_object(λ::Union{Function, DataType}, binding, typesig)
+function find_object(λ::Union{Function,DataType}, binding, typesig)
if hasmethod(λ, typesig)
signature = getsig(λ, typesig)
return Documenter.Object(binding, signature)
@@ -343,10 +371,11 @@ function find_object(λ::Union{Function, DataType}, binding, typesig)
return Documenter.Object(binding, typesig)
end
end
-find_object(::Union{Function, DataType}, binding, ::Union{Union,Type{Union{}}}) = Documenter.Object(binding, Union{})
+find_object(::Union{Function,DataType}, binding, ::Union{Union,Type{Union{}}}) =
+ Documenter.Object(binding, Union{})
find_object(other, binding, typesig) = Documenter.Object(binding, typesig)
-getsig(λ::Union{Function, DataType}, typesig) = Base.tuple_type_tail(which(λ, typesig).sig)
+getsig(λ::Union{Function,DataType}, typesig) = Base.tuple_type_tail(which(λ, typesig).sig)
# Issues/PRs cross referencing.
@@ -355,9 +384,14 @@ getsig(λ::Union{Function, DataType}, typesig) = Base.tuple_type_tail(which(λ,
function issue_xref(node::MarkdownAST.Node, num, meta, page, doc)
@assert node.element isa MarkdownAST.Link
# Update issue links starting with a hash, but only if our Remote supports it
- issue_url = isnothing(doc.user.remote) ? nothing : Remotes.issueurl(doc.user.remote, num)
+ issue_url =
+ isnothing(doc.user.remote) ? nothing : Remotes.issueurl(doc.user.remote, num)
if isnothing(issue_url)
- @docerror(doc, :cross_references, "unable to generate issue reference for '[`#$num`](@ref)' in $(Documenter.locrepr(page.source)).")
+ @docerror(
+ doc,
+ :cross_references,
+ "unable to generate issue reference for '[`#$num`](@ref)' in $(Documenter.locrepr(page.source))."
+ )
else
node.element.destination = issue_url
end
diff --git a/src/deployconfig.jl b/src/deployconfig.jl
index 6c10795759..3c6787a281 100644
--- a/src/deployconfig.jl
+++ b/src/deployconfig.jl
@@ -70,11 +70,11 @@ and `devurl` arguments from [`deploydocs`](@ref).
"""
function deploy_folder(cfg::DeployConfig; kwargs...)
@warn "Documenter.deploy_folder(::$(typeof(cfg)); kwargs...) not implemented. Skipping deployment."
- return DeployDecision(; all_ok = false)
+ return DeployDecision(; all_ok=false)
end
function deploy_folder(::Nothing; kwargs...)
@warn "Documenter could not auto-detect the building environment Skipping deployment."
- return DeployDecision(; all_ok = false)
+ return DeployDecision(; all_ok=false)
end
@enum AuthenticationMethod SSH HTTPS
@@ -151,26 +151,33 @@ struct Travis <: DeployConfig
travis_event_type::String
end
function Travis()
- travis_branch = get(ENV, "TRAVIS_BRANCH", "")
- travis_pull_request = get(ENV, "TRAVIS_PULL_REQUEST", "")
- travis_repo_slug = get(ENV, "TRAVIS_REPO_SLUG", "")
- travis_tag = get(ENV, "TRAVIS_TAG", "")
- travis_event_type = get(ENV, "TRAVIS_EVENT_TYPE", "")
- return Travis(travis_branch, travis_pull_request,
- travis_repo_slug, travis_tag, travis_event_type)
+ travis_branch = get(ENV, "TRAVIS_BRANCH", "")
+ travis_pull_request = get(ENV, "TRAVIS_PULL_REQUEST", "")
+ travis_repo_slug = get(ENV, "TRAVIS_REPO_SLUG", "")
+ travis_tag = get(ENV, "TRAVIS_TAG", "")
+ travis_event_type = get(ENV, "TRAVIS_EVENT_TYPE", "")
+ return Travis(
+ travis_branch,
+ travis_pull_request,
+ travis_repo_slug,
+ travis_tag,
+ travis_event_type
+ )
end
# Check criteria for deployment
-function deploy_folder(cfg::Travis;
- repo,
- repo_previews = repo,
- branch = "gh-pages",
- branch_previews = branch,
- devbranch,
- push_preview,
- devurl,
- tag_prefix = "",
- kwargs...)
+function deploy_folder(
+ cfg::Travis;
+ repo,
+ repo_previews=repo,
+ branch="gh-pages",
+ branch_previews=branch,
+ devbranch,
+ push_preview,
+ devurl,
+ tag_prefix="",
+ kwargs...
+)
io = IOBuffer()
all_ok = true
## Determine build type; release, devbranch or preview
@@ -185,17 +192,26 @@ function deploy_folder(cfg::Travis;
## The deploydocs' repo should match TRAVIS_REPO_SLUG
repo_ok = occursin(cfg.travis_repo_slug, repo)
all_ok &= repo_ok
- println(io, "- $(marker(repo_ok)) ENV[\"TRAVIS_REPO_SLUG\"]=\"$(cfg.travis_repo_slug)\" occurs in repo=\"$(repo)\"")
+ println(
+ io,
+ "- $(marker(repo_ok)) ENV[\"TRAVIS_REPO_SLUG\"]=\"$(cfg.travis_repo_slug)\" occurs in repo=\"$(repo)\""
+ )
if build_type === :release
## Do not deploy for PRs
pr_ok = cfg.travis_pull_request == "false"
- println(io, "- $(marker(pr_ok)) ENV[\"TRAVIS_PULL_REQUEST\"]=\"$(cfg.travis_pull_request)\" is \"false\"")
+ println(
+ io,
+ "- $(marker(pr_ok)) ENV[\"TRAVIS_PULL_REQUEST\"]=\"$(cfg.travis_pull_request)\" is \"false\""
+ )
all_ok &= pr_ok
tag_nobuild = version_tag_strip_build(cfg.travis_tag; tag_prefix)
## If a tag exist it should be a valid VersionNumber
tag_ok = tag_nobuild !== nothing
all_ok &= tag_ok
- println(io, "- $(marker(tag_ok)) ENV[\"TRAVIS_TAG\"] contains a valid VersionNumber")
+ println(
+ io,
+ "- $(marker(tag_ok)) ENV[\"TRAVIS_TAG\"] contains a valid VersionNumber"
+ )
deploy_branch = branch
deploy_repo = repo
is_preview = false
@@ -204,12 +220,18 @@ function deploy_folder(cfg::Travis;
elseif build_type === :devbranch
## Do not deploy for PRs
pr_ok = cfg.travis_pull_request == "false"
- println(io, "- $(marker(pr_ok)) ENV[\"TRAVIS_PULL_REQUEST\"]=\"$(cfg.travis_pull_request)\" is \"false\"")
+ println(
+ io,
+ "- $(marker(pr_ok)) ENV[\"TRAVIS_PULL_REQUEST\"]=\"$(cfg.travis_pull_request)\" is \"false\""
+ )
all_ok &= pr_ok
## deploydocs' devbranch should match TRAVIS_BRANCH
branch_ok = !isempty(cfg.travis_tag) || cfg.travis_branch == devbranch
all_ok &= branch_ok
- println(io, "- $(marker(branch_ok)) ENV[\"TRAVIS_BRANCH\"] matches devbranch=\"$(devbranch)\"")
+ println(
+ io,
+ "- $(marker(branch_ok)) ENV[\"TRAVIS_BRANCH\"] matches devbranch=\"$(devbranch)\""
+ )
deploy_branch = branch
deploy_repo = repo
is_preview = false
@@ -219,10 +241,16 @@ function deploy_folder(cfg::Travis;
pr_number = tryparse(Int, cfg.travis_pull_request)
pr_ok = pr_number !== nothing
all_ok &= pr_ok
- println(io, "- $(marker(pr_ok)) ENV[\"TRAVIS_PULL_REQUEST\"]=\"$(cfg.travis_pull_request)\" is a number")
+ println(
+ io,
+ "- $(marker(pr_ok)) ENV[\"TRAVIS_PULL_REQUEST\"]=\"$(cfg.travis_pull_request)\" is a number"
+ )
btype_ok = push_preview
all_ok &= btype_ok
- println(io, "- $(marker(btype_ok)) `push_preview` keyword argument to deploydocs is `true`")
+ println(
+ io,
+ "- $(marker(btype_ok)) `push_preview` keyword argument to deploydocs is `true`"
+ )
deploy_branch = branch_previews
deploy_repo = repo_previews
is_preview = true
@@ -236,10 +264,16 @@ function deploy_folder(cfg::Travis;
## Cron jobs should not deploy
type_ok = cfg.travis_event_type != "cron"
all_ok &= type_ok
- println(io, "- $(marker(type_ok)) ENV[\"TRAVIS_EVENT_TYPE\"]=\"$(cfg.travis_event_type)\" is not \"cron\"")
+ println(
+ io,
+ "- $(marker(type_ok)) ENV[\"TRAVIS_EVENT_TYPE\"]=\"$(cfg.travis_event_type)\" is not \"cron\""
+ )
print(io, "Deploying: $(marker(all_ok))")
@info String(take!(io))
- if build_type === :devbranch && !branch_ok && devbranch == "master" && cfg.travis_branch == "main"
+ if build_type === :devbranch &&
+ !branch_ok &&
+ devbranch == "master" &&
+ cfg.travis_branch == "main"
@warn """
Possible deploydocs() misconfiguration: main vs master
Documenter's configured primary development branch (`devbranch`) is "master", but the
@@ -253,13 +287,15 @@ function deploy_folder(cfg::Travis;
"""
end
if all_ok
- return DeployDecision(; all_ok = true,
- branch = deploy_branch,
- is_preview = is_preview,
- repo = deploy_repo,
- subfolder = subfolder)
+ return DeployDecision(;
+ all_ok=true,
+ branch=deploy_branch,
+ is_preview=is_preview,
+ repo=deploy_repo,
+ subfolder=subfolder
+ )
else
- return DeployDecision(; all_ok = false)
+ return DeployDecision(; all_ok=false)
end
end
@@ -298,21 +334,23 @@ end
function GitHubActions()
github_repository = get(ENV, "GITHUB_REPOSITORY", "") # "JuliaDocs/Documenter.jl"
github_event_name = get(ENV, "GITHUB_EVENT_NAME", "") # "push", "pull_request" or "cron" (?)
- github_ref = get(ENV, "GITHUB_REF", "") # "refs/heads/$(branchname)" for branch, "refs/tags/$(tagname)" for tags
+ github_ref = get(ENV, "GITHUB_REF", "") # "refs/heads/$(branchname)" for branch, "refs/tags/$(tagname)" for tags
return GitHubActions(github_repository, github_event_name, github_ref)
end
# Check criteria for deployment
-function deploy_folder(cfg::GitHubActions;
- repo,
- repo_previews = repo,
- branch = "gh-pages",
- branch_previews = branch,
- devbranch,
- push_preview,
- devurl,
- tag_prefix = "",
- kwargs...)
+function deploy_folder(
+ cfg::GitHubActions;
+ repo,
+ repo_previews=repo,
+ branch="gh-pages",
+ branch_previews=branch,
+ devbranch,
+ push_preview,
+ devurl,
+ tag_prefix="",
+ kwargs...
+)
io = IOBuffer()
all_ok = true
## Determine build type
@@ -323,22 +361,34 @@ function deploy_folder(cfg::GitHubActions;
else
build_type = :devbranch
end
- println(io, "Deployment criteria for deploying $(build_type) build from GitHub Actions:")
+ println(
+ io,
+ "Deployment criteria for deploying $(build_type) build from GitHub Actions:"
+ )
## The deploydocs' repo should match GITHUB_REPOSITORY
repo_ok = occursin(cfg.github_repository, repo)
all_ok &= repo_ok
- println(io, "- $(marker(repo_ok)) ENV[\"GITHUB_REPOSITORY\"]=\"$(cfg.github_repository)\" occurs in repo=\"$(repo)\"")
+ println(
+ io,
+ "- $(marker(repo_ok)) ENV[\"GITHUB_REPOSITORY\"]=\"$(cfg.github_repository)\" occurs in repo=\"$(repo)\""
+ )
if build_type === :release
## Do not deploy for PRs
event_ok = in(cfg.github_event_name, ["push", "workflow_dispatch", "schedule"])
all_ok &= event_ok
- println(io, "- $(marker(event_ok)) ENV[\"GITHUB_EVENT_NAME\"]=\"$(cfg.github_event_name)\" is \"push\", \"workflow_dispatch\" or \"schedule\"")
+ println(
+ io,
+ "- $(marker(event_ok)) ENV[\"GITHUB_EVENT_NAME\"]=\"$(cfg.github_event_name)\" is \"push\", \"workflow_dispatch\" or \"schedule\""
+ )
## If a tag exist it should be a valid VersionNumber
m = match(r"^refs\/tags\/(.*)$", cfg.github_ref)
tag_nobuild = version_tag_strip_build(m.captures[1]; tag_prefix)
tag_ok = tag_nobuild !== nothing
all_ok &= tag_ok
- println(io, "- $(marker(tag_ok)) ENV[\"GITHUB_REF\"]=\"$(cfg.github_ref)\" contains a valid VersionNumber")
+ println(
+ io,
+ "- $(marker(tag_ok)) ENV[\"GITHUB_REF\"]=\"$(cfg.github_ref)\" contains a valid VersionNumber"
+ )
deploy_branch = branch
deploy_repo = repo
is_preview = false
@@ -348,12 +398,18 @@ function deploy_folder(cfg::GitHubActions;
## Do not deploy for PRs
event_ok = in(cfg.github_event_name, ["push", "workflow_dispatch", "schedule"])
all_ok &= event_ok
- println(io, "- $(marker(event_ok)) ENV[\"GITHUB_EVENT_NAME\"]=\"$(cfg.github_event_name)\" is \"push\", \"workflow_dispatch\" or \"schedule\"")
+ println(
+ io,
+ "- $(marker(event_ok)) ENV[\"GITHUB_EVENT_NAME\"]=\"$(cfg.github_event_name)\" is \"push\", \"workflow_dispatch\" or \"schedule\""
+ )
## deploydocs' devbranch should match the current branch
m = match(r"^refs\/heads\/(.*)$", cfg.github_ref)
branch_ok = m === nothing ? false : String(m.captures[1]) == devbranch
all_ok &= branch_ok
- println(io, "- $(marker(branch_ok)) ENV[\"GITHUB_REF\"] matches devbranch=\"$(devbranch)\"")
+ println(
+ io,
+ "- $(marker(branch_ok)) ENV[\"GITHUB_REF\"] matches devbranch=\"$(devbranch)\""
+ )
deploy_branch = branch
deploy_repo = repo
is_preview = false
@@ -366,13 +422,20 @@ function deploy_folder(cfg::GitHubActions;
all_ok &= pr_ok
println(io, "- $(marker(pr_ok)) ENV[\"GITHUB_REF\"] corresponds to a PR number")
if pr_ok
- pr_origin_matches_repo = verify_github_pull_repository(cfg.github_repository, pr_number)
+ pr_origin_matches_repo =
+ verify_github_pull_repository(cfg.github_repository, pr_number)
all_ok &= pr_origin_matches_repo
- println(io, "- $(marker(pr_origin_matches_repo)) PR originates from the same repository")
+ println(
+ io,
+ "- $(marker(pr_origin_matches_repo)) PR originates from the same repository"
+ )
end
btype_ok = push_preview
all_ok &= btype_ok
- println(io, "- $(marker(btype_ok)) `push_preview` keyword argument to deploydocs is `true`")
+ println(
+ io,
+ "- $(marker(btype_ok)) `push_preview` keyword argument to deploydocs is `true`"
+ )
deploy_branch = branch_previews
deploy_repo = repo_previews
is_preview = true
@@ -393,11 +456,17 @@ function deploy_folder(cfg::GitHubActions;
elseif token_ok
println(io, "- $(marker(token_ok)) ENV[\"GITHUB_TOKEN\"] exists and is non-empty")
else
- println(io, "- $(marker(auth_ok)) ENV[\"DOCUMENTER_KEY\"] or ENV[\"GITHUB_TOKEN\"] exists and is non-empty")
+ println(
+ io,
+ "- $(marker(auth_ok)) ENV[\"DOCUMENTER_KEY\"] or ENV[\"GITHUB_TOKEN\"] exists and is non-empty"
+ )
end
print(io, "Deploying: $(marker(all_ok))")
@info String(take!(io))
- if build_type === :devbranch && !branch_ok && devbranch == "master" && cfg.github_ref == "refs/heads/main"
+ if build_type === :devbranch &&
+ !branch_ok &&
+ devbranch == "master" &&
+ cfg.github_ref == "refs/heads/main"
@warn """
Possible deploydocs() misconfiguration: main vs master
Documenter's configured primary development branch (`devbranch`) is "master", but the
@@ -411,13 +480,15 @@ function deploy_folder(cfg::GitHubActions;
"""
end
if all_ok
- return DeployDecision(; all_ok = true,
- branch = deploy_branch,
- is_preview = is_preview,
- repo = deploy_repo,
- subfolder = subfolder)
+ return DeployDecision(;
+ all_ok=true,
+ branch=deploy_branch,
+ is_preview=is_preview,
+ repo=deploy_repo,
+ subfolder=subfolder
+ )
else
- return DeployDecision(; all_ok = false)
+ return DeployDecision(; all_ok=false)
end
end
@@ -454,7 +525,7 @@ function post_status(::GitHubActions; type, repo::String, subfolder=nothing, kwa
if haskey(event, "pull_request") &&
haskey(event["pull_request"], "head") &&
haskey(event["pull_request"]["head"], "sha")
- sha = event["pull_request"]["head"]["sha"]
+ sha = event["pull_request"]["head"]["sha"]
end
elseif get(ENV, "GITHUB_EVENT_NAME", nothing) == "push"
sha = get(ENV, "GITHUB_SHA", nothing)
@@ -466,7 +537,12 @@ function post_status(::GitHubActions; type, repo::String, subfolder=nothing, kwa
end
end
-function post_github_status(type::S, deploydocs_repo::S, sha::S, subfolder=nothing) where S <: String
+function post_github_status(
+ type::S,
+ deploydocs_repo::S,
+ sha::S,
+ subfolder=nothing
+) where {S<:String}
try
Sys.which("curl") === nothing && return
## Extract owner and repository name
@@ -483,7 +559,7 @@ function post_github_status(type::S, deploydocs_repo::S, sha::S, subfolder=nothi
push!(cmd.exec, "-H", "Authorization: token $(auth)")
push!(cmd.exec, "-H", "User-Agent: Documenter.jl")
push!(cmd.exec, "-H", "Content-Type: application/json")
- json = Dict{String,Any}("context" => "documenter/deploy", "state"=>type)
+ json = Dict{String,Any}("context" => "documenter/deploy", "state" => type)
if type == "pending"
json["description"] = "Documentation build in progress"
elseif type == "success"
@@ -505,7 +581,7 @@ function post_github_status(type::S, deploydocs_repo::S, sha::S, subfolder=nothi
# Run the command (silently)
io = IOBuffer()
res = run(pipeline(cmd; stdout=io, stderr=devnull))
- @debug "Response of curl POST request" response=String(take!(io))
+ @debug "Response of curl POST request" response = String(take!(io))
catch
@debug "Failed to post status"
end
@@ -540,9 +616,9 @@ end
function run_and_capture(cmd)
stdout_buffer, stderr_buffer = IOBuffer(), IOBuffer()
- run(pipeline(cmd; stdout = stdout_buffer, stderr = stderr_buffer))
+ run(pipeline(cmd; stdout=stdout_buffer, stderr=stderr_buffer))
stdout, stderr = String(take!(stdout_buffer)), String(take!(stderr_buffer))
- return (; stdout = stdout, stderr = stderr)
+ return (; stdout=stdout, stderr=stderr)
end
##########
@@ -597,13 +673,13 @@ end
function deploy_folder(
cfg::GitLab;
repo,
- repo_previews = repo,
+ repo_previews=repo,
devbranch,
push_preview,
devurl,
- branch = "gh-pages",
- branch_previews = branch,
- tag_prefix = "",
+ branch="gh-pages",
+ branch_previews=branch,
+ tag_prefix="",
kwargs...,
)
io = IOBuffer()
@@ -682,14 +758,14 @@ function deploy_folder(
if all_ok
return DeployDecision(;
- all_ok = true,
- branch = deploy_branch,
- repo = deploy_repo,
- subfolder = subfolder,
- is_preview = is_preview,
+ all_ok=true,
+ branch=deploy_branch,
+ repo=deploy_repo,
+ subfolder=subfolder,
+ is_preview=is_preview,
)
else
- return DeployDecision(; all_ok = false)
+ return DeployDecision(; all_ok=false)
end
end
@@ -740,13 +816,13 @@ end
function deploy_folder(
cfg::Buildkite;
repo,
- repo_previews = repo,
+ repo_previews=repo,
devbranch,
push_preview,
devurl,
- branch = "gh-pages",
- branch_previews = branch,
- tag_prefix = "",
+ branch="gh-pages",
+ branch_previews=branch,
+ tag_prefix="",
kwargs...,
)
io = IOBuffer()
@@ -820,7 +896,10 @@ function deploy_folder(
print(io, "Deploying to folder $(repr(subfolder)): $(marker(all_ok))")
@info String(take!(io))
- if build_type === :devbranch && !branch_ok && devbranch == "master" && cfg.commit_branch == "main"
+ if build_type === :devbranch &&
+ !branch_ok &&
+ devbranch == "master" &&
+ cfg.commit_branch == "main"
@warn """
Possible deploydocs() misconfiguration: main vs master
Documenter's configured primary development branch (`devbranch`) is "master", but the
@@ -836,14 +915,14 @@ function deploy_folder(
if all_ok
return DeployDecision(;
- all_ok = true,
- branch = deploy_branch,
- repo = deploy_repo,
- subfolder = subfolder,
- is_preview = is_preview,
+ all_ok=true,
+ branch=deploy_branch,
+ repo=deploy_repo,
+ subfolder=subfolder,
+ is_preview=is_preview,
)
else
- return DeployDecision(; all_ok = false)
+ return DeployDecision(; all_ok=false)
end
end
@@ -962,7 +1041,8 @@ function Woodpecker()
else
woodpecker_ci_version = VersionNumber(ENV["CI_SYSTEM_VERSION"])
@warn "Current Woodpecker version is $(woodpecker_ci_version). Make sure this is correct."
- if ENV["CI"] == "drone" && (v"1.0.0" > VersionNumber(ENV["CI_SYSTEM_VERSION"]) >= v"0.15.0")
+ if ENV["CI"] == "drone" &&
+ (v"1.0.0" > VersionNumber(ENV["CI_SYSTEM_VERSION"]) >= v"0.15.0")
@warn """Woodpecker prior version 1.0.0 is backward compatible to Drone
but *there will be breaking changes in the future*. Please update
to a newer version """
@@ -979,14 +1059,28 @@ function Woodpecker()
woodpecker_repo = get(ENV, "CI_REPO", "")
woodpecker_event_name = get(ENV, "CI_BUILD_EVENT", "")
woodpecker_ref = get(ENV, "CI_COMMIT_REF", "")
- return Woodpecker(woodpecker_ci_version, woodpecker_forge_url, woodpecker_repo, woodpecker_tag, woodpecker_event_name, woodpecker_ref)
+ return Woodpecker(
+ woodpecker_ci_version,
+ woodpecker_forge_url,
+ woodpecker_repo,
+ woodpecker_tag,
+ woodpecker_event_name,
+ woodpecker_ref
+ )
else
woodpecker_forge_url = get(ENV, "CI_FORGE_URL", "")
woodpecker_tag = get(ENV, "CI_COMMIT_TAG", "")
woodpecker_repo = get(ENV, "CI_REPO", "") # repository full name /
woodpecker_event_name = get(ENV, "CI_PIPELINE_EVENT", "") # build event (push, pull_request, tag, deployment)
woodpecker_ref = get(ENV, "CI_COMMIT_REF", "") # commit ref
- return Woodpecker(woodpecker_ci_version, woodpecker_forge_url, woodpecker_repo, woodpecker_tag, woodpecker_event_name, woodpecker_ref)
+ return Woodpecker(
+ woodpecker_ci_version,
+ woodpecker_forge_url,
+ woodpecker_repo,
+ woodpecker_tag,
+ woodpecker_event_name,
+ woodpecker_ref
+ )
end
end
@@ -1000,7 +1094,8 @@ function deploy_folder(
push_preview,
devurl,
tag_prefix="",
- kwargs...)
+ kwargs...
+)
io = IOBuffer()
all_ok = true
if cfg.woodpecker_event_name == "pull_request"
@@ -1019,7 +1114,10 @@ function deploy_folder(
repo_ok = occursin(cfg.woodpecker_repo, repo)
all_ok &= repo_ok
- println(io, "- $(marker(repo_ok)) ENV[\"CI_REPO\"]=\"$(cfg.woodpecker_repo)\" occursin in repo=\"$(repo)\"")
+ println(
+ io,
+ "- $(marker(repo_ok)) ENV[\"CI_REPO\"]=\"$(cfg.woodpecker_repo)\" occursin in repo=\"$(repo)\""
+ )
ci_event_env_name = if haskey(ENV, "CI_PIPELINE_EVENT")
"ENV[\"CI_PIPELINE_EVENT\"]"
@@ -1028,13 +1126,20 @@ function deploy_folder(
end
if build_type === :release
- event_ok = in(cfg.woodpecker_event_name, ["push", "pull_request", "deployment", "tag"])
+ event_ok =
+ in(cfg.woodpecker_event_name, ["push", "pull_request", "deployment", "tag"])
all_ok &= event_ok
- println(io, "- $(marker(event_ok)) $(ci_event_env_name)=\"$(cfg.woodpecker_event_name)\" is \"push\", \"deployment\" or \"tag\"")
+ println(
+ io,
+ "- $(marker(event_ok)) $(ci_event_env_name)=\"$(cfg.woodpecker_event_name)\" is \"push\", \"deployment\" or \"tag\""
+ )
tag_nobuild = version_tag_strip_build(cfg.woodpecker_tag; tag_prefix)
tag_ok = tag_nobuild !== nothing
all_ok &= tag_ok
- println(io, "- $(marker(tag_ok)) ENV[\"CI_COMMIT_TAG\"]=\"$(cfg.woodpecker_tag)\" contains a valid VersionNumber")
+ println(
+ io,
+ "- $(marker(tag_ok)) ENV[\"CI_COMMIT_TAG\"]=\"$(cfg.woodpecker_tag)\" contains a valid VersionNumber"
+ )
deploy_branch = branch
deploy_repo = repo
is_preview = false
@@ -1042,14 +1147,21 @@ function deploy_folder(
subfolder = tag_nobuild
elseif build_type === :devbranch
## Do not deploy for PRs
- event_ok = in(cfg.woodpecker_event_name, ["push", "pull_request", "deployment", "tag"])
+ event_ok =
+ in(cfg.woodpecker_event_name, ["push", "pull_request", "deployment", "tag"])
all_ok &= event_ok
- println(io, "- $(marker(event_ok)) $(ci_event_env_name)=\"$(cfg.woodpecker_event_name)\" is \"push\", \"deployment\", or \"tag\"")
+ println(
+ io,
+ "- $(marker(event_ok)) $(ci_event_env_name)=\"$(cfg.woodpecker_event_name)\" is \"push\", \"deployment\", or \"tag\""
+ )
## deploydocs' devbranch should match the current branch
m = match(r"^refs\/heads\/(.*)$", cfg.woodpecker_ref)
branch_ok = (m === nothing) ? false : String(m.captures[1]) == devbranch
all_ok &= branch_ok
- println(io, "- $(marker(branch_ok)) ENV[\"CI_COMMIT_REF\"] matches devbranch=\"$(devbranch)\"")
+ println(
+ io,
+ "- $(marker(branch_ok)) ENV[\"CI_COMMIT_REF\"] matches devbranch=\"$(devbranch)\""
+ )
deploy_branch = branch
deploy_repo = repo
is_preview = false
@@ -1058,17 +1170,34 @@ function deploy_folder(
else # build_type === :preview
m = match(r"refs\/pull\/(\d+)\/merge", cfg.woodpecker_ref)
pr_number1 = tryparse(Int, (m === nothing) ? "" : m.captures[1])
- pr_number2 = tryparse(Int, get(ENV, "CI_COMMIT_PULL_REQUEST", nothing) === nothing ? "" : ENV["CI_COMMIT_PULL_REQUEST"])
+ pr_number2 = tryparse(
+ Int,
+ if get(ENV, "CI_COMMIT_PULL_REQUEST", nothing) === nothing
+ ""
+ else
+ ENV["CI_COMMIT_PULL_REQUEST"]
+ end
+ )
# Check if both are Ints. If both are Ints, then check if they are equal, otherwise, return false
- pr_numbers_ok = all(x -> x isa Int, [pr_number1, pr_number2]) ? (pr_number1 == pr_number2) : false
+ pr_numbers_ok = if all(x -> x isa Int, [pr_number1, pr_number2])
+ (pr_number1 == pr_number2)
+ else
+ false
+ end
is_pull_request_ok = cfg.woodpecker_event_name == "pull_request"
pr_ok = pr_numbers_ok == is_pull_request_ok
all_ok &= pr_ok
println(io, "- $(marker(pr_numbers_ok)) ENV[\"CI_COMMIT_REF\"] corresponds to a PR")
- println(io, "- $(marker(is_pull_request_ok)) $(ci_event_env_name) matches built type: `pull_request`")
+ println(
+ io,
+ "- $(marker(is_pull_request_ok)) $(ci_event_env_name) matches built type: `pull_request`"
+ )
btype_ok = push_preview
all_ok &= btype_ok
- println(io, "- $(marker(btype_ok)) `push_preview` keyword argument to deploydocs is `true`")
+ println(
+ io,
+ "- $(marker(btype_ok)) `push_preview` keyword argument to deploydocs is `true`"
+ )
deploy_branch = branch_previews
deploy_repo = repo_previews
is_preview = true
@@ -1084,21 +1213,31 @@ function deploy_folder(
if key_ok
println(io, "- $(marker(key_ok)) ENV[\"DOCUMENTER_KEY\"] exists and is non-empty")
elseif token_ok
- println(io, "- $(marker(token_ok)) ENV[\"PROJECT_ACCESS_TOKEN\"] exists and is non-empty")
+ println(
+ io,
+ "- $(marker(token_ok)) ENV[\"PROJECT_ACCESS_TOKEN\"] exists and is non-empty"
+ )
else
- println(io, "- $(marker(auth_ok)) ENV[\"DOCUMENTER_KEY\"] or ENV[\"PROJECT_ACCESS_TOKEN\"] exists and is non-empty")
+ println(
+ io,
+ "- $(marker(auth_ok)) ENV[\"DOCUMENTER_KEY\"] or ENV[\"PROJECT_ACCESS_TOKEN\"] exists and is non-empty"
+ )
end
print(io, "Deploying: $(marker(all_ok))")
@info String(take!(io))
- if build_type === :devbranch && !branch_ok && devbranch == "master" && cfg.woodpecker_ref == "refs/heads/main"
+ if build_type === :devbranch &&
+ !branch_ok &&
+ devbranch == "master" &&
+ cfg.woodpecker_ref == "refs/heads/main"
@warn """
Possible deploydocs() misconfiguration: main vs master. Current branch (from \$CI_COMMIT_REF) is "main".
"""
end
if all_ok
- return DeployDecision(; all_ok=true,
+ return DeployDecision(;
+ all_ok=true,
branch=deploy_branch,
is_preview=is_preview,
repo=deploy_repo,
diff --git a/src/deploydocs.jl b/src/deploydocs.jl
index 8b67869ac5..3652603f55 100644
--- a/src/deploydocs.jl
+++ b/src/deploydocs.jl
@@ -179,29 +179,24 @@ using the [`deploydocs`](@ref) function to automatically generate docs and push
GitHub.
"""
function deploydocs(;
- root = currentdir(),
- target = "build",
- dirname = "",
-
- repo = error("no 'repo' keyword provided."),
- branch = "gh-pages",
-
- repo_previews = repo,
- branch_previews = branch,
-
- deps = nothing,
- make = nothing,
-
- devbranch = nothing,
- devurl = "dev",
- versions = ["stable" => "v^", "v#.#", devurl => devurl],
- forcepush::Bool = false,
- deploy_config = auto_detect_deploy_system(),
- push_preview::Bool = false,
- tag_prefix = "",
-
- archive = nothing, # experimental and undocumented
- )
+ root=currentdir(),
+ target="build",
+ dirname="",
+ repo = error("no 'repo' keyword provided."),
+ branch = "gh-pages",
+ repo_previews = repo,
+ branch_previews = branch,
+ deps = nothing,
+ make = nothing,
+ devbranch=nothing,
+ devurl="dev",
+ versions=["stable" => "v^", "v#.#", devurl => devurl],
+ forcepush::Bool=false,
+ deploy_config=auto_detect_deploy_system(),
+ push_preview::Bool=false,
+ tag_prefix="",
+ archive=nothing, # experimental and undocumented
+)
# Try to figure out default branch (see #1443 and #1727)
if devbranch === nothing
@@ -215,15 +210,17 @@ function deploydocs(;
ispath(archive) && error("Output archive exists: $archive")
end
- deploy_decision = deploy_folder(deploy_config;
- branch=branch,
- branch_previews=branch_previews,
- devbranch=devbranch,
- devurl=devurl,
- push_preview=push_preview,
- repo=repo,
- repo_previews=repo_previews,
- tag_prefix)
+ deploy_decision = deploy_folder(
+ deploy_config;
+ branch=branch,
+ branch_previews=branch_previews,
+ devbranch=devbranch,
+ devurl=devurl,
+ push_preview=push_preview,
+ repo=repo,
+ repo_previews=repo_previews,
+ tag_prefix
+ )
if deploy_decision.all_ok
deploy_branch = deploy_decision.branch
deploy_repo = deploy_decision.repo
@@ -271,11 +268,20 @@ function deploydocs(;
@debug "pushing new documentation to remote: '$deploy_repo:$deploy_branch'."
mktempdir() do temp
git_push(
- root, temp, deploy_repo;
- branch=deploy_branch, dirname=dirname, target=target,
- sha=sha, deploy_config=deploy_config, subfolder=deploy_subfolder,
- devurl=devurl, versions=versions, forcepush=forcepush,
- is_preview=deploy_is_preview, archive=archive,
+ root,
+ temp,
+ deploy_repo;
+ branch=deploy_branch,
+ dirname=dirname,
+ target=target,
+ sha=sha,
+ deploy_config=deploy_config,
+ subfolder=deploy_subfolder,
+ devurl=devurl,
+ versions=versions,
+ forcepush=forcepush,
+ is_preview=deploy_is_preview,
+ archive=archive,
)
end
end
@@ -293,11 +299,21 @@ Handles pushing changes to the remote documentation branch.
The documentation are placed in the folder specified by `subfolder`.
"""
function git_push(
- root, temp, repo;
- branch="gh-pages", dirname="", target="site", sha="", devurl="dev",
- versions, forcepush=false, deploy_config, subfolder,
- is_preview::Bool = false, archive,
- )
+ root,
+ temp,
+ repo;
+ branch="gh-pages",
+ dirname="",
+ target="site",
+ sha="",
+ devurl="dev",
+ versions,
+ forcepush=false,
+ deploy_config,
+ subfolder,
+ is_preview::Bool=false,
+ archive,
+)
dirname = isempty(dirname) ? temp : joinpath(temp, dirname)
isdir(dirname) || mkpath(dirname)
@@ -358,7 +374,11 @@ function git_push(
# Create the versions.js file containing a list of `entries`.
# This must always happen after the folder copying.
- HTMLWriter.generate_version_file(joinpath(dirname, "versions.js"), entries, symlinks)
+ HTMLWriter.generate_version_file(
+ joinpath(dirname, "versions.js"),
+ entries,
+ symlinks
+ )
# Create the index.html file to redirect ./stable or ./dev.
# This must always happen after the folder copying.
@@ -371,15 +391,23 @@ function git_push(
if i === nothing
rm_and_add_symlink(kv.second, kv.first)
else
- throw(ArgumentError(string("link `$(kv)` cannot overwrite ",
- "`devurl = $(devurl)` with the same name.")))
+ throw(
+ ArgumentError(
+ string(
+ "link `$(kv)` cannot overwrite ",
+ "`devurl = $(devurl)` with the same name."
+ )
+ )
+ )
end
end
end
end
# Add, commit, and push the docs to the remote.
- run(`$(git()) add -A -- ':!.documenter-identity-file.tmp' ':!**/.documenter-identity-file.tmp'`)
+ run(
+ `$(git()) add -A -- ':!.documenter-identity-file.tmp' ':!**/.documenter-identity-file.tmp'`
+ )
if !success(`$(git()) diff --cached --exit-code`)
if !isnothing(archive)
run(`$(git()) commit -m "build based on $sha"`)
@@ -423,16 +451,18 @@ function git_push(
try
mktemp() do sshconfig, io
- print(io,
- """
- Host $host
- StrictHostKeyChecking no
- User $user
- HostName $host
- IdentityFile "$keyfile"
- IdentitiesOnly yes
- BatchMode yes
- """)
+ print(
+ io,
+ """
+ Host $host
+ StrictHostKeyChecking no
+ User $user
+ HostName $host
+ IdentityFile "$keyfile"
+ IdentitiesOnly yes
+ BatchMode yes
+ """
+ )
close(io)
chmod(sshconfig, 0o600)
# git config core.sshCommand requires git 2.10.0, but
@@ -443,7 +473,7 @@ function git_push(
end
post_status(deploy_config; repo=repo, type="success", subfolder=subfolder)
catch e
- @error "Failed to push:" exception=(e, catch_backtrace())
+ @error "Failed to push:" exception = (e, catch_backtrace())
post_status(deploy_config; repo=repo, type="error")
rethrow(e)
finally
@@ -457,7 +487,7 @@ function git_push(
cd(() -> withenv(git_commands, NO_KEY_ENV...), temp)
post_status(deploy_config; repo=repo, type="success", subfolder=subfolder)
catch e
- @error "Failed to push:" exception=(e, catch_backtrace())
+ @error "Failed to push:" exception = (e, catch_backtrace())
post_status(deploy_config; repo=repo, type="error")
rethrow(e)
end
@@ -467,7 +497,7 @@ end
function rm_and_add_symlink(target, link)
if ispath(link) || islink(link)
@warn "removing `$(link)` and linking `$(link)` to `$(target)`."
- rm(link; force = true, recursive = true)
+ rm(link; force=true, recursive=true)
end
symlink(target, link)
end
@@ -480,7 +510,8 @@ Disassemble repo address into user, host, and path to repo. If no user is given,
"""
function user_host_upstream(repo)
# If the repo path contains the protocol, throw immediately an error.
- occursin(r"^[a-z]+://", repo) && error("The repo path $(repo) should not contain the protocol")
+ occursin(r"^[a-z]+://", repo) &&
+ error("The repo path $(repo) should not contain the protocol")
#= the regex has three parts:
(?:([^@]*)@)? matches any number of characters up to the first "@", if present,
capturing only the characters before the "@" - this captures the username
diff --git a/src/docchecks.jl b/src/docchecks.jl
index 89b490fbc5..5d56233352 100644
--- a/src/docchecks.jl
+++ b/src/docchecks.jl
@@ -27,10 +27,13 @@ function missingdocs(doc::Document)
end
end
println(b)
- print(b, """
- These are docstrings in the checked modules (configured with the modules keyword)
- that are not included in canonical @docs or @autodocs blocks.
- """)
+ print(
+ b,
+ """
+ These are docstrings in the checked modules (configured with the modules keyword)
+ that are not included in canonical @docs or @autodocs blocks.
+ """
+ )
@docerror(doc, :missing_docs, String(take!(b)))
end
return n
@@ -48,13 +51,17 @@ function missingbindings(doc::Document)
# module bindings that come from Docs.meta() always appear to be of the form
# Docs.Binding(Mod.SubMod, :SubMod) (since Julia 0.7). We therefore "normalize"
# module bindings before we search in the list returned by allbindings().
- binding = if DocSystem.defined(object.binding) && !DocSystem.iskeyword(object.binding)
- m = DocSystem.resolve(object.binding)
- isa(m, Module) && nameof(object.binding.mod) != object.binding.var ?
- Docs.Binding(m, nameof(m)) : object.binding
- else
- object.binding
- end
+ binding =
+ if DocSystem.defined(object.binding) && !DocSystem.iskeyword(object.binding)
+ m = DocSystem.resolve(object.binding)
+ if isa(m, Module) && nameof(object.binding.mod) != object.binding.var
+ Docs.Binding(m, nameof(m))
+ else
+ object.binding
+ end
+ else
+ object.binding
+ end
if haskey(bindings, binding)
signatures = bindings[binding]
if object.signature ≡ Union{} || length(signatures) ≡ 1
@@ -68,14 +75,14 @@ function missingbindings(doc::Document)
end
function allbindings(checkdocs::Symbol, mods)
- out = Dict{Binding, Set{Type}}()
+ out = Dict{Binding,Set{Type}}()
for m in mods
allbindings(checkdocs, m, out)
end
out
end
-function allbindings(checkdocs::Symbol, mod::Module, out = Dict{Binding, Set{Type}}())
+function allbindings(checkdocs::Symbol, mod::Module, out=Dict{Binding,Set{Type}}())
for (binding, doc) in meta(mod)
# The keys of the docs meta dictionary should always be Docs.Binding objects in
# practice. However, the key type is Any, so it is theoretically possible that
@@ -117,9 +124,9 @@ function footnotes(doc::Document)
#
# For all ids the final result should be `(N, 1)` where `N > 1`, i.e. one or more
# footnote references and a single footnote body.
- footnotes = Dict{Page, Dict{String, Tuple{Int, Int}}}()
+ footnotes = Dict{Page,Dict{String,Tuple{Int,Int}}}()
for (src, page) in doc.blueprint.pages
- orphans = Dict{String, Tuple{Int, Int}}()
+ orphans = Dict{String,Tuple{Int,Int}}()
for node in AbstractTrees.PreOrderDFS(page.mdast)
footnote(node.element, orphans)
end
@@ -129,15 +136,27 @@ function footnotes(doc::Document)
for (id, (ids, bodies)) in orphans
# Multiple footnote bodies.
if bodies > 1
- @docerror(doc, :footnote, "footnote '$id' has $bodies bodies in $(locrepr(page.source)).")
+ @docerror(
+ doc,
+ :footnote,
+ "footnote '$id' has $bodies bodies in $(locrepr(page.source))."
+ )
end
# No footnote references for an id.
if ids === 0
- @docerror(doc, :footnote, "unused footnote named '$id' in $(locrepr(page.source)).")
+ @docerror(
+ doc,
+ :footnote,
+ "unused footnote named '$id' in $(locrepr(page.source))."
+ )
end
# No footnote bodies for an id.
if bodies === 0
- @docerror(doc, :footnote, "no footnotes found for '$id' in $(locrepr(page.source)).")
+ @docerror(
+ doc,
+ :footnote,
+ "no footnotes found for '$id' in $(locrepr(page.source))."
+ )
end
end
end
@@ -158,7 +177,13 @@ footnote(other, orphans::Dict) = true
# Link Checks.
# ------------
-hascurl() = (try; success(`curl --version`); catch err; false; end)
+hascurl() = (
+ try
+ success(`curl --version`)
+ catch err
+ false
+ end
+)
"""
$(SIGNATURES)
@@ -184,14 +209,23 @@ function linkcheck(mdast::MarkdownAST.Node, doc::Document)
end
end
-function linkcheck(node::MarkdownAST.Node, element::MarkdownAST.AbstractElement, doc::Document)
+function linkcheck(
+ node::MarkdownAST.Node,
+ element::MarkdownAST.AbstractElement,
+ doc::Document
+)
# The linkcheck is only active for specific `element` types
# (`MarkdownAST.Link`, most importantly), which are defined below as more
# specific methods
return nothing
end
-function linkcheck(node::MarkdownAST.Node, link::MarkdownAST.Link, doc::Document; method::Symbol=:HEAD)
+function linkcheck(
+ node::MarkdownAST.Node,
+ link::MarkdownAST.Link,
+ doc::Document;
+ method::Symbol=:HEAD
+)
# first, make sure we're not supposed to ignore this link
for r in doc.user.linkcheck_ignore
@@ -212,12 +246,16 @@ function linkcheck(node::MarkdownAST.Node, link::MarkdownAST.Link, doc::Document
# Mozilla developer docs, but only is it's a HTTP(S) request.
#
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent#chrome_ua_string
- fakebrowser = startswith(uppercase(link.destination), "HTTP") ? [
+ fakebrowser = if startswith(uppercase(link.destination), "HTTP")
+ [
"--user-agent",
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36",
"-H",
"accept-encoding: gzip, deflate, br",
- ] : ""
+ ]
+ else
+ ""
+ end
cmd = `curl $(method === :HEAD ? "-sI" : "-s") --proto =http,https,ftp,ftps $(fakebrowser) $(link.destination) --max-time $timeout -o $null_file --write-out "%{http_code} %{url_effective} %{redirect_url}"`
local result
@@ -234,11 +272,16 @@ function linkcheck(node::MarkdownAST.Node, link::MarkdownAST.Link, doc::Document
status, scheme, location = matched.captures
status = parse(Int, status)
scheme = uppercase(scheme)
- protocol = startswith(scheme, "HTTP") ? :HTTP :
- startswith(scheme, "FTP") ? :FTP : :UNKNOWN
+ protocol = if startswith(scheme, "HTTP")
+ :HTTP
+ elseif startswith(scheme, "FTP")
+ :FTP
+ else
+ :UNKNOWN
+ end
if (protocol === :HTTP && (status < 300 || status == 302)) ||
- (protocol === :FTP && (200 <= status < 300 || status == 350))
+ (protocol === :FTP && (200 <= status < 300 || status == 350))
if location !== nothing
@debug "linkcheck '$(link.destination)' status: $(status), redirects to '$(location)'"
else
@@ -255,7 +298,11 @@ function linkcheck(node::MarkdownAST.Node, link::MarkdownAST.Link, doc::Document
@debug "linkcheck '$(link.destination)' status: $(status), retrying without `-I`"
return linkcheck(node, link, doc; method=:GET)
else
- @docerror(doc, :linkcheck, "linkcheck '$(link.destination)' status: $(status).")
+ @docerror(
+ doc,
+ :linkcheck,
+ "linkcheck '$(link.destination)' status: $(status)."
+ )
end
else
@docerror(doc, :linkcheck, "invalid result returned by $cmd:", result)
@@ -305,7 +352,9 @@ function tag(repo, tag_ref)
if status == 404
return nothing
elseif status != 200
- error("Unexpected error code $(status) '$(repo)' while getting tag '$(tag_ref)'.")
+ error(
+ "Unexpected error code $(status) '$(repo)' while getting tag '$(tag_ref)'."
+ )
end
end
return result
@@ -314,7 +363,9 @@ end
function gitcommit(repo, commit_tag)
status, result = gh_get_json("/repos/$(repo)/git/commits/$(commit_tag)")
if status != 200
- error("Unexpected error code $(status) '$(repo)' while getting commit '$(commit_tag)'.")
+ error(
+ "Unexpected error code $(status) '$(repo)' while getting commit '$(commit_tag)'."
+ )
end
return result
end
@@ -362,11 +413,19 @@ function githubcheck(doc::Document)
tag_guess = remote_repo.commit
tag_ref = tag(repo, tag_guess)
if tag_ref === nothing
- @docerror(doc, :linkcheck_remotes, "linkcheck (remote) '$(repo)' error while getting tag '$(tag_guess)'. $(GITHUB_ERROR_ADVICE)")
+ @docerror(
+ doc,
+ :linkcheck_remotes,
+ "linkcheck (remote) '$(repo)' error while getting tag '$(tag_guess)'. $(GITHUB_ERROR_ADVICE)"
+ )
return
end
if tag_ref["object"]["type"] != "commit"
- @docerror(doc, :linkcheck_remotes, "linkcheck (remote) '$(repo)' tag '$(tag_guess)' does not point to a commit. $(GITHUB_ERROR_ADVICE)")
+ @docerror(
+ doc,
+ :linkcheck_remotes,
+ "linkcheck (remote) '$(repo)' tag '$(tag_guess)' does not point to a commit. $(GITHUB_ERROR_ADVICE)"
+ )
return
end
commit_sha = tag_ref["object"]["sha"]
diff --git a/src/docs_precompile/make.jl b/src/docs_precompile/make.jl
index 611881bb32..ecfd84ac70 100644
--- a/src/docs_precompile/make.jl
+++ b/src/docs_precompile/make.jl
@@ -1,12 +1,10 @@
using Documenter, Logging
with_logger(NullLogger()) do
-makedocs(
- sitename = "TestPkg",
- pages = Any[
- "Home" => "index.md",
- ],
- build = mktempdir(),
- remotes = nothing,
-)
+ makedocs(
+ sitename="TestPkg",
+ pages=Any["Home"=>"index.md",],
+ build=mktempdir(),
+ remotes=nothing,
+ )
end
diff --git a/src/doctest.jl b/src/doctest.jl
index 8c8640a77d..fb41df2df7 100644
--- a/src/doctest.jl
+++ b/src/doctest.jl
@@ -23,12 +23,12 @@ function doctest(package::Module; manual=true, testset=nothing, kwargs...)
end
source = nothing
if manual === true
- source = normpath(joinpath(dirname(pathof(package)), "..", "docs", "src"))
- isdir(source) || throw(ArgumentError("""
- Package $(package) does not have a documentation source directory at standard location.
- Searched at: $(source)
- If ...
- """))
+ source = normpath(joinpath(dirname(pathof(package)), "..", "docs", "src"))
+ isdir(source) || throw(ArgumentError("""
+ Package $(package) does not have a documentation source directory at standard location.
+ Searched at: $(source)
+ If ...
+ """))
end
testset = (testset === nothing) ? "Doctests: $(package)" : testset
doctest(source, [package]; testset=testset, kwargs...)
@@ -64,13 +64,13 @@ manual pages can be disabled if `source` is set to `nothing`.
fixing fails.
"""
function doctest(
- source::Union{AbstractString,Nothing},
- modules::AbstractVector{Module};
- fix = false,
- testset = "Doctests",
- doctestfilters = Regex[],
- plugins = Plugin[],
- )
+ source::Union{AbstractString,Nothing},
+ modules::AbstractVector{Module};
+ fix=false,
+ testset="Doctests",
+ doctestfilters=Regex[],
+ plugins=Plugin[],
+)
function all_doctests()
dir = mktempdir()
try
@@ -80,12 +80,12 @@ function doctest(
mkdir(source)
end
makedocs(;
- root = dir,
- source = source,
- sitename = "",
- doctest = fix ? :fix : :only,
- modules = modules,
- doctestfilters = doctestfilters,
+ root=dir,
+ source=source,
+ sitename="",
+ doctest=fix ? :fix : :only,
+ modules=modules,
+ doctestfilters=doctestfilters,
# When doctesting, we don't really want to get bogged down with issues
# related to determining the remote repositories for edit URLs and such
remotes = nothing,
@@ -93,13 +93,14 @@ function doctest(
)
true
catch err
- @error "Doctesting failed" exception=(err, catch_backtrace())
+ @error "Doctesting failed" exception = (err, catch_backtrace())
false
finally
try
rm(dir; recursive=true)
catch e
- @warn "Documenter was unable to clean up the temporary directory $(dir)" exception = e
+ @warn "Documenter was unable to clean up the temporary directory $(dir)" exception =
+ e
end
end
end
diff --git a/src/doctests.jl b/src/doctests.jl
index 685cc73acb..ef00e611be 100644
--- a/src/doctests.jl
+++ b/src/doctests.jl
@@ -5,15 +5,16 @@
# -------------------------
mutable struct MutableMD2CodeBlock
- language :: String
- code :: String
+ language::String
+ code::String
end
-MutableMD2CodeBlock(block :: MarkdownAST.CodeBlock) = MutableMD2CodeBlock(block.info, block.code)
+MutableMD2CodeBlock(block::MarkdownAST.CodeBlock) =
+ MutableMD2CodeBlock(block.info, block.code)
struct DocTestContext
- file :: String
- doc :: Documenter.Document
- meta :: Dict{Symbol, Any}
+ file::String
+ doc::Documenter.Document
+ meta::Dict{Symbol,Any}
DocTestContext(file::String, doc::Documenter.Document) = new(file, doc, Dict())
end
@@ -77,13 +78,18 @@ end
function parse_metablock(ctx::DocTestContext, block::MarkdownAST.CodeBlock)
@assert startswith(block.info, "@meta")
- meta = Dict{Symbol, Any}()
+ meta = Dict{Symbol,Any}()
for (ex, str) in Documenter.parseblock(block.code, ctx.doc, ctx.file)
if Documenter.isassign(ex)
try
meta[ex.args[1]] = Core.eval(Main, ex.args[2])
catch err
- @docerror(ctx.doc, :meta_block, "Failed to evaluate `$(strip(str))` in `@meta` block.", exception = err)
+ @docerror(
+ ctx.doc,
+ :meta_block,
+ "Failed to evaluate `$(strip(str))` in `@meta` block.",
+ exception = err
+ )
end
end
end
@@ -105,7 +111,7 @@ function _doctest(ctx::DocTestContext, block_immutable::MarkdownAST.CodeBlock)
lang = block_immutable.info
if startswith(lang, "jldoctest")
# Define new module or reuse an old one from this page if we have a named doctest.
- name = match(r"jldoctest[ ]?(.*)$", split(lang, ';', limit = 2)[1])[1]
+ name = match(r"jldoctest[ ]?(.*)$", split(lang, ';', limit=2)[1])[1]
sandbox = Documenter.get_sandbox_module!(ctx.meta, "doctest", name)
# Normalise line endings.
@@ -122,7 +128,9 @@ function _doctest(ctx::DocTestContext, block_immutable::MarkdownAST.CodeBlock)
e isa Meta.ParseError || rethrow(e)
file = ctx.meta[:CurrentFile]
lines = Documenter.find_block_in_file(block.code, file)
- @docerror(ctx.doc, :doctest,
+ @docerror(
+ ctx.doc,
+ :doctest,
"""
Unable to parse doctest keyword arguments in $(Documenter.locrepr(file, lines))
Use ```jldoctest name; key1 = value1, key2 = value2
@@ -130,14 +138,18 @@ function _doctest(ctx::DocTestContext, block_immutable::MarkdownAST.CodeBlock)
```$(lang)
$(block.code)
```
- """, parse_error = e)
+ """,
+ parse_error = e
+ )
return false
end
for kwarg in kwargs.args
if !(isa(kwarg, Expr) && kwarg.head === :(=) && isa(kwarg.args[1], Symbol))
file = ctx.meta[:CurrentFile]
lines = Documenter.find_block_in_file(block.code, file)
- @docerror(ctx.doc, :doctest,
+ @docerror(
+ ctx.doc,
+ :doctest,
"""
invalid syntax for doctest keyword arguments in $(Documenter.locrepr(file, lines))
Use ```jldoctest name; key1 = value1, key2 = value2
@@ -145,7 +157,8 @@ function _doctest(ctx::DocTestContext, block_immutable::MarkdownAST.CodeBlock)
```$(lang)
$(block.code)
```
- """)
+ """
+ )
return false
end
d[kwarg.args[1]] = Core.eval(sandbox, kwarg.args[2])
@@ -153,14 +166,20 @@ function _doctest(ctx::DocTestContext, block_immutable::MarkdownAST.CodeBlock)
end
ctx.meta[:LocalDocTestArguments] = d
- for expr in [get(ctx.meta, :DocTestSetup, []); get(ctx.meta[:LocalDocTestArguments], :setup, [])]
+ for expr in [
+ get(ctx.meta, :DocTestSetup, [])
+ get(ctx.meta[:LocalDocTestArguments], :setup, [])
+ ]
Meta.isexpr(expr, :block) && (expr.head = :toplevel)
try
Core.eval(sandbox, expr)
catch e
push!(ctx.doc.internal.errors, :doctest)
- @error("could not evaluate expression from doctest setup.",
- expression = expr, exception = e)
+ @error(
+ "could not evaluate expression from doctest setup.",
+ expression = expr,
+ exception = e
+ )
return false
end
end
@@ -171,7 +190,9 @@ function _doctest(ctx::DocTestContext, block_immutable::MarkdownAST.CodeBlock)
elseif occursin(r"^# output\s+$"m, block.code)
file = ctx.meta[:CurrentFile]
lines = Documenter.find_block_in_file(block.code, file)
- @docerror(ctx.doc, :doctest,
+ @docerror(
+ ctx.doc,
+ :doctest,
"""
invalid doctest block in $(Documenter.locrepr(file, lines))
Requires `# output` without trailing whitespace
@@ -179,11 +200,14 @@ function _doctest(ctx::DocTestContext, block_immutable::MarkdownAST.CodeBlock)
```$(lang)
$(block.code)
```
- """)
+ """
+ )
else
file = ctx.meta[:CurrentFile]
lines = Documenter.find_block_in_file(block.code, file)
- @docerror(ctx.doc, :doctest,
+ @docerror(
+ ctx.doc,
+ :doctest,
"""
invalid doctest block in $(Documenter.locrepr(file, lines))
Requires `julia> ` or `# output`
@@ -191,7 +215,8 @@ function _doctest(ctx::DocTestContext, block_immutable::MarkdownAST.CodeBlock)
```$(lang)
$(block.code)
```
- """)
+ """
+ )
end
delete!(ctx.meta, :LocalDocTestArguments)
end
@@ -220,14 +245,16 @@ function eval_repl(block, sandbox, meta::Dict, doc::Documenter.Document, page)
src_lines = Documenter.find_block_in_file(block.code, meta[:CurrentFile])
for (input, output) in repl_splitter(block.code)
result = Result(block, input, output, meta[:CurrentFile])
- for (ex, str) in Documenter.parseblock(input, doc, page; keywords = false, raise=false)
+ for (ex, str) in
+ Documenter.parseblock(input, doc, page; keywords=false, raise=false)
# Input containing a semi-colon gets suppressed in the final output.
- @debug "Evaluating REPL line from doctest at $(Documenter.locrepr(result.file, src_lines))" unparsed_string = str parsed_expression = ex
+ @debug "Evaluating REPL line from doctest at $(Documenter.locrepr(result.file, src_lines))" unparsed_string =
+ str parsed_expression = ex
result.hide = REPL.ends_with_semicolon(str)
# Use the REPL softscope for REPL jldoctests,
# see https://github.com/JuliaLang/julia/pull/33864
ex = REPL.softscope(ex)
- c = IOCapture.capture(rethrow = InterruptException) do
+ c = IOCapture.capture(rethrow=InterruptException) do
Core.eval(sandbox, ex)
end
Core.eval(sandbox, Expr(:global, Expr(:(=), :ans, QuoteNode(c.value))))
@@ -249,12 +276,12 @@ function eval_script(block, sandbox, meta::Dict, doc::Documenter.Document, page)
#
#
# to mark `input`/`output` separation.
- input, output = split(block.code, r"^# output$"m, limit = 2)
- input = rstrip(input, '\n')
+ input, output = split(block.code, r"^# output$"m, limit=2)
+ input = rstrip(input, '\n')
output = lstrip(output, '\n')
result = Result(block, input, output, meta[:CurrentFile])
- for (ex, str) in Documenter.parseblock(input, doc, page; keywords = false, raise=false)
- c = IOCapture.capture(rethrow = InterruptException) do
+ for (ex, str) in Documenter.parseblock(input, doc, page; keywords=false, raise=false)
+ c = IOCapture.capture(rethrow=InterruptException) do
Core.eval(sandbox, ex)
end
result.value = c.value
@@ -273,7 +300,7 @@ function filter_doctests(filters, strings)
# removed before comparing the inputs and outputs of a doctest. However, it can
# also be a regex => substitution pair in which case the match gets replaced by
# the substitution string.
- r, s = if isa(rs, Pair{Regex,T} where T <: AbstractString)
+ r, s = if isa(rs, Pair{Regex,T} where {T<:AbstractString})
rs
elseif isa(rs, Regex) || isa(rs, AbstractString)
rs, ""
@@ -298,15 +325,18 @@ function checkresult(sandbox::Module, result::Result, meta::Dict, doc::Documente
# To avoid dealing with path/line number issues in backtraces we use `[...]` to
# mark ignored output from an error message. Only the text prior to it is used to
# test for doctest success/failure.
- head = replace(split(result.output, "\n[...]"; limit = 2)[1], mod_regex => "")
+ head = replace(split(result.output, "\n[...]"; limit=2)[1], mod_regex => "")
head = replace(head, mod_regex_nodot => "Main")
- str = error_to_string(outio, result.value, result.bt)
- str = replace(str, mod_regex => "")
- str = replace(str, mod_regex_nodot => "Main")
+ str = error_to_string(outio, result.value, result.bt)
+ str = replace(str, mod_regex => "")
+ str = replace(str, mod_regex_nodot => "Main")
filteredstr, filteredhead = filter_doctests(filters, (str, head))
@debug debug_report(
- result=result, filters = filters, expected_filtered = filteredhead,
- evaluated = rstrip(str), evaluated_filtered = filteredstr
+ result=result,
+ filters=filters,
+ expected_filtered=filteredhead,
+ evaluated=rstrip(str),
+ evaluated_filtered=filteredstr
)
# Since checking for the prefix of an error won't catch the empty case we need
# to check that manually with `isempty`.
@@ -327,8 +357,11 @@ function checkresult(sandbox::Module, result::Result, meta::Dict, doc::Documente
str = rstrip(replace(str, mod_regex_nodot => "Main"))
filteredstr, filteredoutput = filter_doctests(filters, (str, output))
@debug debug_report(
- result=result, filters = filters, expected_filtered = filteredoutput,
- evaluated = rstrip(str), evaluated_filtered = filteredstr
+ result=result,
+ filters=filters,
+ expected_filtered=filteredoutput,
+ evaluated=rstrip(str),
+ evaluated_filtered=filteredstr
)
if filteredstr != filteredoutput
if doc.user.doctest === :fix
@@ -366,8 +399,11 @@ function debug_report(; result, expected_filtered, evaluated, evaluated_filtered
"""
if !isempty(filters)
r *= "\n"
- r *= (length(filters) == 1) ? "1 doctest filter was applied:\n\n" :
+ r *= if (length(filters) == 1)
+ "1 doctest filter was applied:\n\n"
+ else
"$(length(filters)) doctest filters were applied:\n\n"
+ end
for rs in filters
r *= " $rs\n"
end
@@ -395,7 +431,8 @@ end
# Display doctesting results.
function result_to_string(buf, value)
- value === nothing || Base.invokelatest(show, IOContext(buf, :limit => true), MIME"text/plain"(), value)
+ value === nothing ||
+ Base.invokelatest(show, IOContext(buf, :limit => true), MIME"text/plain"(), value)
return sanitise(buf)
end
@@ -404,7 +441,7 @@ function error_to_string(buf, er, bt)
bt = Documenter.remove_common_backtrace(bt, backtrace())
# Remove everything below the last eval call (which should be the one in IOCapture.capture)
index = findlast(ptr -> Base.ip_matches_func(ptr, :eval), bt)
- bt = (index === nothing) ? bt : bt[1:(index - 1)]
+ bt = (index === nothing) ? bt : bt[1:(index-1)]
# Print a REPL-like error message.
print(buf, "ERROR: ")
Base.invokelatest(showerror, buf, er, bt)
@@ -426,26 +463,31 @@ function report(result::Result, str, doc::Documenter.Document)
diff = TextDiff.Diff{TextDiff.Words}(result.output, rstrip(str))
lines = Documenter.find_block_in_file(result.block.code, result.file)
line = lines === nothing ? nothing : first(lines)
- @error("""
- doctest failure in $(Documenter.locrepr(result.file, lines))
+ @error(
+ """
+ doctest failure in $(Documenter.locrepr(result.file, lines))
- ```$(result.block.language)
- $(result.block.code)
- ```
+ ```$(result.block.language)
+ $(result.block.code)
+ ```
- Subexpression:
+ Subexpression:
- $(result.input)
+ $(result.input)
- Evaluated output:
+ Evaluated output:
- $(rstrip(str))
+ $(rstrip(str))
- Expected output:
+ Expected output:
- $(result.output)
+ $(result.output)
- """, diff, _file=result.file, _line=line)
+ """,
+ diff,
+ _file = result.file,
+ _line = line
+ )
end
function fix_doctest(result::Result, str, doc::Documenter.Document)
@@ -454,7 +496,7 @@ function fix_doctest(result::Result, str, doc::Documenter.Document)
# read the file containing the code block
content = read(filename, String)
# output stream
- io = IOBuffer(sizehint = sizeof(content))
+ io = IOBuffer(sizehint=sizeof(content))
# first look for the entire code block
# make a regex of the code that matches leading whitespace
rcode = "(\\h*)" * replace(Documenter.regex_escape(code), "\\n" => "\\n\\h*")
@@ -488,7 +530,8 @@ function fix_doctest(result::Result, str, doc::Documenter.Document)
newcode *= str
newcode *= code[nextind(code, last(inputidx)):end]
else
- newcode *= replace(code[nextind(code, last(inputidx)):end], result.output => str, count = 1)
+ newcode *=
+ replace(code[nextind(code, last(inputidx)):end], result.output => str, count=1)
end
# replace internal code block with the non-indented new code, needed if we come back
# looking to replace output in the same code block later
@@ -509,8 +552,8 @@ const PROMPT_REGEX = r"^julia> (.*)$"
const SOURCE_REGEX = r"^ (.*)$"
function repl_splitter(code)
- lines = split(string(code, "\n"), '\n')
- input = String[]
+ lines = split(string(code, "\n"), '\n')
+ input = String[]
output = String[]
buffer = IOBuffer() # temporary buffer for doctest inputs and outputs
found_first_prompt = false
diff --git a/src/documents.jl b/src/documents.jl
index 33e72ce035..d7f38b7a08 100644
--- a/src/documents.jl
+++ b/src/documents.jl
@@ -13,7 +13,7 @@ abstract type AbstractDocumenterBlock <: MarkdownAST.AbstractBlock end
"""
mutable struct Globals
mod :: Module
- meta :: Dict{Symbol, Any}
+ meta :: Dict{Symbol,Any}
end
Globals() = Globals(Main, Dict())
@@ -21,21 +21,21 @@ Globals() = Globals(Main, Dict())
Represents a single markdown file.
"""
struct Page
- source :: String
- build :: String
- workdir :: Union{Symbol,String}
+ source::String
+ build::String
+ workdir::Union{Symbol,String}
"""
Ordered list of raw toplevel markdown nodes from the parsed page contents. This vector
should be considered immutable.
"""
- elements :: Vector
+ elements::Vector
"""
Each element in `.elements` maps to an "expanded" element. This may be itself if the
element does not need expanding or some other object, such as a `DocsNode` in the case
of `@docs` code blocks.
"""
- globals :: Globals
- mdast :: MarkdownAST.Node{Nothing}
+ globals::Globals
+ mdast::MarkdownAST.Node{Nothing}
end
function Page(source::AbstractString, build::AbstractString, workdir::AbstractString)
# The Markdown standard library parser is sensitive to line endings:
@@ -59,7 +59,8 @@ function Page(source::AbstractString, build::AbstractString, workdir::AbstractSt
end
# FIXME -- special overload for parseblock
-parseblock(code::AbstractString, doc, page::Documenter.Page; kwargs...) = parseblock(code, doc, page.source; kwargs...)
+parseblock(code::AbstractString, doc, page::Documenter.Page; kwargs...) =
+ parseblock(code, doc, page.source; kwargs...)
# Document blueprints.
# --------------------
@@ -67,8 +68,8 @@ parseblock(code::AbstractString, doc, page::Documenter.Page; kwargs...) = parseb
# Should contain all the information that is necessary to build a document.
# Currently has enough information to just run doctests.
struct DocumentBlueprint
- pages :: Dict{String, Page} # Markdown files only.
- modules :: Set{Module} # Which modules to check for missing docs?
+ pages::Dict{String,Page} # Markdown files only.
+ modules::Set{Module} # Which modules to check for missing docs?
end
@@ -78,24 +79,25 @@ end
## IndexNode.
struct IndexNode <: AbstractDocumenterBlock
- pages :: Vector{String} # Which pages to include in the index? Set by user.
- modules :: Vector{Module} # Which modules to include? Set by user.
- order :: Vector{Symbol} # What order should docs be listed in? Set by user.
- build :: String # Path to the file where this index will appear.
- source :: String # Path to the file where this index was written.
- elements :: Vector # (object, doc, page, mod, cat)-tuple for constructing links.
- codeblock :: MarkdownAST.CodeBlock # original code block
-
- function IndexNode(codeblock;
- # TODO: Fix difference between uppercase and lowercase naming of keys.
- # Perhaps deprecate the uppercase versions? Same with `ContentsNode`.
- Pages = [],
- Modules = [],
- Order = [:module, :constant, :type, :function, :macro],
- build = error("missing value for `build` in `IndexNode`."),
- source = error("missing value for `source` in `IndexNode`."),
- others...
- )
+ pages :: Vector{String} # Which pages to include in the index? Set by user.
+ modules :: Vector{Module} # Which modules to include? Set by user.
+ order :: Vector{Symbol} # What order should docs be listed in? Set by user.
+ build :: String # Path to the file where this index will appear.
+ source :: String # Path to the file where this index was written.
+ elements :: Vector # (object, doc, page, mod, cat)-tuple for constructing links.
+ codeblock :: MarkdownAST.CodeBlock # original code block
+
+ function IndexNode(
+ codeblock;
+ # TODO: Fix difference between uppercase and lowercase naming of keys.
+ # Perhaps deprecate the uppercase versions? Same with `ContentsNode`.
+ Pages=[],
+ Modules=[],
+ Order=[:module, :constant, :type, :function, :macro],
+ build=error("missing value for `build` in `IndexNode`."),
+ source=error("missing value for `source` in `IndexNode`."),
+ others...
+ )
if !isempty(others)
@warn(
"In file $source: the following unsupported keyword " *
@@ -110,21 +112,22 @@ end
## ContentsNode.
struct ContentsNode <: AbstractDocumenterBlock
- pages :: Vector{String} # Which pages should be included in contents? Set by user.
- mindepth :: Int # Minimum header level that should be displayed. Set by user.
- depth :: Int # Down to which level should headers be displayed? Set by user.
- build :: String # Same as for `IndexNode`s.
- source :: String # Same as for `IndexNode`s.
- elements :: Vector # (order, page, anchor)-tuple for constructing links.
- codeblock :: MarkdownAST.CodeBlock # original code block
-
- function ContentsNode(codeblock;
- Pages = [],
- Depth = 1:2,
- build = error("missing value for `build` in `ContentsNode`."),
- source = error("missing value for `source` in `ContentsNode`."),
- others...
- )
+ pages :: Vector{String} # Which pages should be included in contents? Set by user.
+ mindepth :: Int # Minimum header level that should be displayed. Set by user.
+ depth :: Int # Down to which level should headers be displayed? Set by user.
+ build :: String # Same as for `IndexNode`s.
+ source :: String # Same as for `IndexNode`s.
+ elements :: Vector # (order, page, anchor)-tuple for constructing links.
+ codeblock :: MarkdownAST.CodeBlock # original code block
+
+ function ContentsNode(
+ codeblock;
+ Pages=[],
+ Depth=1:2,
+ build=error("missing value for `build` in `ContentsNode`."),
+ source=error("missing value for `source` in `ContentsNode`."),
+ others...
+ )
if Depth isa Integer
Depth = 1:Depth
end
@@ -142,8 +145,8 @@ end
## Other nodes
struct MetaNode <: AbstractDocumenterBlock
- codeblock :: MarkdownAST.CodeBlock
- dict :: Dict{Symbol, Any}
+ codeblock::MarkdownAST.CodeBlock
+ dict::Dict{Symbol,Any}
end
struct MethodNode
@@ -152,26 +155,26 @@ struct MethodNode
end
struct DocsNode <: AbstractDocumenterBlock
- anchor :: Anchor
- object :: Object
- page :: Documenter.Page
+ anchor :: Anchor
+ object :: Object
+ page :: Documenter.Page
# MarkdownAST support.
# TODO: should be the docstring components (i.e. .mdasts) be stored as child nodes?
mdasts :: Vector{MarkdownAST.Node{Nothing}}
results :: Vector{Base.Docs.DocStr}
- metas :: Vector{Dict{Symbol, Any}}
+ metas :: Vector{Dict{Symbol,Any}}
function DocsNode(anchor, object, page)
new(anchor, object, page, [], [], [])
end
end
struct DocsNodes
- nodes :: Vector{Union{DocsNode,Markdown.Admonition}}
+ nodes::Vector{Union{DocsNode,Markdown.Admonition}}
end
struct EvalNode <: AbstractDocumenterBlock
- codeblock :: MarkdownAST.CodeBlock
- result :: Union{MarkdownAST.Node, Nothing}
+ codeblock::MarkdownAST.CodeBlock
+ result::Union{MarkdownAST.Node,Nothing}
end
struct RawNode <: AbstractDocumenterBlock
@@ -184,14 +187,14 @@ end
# In the MarkdownAST representation, the dictionaries get converted into
# MultiOutputElement elements.
struct MultiOutput <: AbstractDocumenterBlock
- codeblock :: MarkdownAST.CodeBlock
+ codeblock::MarkdownAST.CodeBlock
end
# For @repl blocks we store the inputs and outputs as separate Markdown.Code
# objects, and then combine them in the writer. When converting to MarkdownAST,
# those separate code blocks become child nodes.
struct MultiCodeBlock <: AbstractDocumenterBlock
- codeblock :: MarkdownAST.CodeBlock
+ codeblock::MarkdownAST.CodeBlock
language::String
content::Vector{Markdown.Code}
end
@@ -236,19 +239,20 @@ mutable struct NavNode
`nothing` if the `NavNode` is a non-page node of the navigation tree, otherwise
the string should be a valid key in `doc.blueprint.pages`
"""
- page :: Union{String, Nothing}
+ page::Union{String,Nothing}
"""
If not `nothing`, specifies the text that should be displayed in navigation
links etc. instead of the automatically determined text.
"""
- title_override :: Union{String, Nothing}
- parent :: Union{NavNode, Nothing}
+ title_override :: Union{String,Nothing}
+ parent :: Union{NavNode,Nothing}
children :: Vector{NavNode}
visible :: Bool
- prev :: Union{NavNode, Nothing}
- next :: Union{NavNode, Nothing}
+ prev :: Union{NavNode,Nothing}
+ next :: Union{NavNode,Nothing}
end
-NavNode(page, title_override, parent) = NavNode(page, title_override, parent, [], true, nothing, nothing)
+NavNode(page, title_override, parent) =
+ NavNode(page, title_override, parent, [], true, nothing, nothing)
# This method ensures that we do not print the whole navtree in case we ever happen to print
# a NavNode in some debug output somewhere.
function Base.show(io::IO, n::NavNode)
@@ -261,8 +265,8 @@ Constructs a list of the ancestors of the `navnode` (including the `navnode` its
ordered so that the root of the navigation tree is the first and `navnode` itself
is the last item.
"""
-navpath(navnode::NavNode) = navnode.parent === nothing ? [navnode] :
- push!(navpath(navnode.parent), navnode)
+navpath(navnode::NavNode) =
+ navnode.parent === nothing ? [navnode] : push!(navpath(navnode.parent), navnode)
# Inner Document Fields.
@@ -284,7 +288,8 @@ function RemoteRepository(root::AbstractString, remote::Remotes.Remote)
RemoteRepository(realpath(root), remote, repo_commit(root))
catch e
e isa RepoCommitError || rethrow()
- @error "Unable to determine the commit for the remote repository:\n$(e.msg)" e.directory exception = e.err_bt
+ @error "Unable to determine the commit for the remote repository:\n$(e.msg)" e.directory exception =
+ e.err_bt
throw(ArgumentError("""
Unable to determine the commit for the remote repository
at $(root) => $(remote)"""))
@@ -296,27 +301,28 @@ $(SIGNATURES)
Returns the first 5 characters of the current Git commit hash of the remote.
"""
-shortcommit(remoteref::RemoteRepository) = (length(remoteref.commit) > 5) ? remoteref.commit[1:5] : remoteref.commit
+shortcommit(remoteref::RemoteRepository) =
+ (length(remoteref.commit) > 5) ? remoteref.commit[1:5] : remoteref.commit
"""
User-specified values used to control the generation process.
"""
struct User
- root :: String # An absolute path to the root directory of the document.
- source :: String # Parent directory is `.root`. Where files are read from.
- build :: String # Parent directory is also `.root`. Where files are written to.
- workdir :: Union{Symbol,String} # Parent directory is also `.root`. Where code is executed from.
- format :: Vector{Writer} # What format to render the final document with?
- clean :: Bool # Empty the `build` directory before starting a new build?
- doctest :: Union{Bool,Symbol} # Run doctests?
+ root::String # An absolute path to the root directory of the document.
+ source::String # Parent directory is `.root`. Where files are read from.
+ build::String # Parent directory is also `.root`. Where files are written to.
+ workdir::Union{Symbol,String} # Parent directory is also `.root`. Where code is executed from.
+ format::Vector{Writer} # What format to render the final document with?
+ clean::Bool # Empty the `build` directory before starting a new build?
+ doctest::Union{Bool,Symbol} # Run doctests?
linkcheck::Bool # Check external links..
linkcheck_ignore::Vector{Union{String,Regex}} # ..and then ignore (some of) them.
linkcheck_timeout::Real # ..but only wait this many seconds for each one.
checkdocs::Symbol # Check objects missing from `@docs` blocks. `:none`, `:exports`, or `:all`.
doctestfilters::Vector{Regex} # Filtering for doctests
warnonly::Vector{Symbol} # List of docerror groups that should only warn, rather than cause a build failure
- pages :: Vector{Any} # Ordering of document pages specified by the user.
- pagesonly :: Bool # Discard any .md pages from processing that are not in .pages
+ pages::Vector{Any} # Ordering of document pages specified by the user.
+ pagesonly::Bool # Discard any .md pages from processing that are not in .pages
expandfirst::Vector{String} # List of pages that get "expanded" before others
# Remote Git repository information
#
@@ -333,32 +339,32 @@ struct User
# While the initial list in .remotes is populated when we construct the Document
# object, we also dynamically add links to the .remotes array as we check different
# files, by looking at .git directories.
- remote :: Union{Remotes.Remote,Nothing}
- remotes :: Union{Vector{RemoteRepository},Nothing}
- sitename:: String
- authors :: String
- version :: String # version string used in the version selector by default
+ remote::Union{Remotes.Remote,Nothing}
+ remotes::Union{Vector{RemoteRepository},Nothing}
+ sitename::String
+ authors::String
+ version::String # version string used in the version selector by default
highlightsig::Bool # assume leading unlabeled code blocks in docstrings to be Julia.
- draft :: Bool
+ draft::Bool
end
"""
Private state used to control the generation process.
"""
struct Internal
- assets :: String # Path where asset files will be copied to.
- navtree :: Vector{NavNode} # A vector of top-level navigation items.
- navlist :: Vector{NavNode} # An ordered list of `NavNode`s that point to actual pages
- headers :: AnchorMap # See `modules/Anchors.jl`. Tracks `Markdown.Header` objects.
- docs :: AnchorMap # See `modules/Anchors.jl`. Tracks `@docs` docstrings.
- bindings:: IdDict{Any,Any} # Tracks insertion order of object per-binding.
- objects :: IdDict{Any,Any} # Tracks which `Objects` are included in the `Document`.
- contentsnodes :: Vector{ContentsNode}
- indexnodes :: Vector{IndexNode}
- locallinks :: IdDict{MarkdownAST.Link, String}
+ assets::String # Path where asset files will be copied to.
+ navtree::Vector{NavNode} # A vector of top-level navigation items.
+ navlist::Vector{NavNode} # An ordered list of `NavNode`s that point to actual pages
+ headers::AnchorMap # See `modules/Anchors.jl`. Tracks `Markdown.Header` objects.
+ docs::AnchorMap # See `modules/Anchors.jl`. Tracks `@docs` docstrings.
+ bindings::IdDict{Any,Any} # Tracks insertion order of object per-binding.
+ objects::IdDict{Any,Any} # Tracks which `Objects` are included in the `Document`.
+ contentsnodes::Vector{ContentsNode}
+ indexnodes::Vector{IndexNode}
+ locallinks::IdDict{MarkdownAST.Link,String}
errors::Set{Symbol}
- src_to_uuid::Dict{String, Base.UUID} # These two are used to cache information from Pkg
- uuid_to_version_info::Dict{Base.UUID, Tuple{VersionNumber, String}}
+ src_to_uuid::Dict{String,Base.UUID} # These two are used to cache information from Pkg
+ uuid_to_version_info::Dict{Base.UUID,Tuple{VersionNumber,String}}
end
# Document.
@@ -368,40 +374,40 @@ end
Represents an entire document.
"""
struct Document
- user :: User # Set by the user via `makedocs`.
- internal :: Internal # Computed values.
- plugins :: Dict{DataType, Plugin}
- blueprint :: DocumentBlueprint
+ user::User # Set by the user via `makedocs`.
+ internal::Internal # Computed values.
+ plugins::Dict{DataType,Plugin}
+ blueprint::DocumentBlueprint
end
function Document(;
- root :: AbstractString = currentdir(),
- source :: AbstractString = "src",
- build :: AbstractString = "build",
- workdir :: Union{Symbol, AbstractString} = :build,
- format :: Any = HTML(),
- clean :: Bool = true,
- doctest :: Union{Bool,Symbol} = true,
- linkcheck:: Bool = false,
- linkcheck_ignore :: Vector = [],
- linkcheck_timeout :: Real = 10,
- checkdocs::Symbol = :all,
- doctestfilters::Vector{Regex}= Regex[],
- warnonly :: Union{Bool,Symbol,Vector{Symbol}} = Symbol[],
- modules :: ModVec = Module[],
- pages :: Vector = Any[],
- pagesonly:: Bool = false,
- expandfirst :: Vector = String[],
- plugins :: Vector = Plugin[],
- repo :: Union{Remotes.Remote, AbstractString} = "",
- remotes :: Union{Dict, Nothing} = Dict(),
- sitename :: AbstractString = "",
- authors :: AbstractString = "",
- version :: AbstractString = "",
- highlightsig::Bool = true,
- draft::Bool = false,
- others...
- )
+ root::AbstractString=currentdir(),
+ source::AbstractString="src",
+ build::AbstractString="build",
+ workdir::Union{Symbol,AbstractString}=:build,
+ format::Any=HTML(),
+ clean::Bool=true,
+ doctest::Union{Bool,Symbol}=true,
+ linkcheck::Bool=false,
+ linkcheck_ignore::Vector=[],
+ linkcheck_timeout::Real=10,
+ checkdocs::Symbol=:all,
+ doctestfilters::Vector{Regex}=Regex[],
+ warnonly::Union{Bool,Symbol,Vector{Symbol}}=Symbol[],
+ modules::ModVec=Module[],
+ pages::Vector=Any[],
+ pagesonly::Bool=false,
+ expandfirst::Vector=String[],
+ plugins::Vector=Plugin[],
+ repo::Union{Remotes.Remote,AbstractString}="",
+ remotes::Union{Dict,Nothing}=Dict(),
+ sitename::AbstractString="",
+ authors::AbstractString="",
+ version::AbstractString="",
+ highlightsig::Bool=true,
+ draft::Bool=false,
+ others...
+)
if !isempty(others)
msg = "makedocs() got passed invalid keyword arguments:"
@@ -474,25 +480,25 @@ function Document(;
IdDict{Any,Any}(),
[],
[],
- Dict{Markdown.Link, String}(),
+ Dict{Markdown.Link,String}(),
Set{Symbol}(),
- Dict{String, String}(),
- Dict{String, Tuple{String, String}}()
+ Dict{String,String}(),
+ Dict{String,Tuple{String,String}}()
)
- plugin_dict = Dict{DataType, Plugin}()
+ plugin_dict = Dict{DataType,Plugin}()
for plugin in plugins
- plugin isa Plugin ||
- throw(ArgumentError("$(typeof(plugin)) in `plugins=` is not a subtype of `Documenter.Plugin`."))
+ plugin isa Plugin || throw(
+ ArgumentError(
+ "$(typeof(plugin)) in `plugins=` is not a subtype of `Documenter.Plugin`."
+ )
+ )
haskey(plugin_dict, typeof(plugin)) &&
throw(ArgumentError("only one copy of $(typeof(plugin)) may be passed."))
plugin_dict[typeof(plugin)] = plugin
end
- blueprint = DocumentBlueprint(
- Dict{String, Page}(),
- submodules(modules),
- )
+ blueprint = DocumentBlueprint(Dict{String,Page}(), submodules(modules),)
Document(user, internal, plugin_dict, blueprint)
end
@@ -505,7 +511,9 @@ function interpret_repo_and_remotes(; root, repo, remotes)
# argument of makedocs)
path = joinpath(root, path)
if !isdir(path)
- throw(ArgumentError(("Invalid local path in remotes (not a directory): $(path)")))
+ throw(
+ ArgumentError(("Invalid local path in remotes (not a directory): $(path)"))
+ )
end
path = realpath(path)
# We'll also check that there are no duplicate entries.
@@ -517,7 +525,7 @@ function interpret_repo_and_remotes(; root, repo, remotes)
"""))
end
# Now we actually check the remotes themselves
- remote = if remoteref isa Tuple{Remotes.Remote, AbstractString}
+ remote = if remoteref isa Tuple{Remotes.Remote,AbstractString}
RemoteRepository(path, remoteref[1], remoteref[2])
elseif remoteref isa Remotes.Remote
RemoteRepository(path, remoteref)
@@ -531,17 +539,18 @@ function interpret_repo_and_remotes(; root, repo, remotes)
end
# We'll normalize repo to be a `Remotes.Remote` object (or nothing if omitted)
- repo_normalized::Union{Remotes.Remote, Nothing} = if isa(repo, AbstractString) && isempty(repo)
- # If the user does not provide the `repo` argument, we'll try to automatically
- # detect the remote repository later. But for now, we'll set it to `nothing`.
- nothing
- elseif repo isa AbstractString
- # Use the old template string parsing logic if a string was passed.
- Remotes.URL(repo)
- else
- # Otherwise it should be some Remote object, so we'll just use that.
- repo
- end
+ repo_normalized::Union{Remotes.Remote,Nothing} =
+ if isa(repo, AbstractString) && isempty(repo)
+ # If the user does not provide the `repo` argument, we'll try to automatically
+ # detect the remote repository later. But for now, we'll set it to `nothing`.
+ nothing
+ elseif repo isa AbstractString
+ # Use the old template string parsing logic if a string was passed.
+ Remotes.URL(repo)
+ else
+ # Otherwise it should be some Remote object, so we'll just use that.
+ repo
+ end
# Now we sort out the interaction between `repo` and `remotes`. Our goal is to make sure that we have a
# value in both remotes for the repository root, and also that we get the correct value for the main
@@ -562,8 +571,9 @@ function interpret_repo_and_remotes(; root, repo, remotes)
end
return false
end
- makedocs_root_repo::Union{String, Nothing} = find_root_parent(is_git_repo_root, root)
- makedocs_root_remote::Union{Remotes.Remote, Nothing} = isnothing(makedocs_root_repo) ? nothing : getremote(makedocs_root_repo)
+ makedocs_root_repo::Union{String,Nothing} = find_root_parent(is_git_repo_root, root)
+ makedocs_root_remote::Union{Remotes.Remote,Nothing} =
+ isnothing(makedocs_root_repo) ? nothing : getremote(makedocs_root_repo)
@debug "interpret_repo_and_remotes" remotes_checked repo_normalized makedocs_root_remoteref makedocs_root_repo makedocs_root_remote
if !isnothing(makedocs_root_remoteref) && !isnothing(makedocs_root_repo)
# If both are set, then there is potential for conflict.
@@ -587,7 +597,7 @@ function interpret_repo_and_remotes(; root, repo, remotes)
# with remotes. In that case, the remote in `remotes` takes precedence as well.
@debug "Remotes: `remotes` takes precedence over automatically determined remote" makedocs_root_remoteref makedocs_root_repo makedocs_root_remote repo_normalized
makedocs_root_remote = makedocs_root_remoteref.remote
- elseif startswith(makedocs_root_remoteref.root, makedocs_root_repo)
+ elseif startswith(makedocs_root_remoteref.root, makedocs_root_repo)
# In this case we determined that root of the repository is more specific than
# whatever we found in remotes. So the main remote will be determined from the Git
# repository. This will be a no-op, except that `repo` argument may override the
@@ -599,7 +609,10 @@ function interpret_repo_and_remotes(; root, repo, remotes)
@debug "Remotes: repo not set, using automatically determined remote" makedocs_root_remoteref makedocs_root_repo makedocs_root_remote repo_normalized
end
# Since this path was not in remotes, we also need to add it there.
- addremote!(remotes_checked, RemoteRepository(makedocs_root_repo, makedocs_root_remote))
+ addremote!(
+ remotes_checked,
+ RemoteRepository(makedocs_root_repo, makedocs_root_remote)
+ )
else
# The final case is where the two repo paths have different roots, which should never
# happen.
@@ -640,7 +653,10 @@ function interpret_repo_and_remotes(; root, repo, remotes)
throw(ArgumentError(err))
end
# Since this path was not in remotes, we also need to add it there.
- addremote!(remotes_checked, RemoteRepository(makedocs_root_repo, makedocs_root_remote))
+ addremote!(
+ remotes_checked,
+ RemoteRepository(makedocs_root_repo, makedocs_root_remote)
+ )
else
# Finally, if we're neither in a git repo, and nothing is in remotes,
err = "Unable to automatically determine remote for main repo."
@@ -705,12 +721,13 @@ function addremote!(remotes::Vector{RemoteRepository}, remoteref::RemoteReposito
sortremotes!(remotes)
return nothing
end
-addremote!(doc::Document, remoteref::RemoteRepository) = addremote!(doc.user.remotes, remoteref)
+addremote!(doc::Document, remoteref::RemoteRepository) =
+ addremote!(doc.user.remotes, remoteref)
# We'll sort the remotes, first, to make sure that the longer paths come first,
# so that we could match them first. How the individual paths are sorted is pretty
# unimportant, but we just want to make sure they are sorted in some well-defined
# order.
-sortremotes!(remotes::Vector{RemoteRepository}) = sort!(remotes, lt = lt_remotepair)
+sortremotes!(remotes::Vector{RemoteRepository}) = sort!(remotes, lt=lt_remotepair)
function lt_remotepair(r1::RemoteRepository, r2::RemoteRepository)
if length(r1.root) == length(r2.root)
return r1.root < r2.root
@@ -846,7 +863,7 @@ function relpath_from_remote_root(doc::Document, path::AbstractString)
else
# When root_remote is set, so should be root_directory
@assert !isnothing(root_directory)
- return (; repo = root_remote, relpath = relpath(path, root_directory))
+ return (; repo=root_remote, relpath=relpath(path, root_directory))
end
end
@@ -870,9 +887,8 @@ function edit_url(doc::Document, path; rev::Union{AbstractString,Nothing})
return repofile(remoteref.repo.remote, rev, remoteref.relpath)
end
-source_url(doc::Document, docstring) = source_url(
- doc, docstring.data[:module], docstring.data[:path], linerange(docstring)
-)
+source_url(doc::Document, docstring) =
+ source_url(doc, docstring.data[:module], docstring.data[:path], linerange(docstring))
function source_url(doc::Document, mod::Module, file::AbstractString, linerange)
# If the user has disable remote links, we abort immediately
@@ -892,10 +908,15 @@ function source_url(doc::Document, mod::Module, file::AbstractString, linerange)
isfile(file) || return nothing
remoteref = relpath_from_remote_root(doc, file)
if isnothing(remoteref)
- throw(MissingRemoteError(; path = file, linerange, mod))
+ throw(MissingRemoteError(; path=file, linerange, mod))
end
@debug "source_url" mod file linerange remoteref
- return repofile(remoteref.repo.remote, remoteref.repo.commit, remoteref.relpath, linerange)
+ return repofile(
+ remoteref.repo.remote,
+ remoteref.repo.commit,
+ remoteref.relpath,
+ linerange
+ )
end
"""
@@ -906,7 +927,7 @@ object of type `T` was an element of the `plugins` list passed to [`makedocs`](@
that object will be returned. Otherwise, a new `T` object will be created using the default
constructor `T()`. Subsequent calls to `getplugin(doc, T)` return the same object.
"""
-function getplugin(doc::Document, plugin_type::Type{T}) where T <: Plugin
+function getplugin(doc::Document, plugin_type::Type{T}) where {T<:Plugin}
if !haskey(doc.plugins, plugin_type)
doc.plugins[plugin_type] = plugin_type()
end
@@ -916,7 +937,12 @@ end
## Methods
-function addpage!(doc::Document, src::AbstractString, dst::AbstractString, wd::AbstractString)
+function addpage!(
+ doc::Document,
+ src::AbstractString,
+ dst::AbstractString,
+ wd::AbstractString
+)
page = Page(src, dst, wd)
# page's identifier is the path relative to the `doc.user.source` directory
name = pagekey(doc, page)
@@ -931,10 +957,7 @@ end
# relative to doc.user.root). This function calculates the key corresponding
# to a page.
pagekey(doc::Document, page::Page) = normpath(
- relpath(
- joinpath(doc.user.root, page.source),
- joinpath(doc.user.root, doc.user.source)
- )
+ relpath(joinpath(doc.user.root, page.source), joinpath(doc.user.root, doc.user.source))
)
"""
@@ -960,8 +983,11 @@ function populate!(index::IndexNode, document::Document)
page = relpath(doc.page.build, dirname(index.build))
mod = object.binding.mod
# Include *all* signatures, whether they are `Union{}` or not.
- cat = Symbol(lowercase(doccat(object.binding, Union{})))
- if is_canonical(object) && _isvalid(page, index.pages) && _isvalid(mod, index.modules) && _isvalid(cat, index.order)
+ cat = Symbol(lowercase(doccat(object.binding, Union{})))
+ if is_canonical(object) &&
+ _isvalid(page, index.pages) &&
+ _isvalid(mod, index.modules) &&
+ _isvalid(cat, index.order)
push!(index.elements, (object, doc, page, mod, cat))
end
end
@@ -969,13 +995,13 @@ function populate!(index::IndexNode, document::Document)
pagesmap = precedence(index.pages)
modulesmap = precedence(index.modules)
ordermap = precedence(index.order)
- comparison = function(a, b)
- (x = _compare(pagesmap, 3, a, b)) == 0 || return x < 0 # page
+ comparison = function (a, b)
+ (x = _compare(pagesmap, 3, a, b)) == 0 || return x < 0 # page
(x = _compare(modulesmap, 4, a, b)) == 0 || return x < 0 # module
- (x = _compare(ordermap, 5, a, b)) == 0 || return x < 0 # category
+ (x = _compare(ordermap, 5, a, b)) == 0 || return x < 0 # category
string(a[1].binding) < string(b[1].binding) # object name
end
- sort!(index.elements, lt = comparison)
+ sort!(index.elements, lt=comparison)
return index
end
@@ -995,11 +1021,11 @@ function populate!(contents::ContentsNode, document::Document)
end
# Sorting contents links.
pagesmap = precedence(contents.pages)
- comparison = function(a, b)
+ comparison = function (a, b)
(x = _compare(pagesmap, 2, a, b)) == 0 || return x < 0 # page
a[1] < b[1] # anchor order
end
- sort!(contents.elements, lt = comparison)
+ sort!(contents.elements, lt=comparison)
return contents
end
@@ -1018,8 +1044,9 @@ doctest_replace!(docsnode::DocsNode) = foreach(doctest_replace!, docsnode.mdasts
function doctest_replace!(block::MarkdownAST.CodeBlock)
startswith(block.info, "jldoctest") || return
# suppress output for `#output`-style doctests with `output=false` kwarg
- if occursin(r"^# output$"m, block.code) && occursin(r";.*output\h*=\h*false", block.info)
- input = first(split(block.code, "# output\n", limit = 2))
+ if occursin(r"^# output$"m, block.code) &&
+ occursin(r";.*output\h*=\h*false", block.info)
+ input = first(split(block.code, "# output\n", limit=2))
block.code = rstrip(input)
end
# correct the language field
@@ -1029,7 +1056,7 @@ doctest_replace!(@nospecialize _) = nothing
function buildnode(T::Type, block, doc, page)
mod = get(page.globals.meta, :CurrentModule, Main)
- dict = Dict{Symbol, Any}(:source => page.source, :build => page.build)
+ dict = Dict{Symbol,Any}(:source => page.source, :build => page.build)
for (ex, str) in parseblock(block.code, doc, page)
if isassign(ex)
cd(dirname(page.source)) do
@@ -1052,7 +1079,7 @@ precedence(vec) = Dict(zip(vec, 1:length(vec)))
# Conversion to MarkdownAST, for writers
struct AnchoredHeader <: AbstractDocumenterBlock
- anchor :: Anchor
+ anchor::Anchor
end
MarkdownAST.iscontainer(::AnchoredHeader) = true
@@ -1061,25 +1088,26 @@ MarkdownAST.iscontainer(::AnchoredHeader) = true
# In addition, the child node can also be an Admonition in case there was an error
# in splicing in a docstring.
struct DocsNodesBlock <: AbstractDocumenterBlock
- codeblock :: MarkdownAST.CodeBlock
+ codeblock::MarkdownAST.CodeBlock
end
MarkdownAST.iscontainer(::DocsNodesBlock) = true
MarkdownAST.can_contain(::DocsNodesBlock, ::MarkdownAST.AbstractElement) = false
-MarkdownAST.can_contain(::DocsNodesBlock, ::Union{DocsNode, MarkdownAST.Admonition}) = true
+MarkdownAST.can_contain(::DocsNodesBlock, ::Union{DocsNode,MarkdownAST.Admonition}) = true
MarkdownAST.iscontainer(::MultiCodeBlock) = true
MarkdownAST.can_contain(::MultiCodeBlock, ::MarkdownAST.Code) = true
struct MultiOutputElement <: AbstractDocumenterBlock
- element :: Any
+ element::Any
end
MarkdownAST.iscontainer(::MultiOutput) = true
-MarkdownAST.can_contain(::MultiOutput, ::Union{MultiOutputElement,MarkdownAST.CodeBlock}) = true
+MarkdownAST.can_contain(::MultiOutput, ::Union{MultiOutputElement,MarkdownAST.CodeBlock}) =
+ true
# In the SetupBlocks expander, we map @setup nodes to Markdown.MD() objects
struct SetupNode <: AbstractDocumenterBlock
- name :: String
- code :: String
+ name::String
+ code::String
end
# Override the show for DocumenterBlockTypes so that we would not print too much
@@ -1087,10 +1115,14 @@ end
Base.show(io::IO, node::AbstractDocumenterBlock) = print(io, typeof(node), "([...])")
# Extend MDFlatten.mdflatten to support the Documenter-specific elements
-MDFlatten.mdflatten(io, node::MarkdownAST.Node, ::AnchoredHeader) = MDFlatten.mdflatten(io, node.children)
-MDFlatten.mdflatten(io, node::MarkdownAST.Node, e::SetupNode) = MDFlatten.mdflatten(io, node, MarkdownAST.CodeBlock(e.name, e.code))
-MDFlatten.mdflatten(io, node::MarkdownAST.Node, e::RawNode) = MDFlatten.mdflatten(io, node, MarkdownAST.CodeBlock("@raw $(e.name)", e.text))
-MDFlatten.mdflatten(io, node::MarkdownAST.Node, e::AbstractDocumenterBlock) = MDFlatten.mdflatten(io, node, e.codeblock)
+MDFlatten.mdflatten(io, node::MarkdownAST.Node, ::AnchoredHeader) =
+ MDFlatten.mdflatten(io, node.children)
+MDFlatten.mdflatten(io, node::MarkdownAST.Node, e::SetupNode) =
+ MDFlatten.mdflatten(io, node, MarkdownAST.CodeBlock(e.name, e.code))
+MDFlatten.mdflatten(io, node::MarkdownAST.Node, e::RawNode) =
+ MDFlatten.mdflatten(io, node, MarkdownAST.CodeBlock("@raw $(e.name)", e.text))
+MDFlatten.mdflatten(io, node::MarkdownAST.Node, e::AbstractDocumenterBlock) =
+ MDFlatten.mdflatten(io, node, e.codeblock)
function MDFlatten.mdflatten(io, ::MarkdownAST.Node, e::DocsNode)
# this special case separates top level blocks with newlines
for node in e.mdasts
@@ -1100,8 +1132,10 @@ function MDFlatten.mdflatten(io, ::MarkdownAST.Node, e::DocsNode)
print(io, "\n\n\n\n")
end
end
-MDFlatten.mdflatten(io, node::MarkdownAST.Node, ::PageLink) = MDFlatten.mdflatten(io, node.children)
-MDFlatten.mdflatten(io, node::MarkdownAST.Node, ::LocalLink) = MDFlatten.mdflatten(io, node.children)
+MDFlatten.mdflatten(io, node::MarkdownAST.Node, ::PageLink) =
+ MDFlatten.mdflatten(io, node.children)
+MDFlatten.mdflatten(io, node::MarkdownAST.Node, ::LocalLink) =
+ MDFlatten.mdflatten(io, node.children)
function MDFlatten.mdflatten(io, node::MarkdownAST.Node, ::LocalImage)
print(io, "(Image: ")
MDFlatten.mdflatten(io, node.children)
diff --git a/src/expander_pipeline.jl b/src/expander_pipeline.jl
index dcaa707765..4b9b2b9b81 100644
--- a/src/expander_pipeline.jl
+++ b/src/expander_pipeline.jl
@@ -31,12 +31,8 @@ Similar to `expand()`, but recursively calls itself on all descendants of `node`
and applies `NestedExpanderPipeline` instead of `ExpanderPipeline`.
"""
function expand_recursively(node, page, doc)
- if typeof(node.element) in (
- MarkdownAST.Admonition,
- MarkdownAST.BlockQuote,
- MarkdownAST.Item,
- MarkdownAST.List,
- )
+ if typeof(node.element) in
+ (MarkdownAST.Admonition, MarkdownAST.BlockQuote, MarkdownAST.Item, MarkdownAST.List,)
for child in node.children
Selectors.dispatch(Expanders.NestedExpanderPipeline, child, page, doc)
expand_recursively(child, page, doc)
@@ -47,7 +43,8 @@ end
# run some checks after expanding the page
function pagecheck(page)
# make sure there is no "continued code" lingering around
- if haskey(page.globals.meta, :ContinuedCode) && !isempty(page.globals.meta[:ContinuedCode])
+ if haskey(page.globals.meta, :ContinuedCode) &&
+ !isempty(page.globals.meta[:ContinuedCode])
@warn "code from a continued @example block unused in $(Documenter.locrepr(page.source))."
end
end
@@ -59,162 +56,169 @@ function create_draft_result!(node::Node; blocktype="code")
codeblock.info = "julia"
node.element = Documenter.MultiOutput(codeblock)
push!(node.children, Node(codeblock))
- push!(node.children, Node(Documenter.MultiOutputElement(
- Dict{MIME,Any}(MIME"text/plain"() => "<< $(blocktype)-block not executed in draft mode >>")
- )))
+ push!(
+ node.children,
+ Node(
+ Documenter.MultiOutputElement(
+ Dict{MIME,Any}(
+ MIME"text/plain"() => "<< $(blocktype)-block not executed in draft mode >>"
+ )
+ )
+ )
+ )
end
# Expander Pipeline.
module Expanders
- import ..Documenter # for docstring references
- import ..Documenter.Selectors
-
- """
- The default node expander "pipeline", which consists of the following expanders:
-
- - [`TrackHeaders`](@ref)
- - [`MetaBlocks`](@ref)
- - [`DocsBlocks`](@ref)
- - [`AutoDocsBlocks`](@ref)
- - [`EvalBlocks`](@ref)
- - [`IndexBlocks`](@ref)
- - [`ContentsBlocks`](@ref)
- - [`ExampleBlocks`](@ref)
- - [`SetupBlocks`](@ref)
- - [`REPLBlocks`](@ref)
-
- """
- abstract type ExpanderPipeline <: Selectors.AbstractSelector end
-
- """
- The subset of [node expanders](@ref ExpanderPipeline) which also apply in nested contexts.
-
- See also [`expand_recursively`](@ref Documenter.expand_recursively).
- """
- abstract type NestedExpanderPipeline <: ExpanderPipeline end
-
- """
- Tracks all `Markdown.Header` nodes found in the parsed markdown files and stores an
- [`Anchor`](@ref Documenter.Anchor) object for each one.
- """
- abstract type TrackHeaders <: ExpanderPipeline end
-
- """
- Parses each code block where the language is `@meta` and evaluates the key/value pairs found
- within the block, i.e.
-
- ````markdown
- ```@meta
- CurrentModule = Documenter
- DocTestSetup = quote
- using Documenter
- end
- ```
- ````
- """
- abstract type MetaBlocks <: ExpanderPipeline end
-
- """
- Parses each code block where the language is `@docs` and evaluates the expressions found
- within the block. Replaces the block with the docstrings associated with each expression.
-
- ````markdown
- ```@docs
- Documenter
- makedocs
- deploydocs
- ```
- ````
- """
- abstract type DocsBlocks <: ExpanderPipeline end
-
- """
- Parses each code block where the language is `@autodocs` and replaces it with all the
- docstrings that match the provided key/value pairs `Modules = ...` and `Order = ...`.
-
- ````markdown
- ```@autodocs
- Modules = [Foo, Bar]
- Order = [:function, :type]
- ```
- ````
- """
- abstract type AutoDocsBlocks <: ExpanderPipeline end
-
- """
- Parses each code block where the language is `@eval` and evaluates it's content. Replaces
- the block with the value resulting from the evaluation. This can be useful for inserting
- generated content into a document such as plots.
-
- ````markdown
- ```@eval
- using PyPlot
- x = linspace(-π, π)
- y = sin(x)
- plot(x, y, color = "red")
- savefig("plot.svg")
- Markdown.parse("![Plot](plot.svg)")
- ```
- ````
- """
- abstract type EvalBlocks <: NestedExpanderPipeline end
-
- abstract type RawBlocks <: NestedExpanderPipeline end
-
- """
- Parses each code block where the language is `@index` and replaces it with an index of all
- docstrings spliced into the document. The pages that are included can be set using a
- key/value pair `Pages = [...]` such as
-
- ````markdown
- ```@index
- Pages = ["foo.md", "bar.md"]
- ```
- ````
- """
- abstract type IndexBlocks <: ExpanderPipeline end
-
- """
- Parses each code block where the language is `@contents` and replaces it with a nested list
- of all `Header` nodes in the generated document. The pages and depth of the list can be set
- using `Pages = [...]` and `Depth = N` where `N` is and integer.
-
- ````markdown
- ```@contents
- Pages = ["foo.md", "bar.md"]
- Depth = 1
- ```
- ````
- The default `Depth` value is `2`.
- """
- abstract type ContentsBlocks <: ExpanderPipeline end
-
- """
- Parses each code block where the language is `@example` and evaluates the parsed Julia code
- found within. The resulting value is then inserted into the final document after the source
- code.
-
- ````markdown
- ```@example
- a = 1
- b = 2
- a + b
- ```
- ````
- """
- abstract type ExampleBlocks <: NestedExpanderPipeline end
-
- """
- Similar to the [`ExampleBlocks`](@ref) expander, but inserts a Julia REPL prompt before each
- toplevel expression in the final document.
- """
- abstract type REPLBlocks <: NestedExpanderPipeline end
-
- """
- Similar to the [`ExampleBlocks`](@ref) expander, but hides all output in the final document.
- """
- abstract type SetupBlocks <: NestedExpanderPipeline end
+import ..Documenter # for docstring references
+import ..Documenter.Selectors
+
+"""
+The default node expander "pipeline", which consists of the following expanders:
+
+- [`TrackHeaders`](@ref)
+- [`MetaBlocks`](@ref)
+- [`DocsBlocks`](@ref)
+- [`AutoDocsBlocks`](@ref)
+- [`EvalBlocks`](@ref)
+- [`IndexBlocks`](@ref)
+- [`ContentsBlocks`](@ref)
+- [`ExampleBlocks`](@ref)
+- [`SetupBlocks`](@ref)
+- [`REPLBlocks`](@ref)
+
+"""
+abstract type ExpanderPipeline <: Selectors.AbstractSelector end
+
+"""
+The subset of [node expanders](@ref ExpanderPipeline) which also apply in nested contexts.
+
+See also [`expand_recursively`](@ref Documenter.expand_recursively).
+"""
+abstract type NestedExpanderPipeline <: ExpanderPipeline end
+
+"""
+Tracks all `Markdown.Header` nodes found in the parsed markdown files and stores an
+[`Anchor`](@ref Documenter.Anchor) object for each one.
+"""
+abstract type TrackHeaders <: ExpanderPipeline end
+
+"""
+Parses each code block where the language is `@meta` and evaluates the key/value pairs found
+within the block, i.e.
+
+````markdown
+```@meta
+CurrentModule = Documenter
+DocTestSetup = quote
+ using Documenter
+end
+```
+````
+"""
+abstract type MetaBlocks <: ExpanderPipeline end
+
+"""
+Parses each code block where the language is `@docs` and evaluates the expressions found
+within the block. Replaces the block with the docstrings associated with each expression.
+
+````markdown
+```@docs
+Documenter
+makedocs
+deploydocs
+```
+````
+"""
+abstract type DocsBlocks <: ExpanderPipeline end
+
+"""
+Parses each code block where the language is `@autodocs` and replaces it with all the
+docstrings that match the provided key/value pairs `Modules = ...` and `Order = ...`.
+
+````markdown
+```@autodocs
+Modules = [Foo, Bar]
+Order = [:function, :type]
+```
+````
+"""
+abstract type AutoDocsBlocks <: ExpanderPipeline end
+
+"""
+Parses each code block where the language is `@eval` and evaluates its content. Replaces
+the block with the value resulting from the evaluation. This can be useful for inserting
+generated content into a document such as plots.
+
+````markdown
+```@eval
+using PyPlot
+x = linspace(-π, π)
+y = sin(x)
+plot(x, y, color = "red")
+savefig("plot.svg")
+Markdown.parse("![Plot](plot.svg)")
+```
+````
+"""
+abstract type EvalBlocks <: NestedExpanderPipeline end
+
+abstract type RawBlocks <: NestedExpanderPipeline end
+
+"""
+Parses each code block where the language is `@index` and replaces it with an index of all
+docstrings spliced into the document. The pages that are included can be set using a
+key/value pair `Pages = [...]` such as
+
+````markdown
+```@index
+Pages = ["foo.md", "bar.md"]
+```
+````
+"""
+abstract type IndexBlocks <: ExpanderPipeline end
+
+"""
+Parses each code block where the language is `@contents` and replaces it with a nested list
+of all `Header` nodes in the generated document. The pages and depth of the list can be set
+using `Pages = [...]` and `Depth = N` where `N` is an integer.
+
+````markdown
+```@contents
+Pages = ["foo.md", "bar.md"]
+Depth = 1
+```
+````
+The default `Depth` value is `2`.
+"""
+abstract type ContentsBlocks <: ExpanderPipeline end
+
+"""
+Parses each code block where the language is `@example` and evaluates the parsed Julia code
+found within. The resulting value is then inserted into the final document after the source
+code.
+
+````markdown
+```@example
+a = 1
+b = 2
+a + b
+```
+````
+"""
+abstract type ExampleBlocks <: NestedExpanderPipeline end
+
+"""
+Similar to the [`ExampleBlocks`](@ref) expander, but inserts a Julia REPL prompt before each
+toplevel expression in the final document.
+"""
+abstract type REPLBlocks <: NestedExpanderPipeline end
+
+"""
+Similar to the [`ExampleBlocks`](@ref) expander, but hides all output in the final document.
+"""
+abstract type SetupBlocks <: NestedExpanderPipeline end
end
Selectors.order(::Type{Expanders.TrackHeaders}) = 1.0
@@ -229,17 +233,17 @@ Selectors.order(::Type{Expanders.REPLBlocks}) = 9.0
Selectors.order(::Type{Expanders.SetupBlocks}) = 10.0
Selectors.order(::Type{Expanders.RawBlocks}) = 11.0
-Selectors.matcher(::Type{Expanders.TrackHeaders}, node, page, doc) = isa(node.element, MarkdownAST.Heading)
-Selectors.matcher(::Type{Expanders.MetaBlocks}, node, page, doc) = iscode(node, "@meta")
-Selectors.matcher(::Type{Expanders.DocsBlocks}, node, page, doc) = iscode(node, r"^@docs")
+Selectors.matcher(::Type{Expanders.TrackHeaders}, node, page, doc) = isa(node.element, MarkdownAST.Heading)
+Selectors.matcher(::Type{Expanders.MetaBlocks}, node, page, doc) = iscode(node, "@meta")
+Selectors.matcher(::Type{Expanders.DocsBlocks}, node, page, doc) = iscode(node, r"^@docs")
Selectors.matcher(::Type{Expanders.AutoDocsBlocks}, node, page, doc) = iscode(node, r"^@autodocs")
-Selectors.matcher(::Type{Expanders.EvalBlocks}, node, page, doc) = iscode(node, "@eval")
-Selectors.matcher(::Type{Expanders.IndexBlocks}, node, page, doc) = iscode(node, "@index")
+Selectors.matcher(::Type{Expanders.EvalBlocks}, node, page, doc) = iscode(node, "@eval")
+Selectors.matcher(::Type{Expanders.IndexBlocks}, node, page, doc) = iscode(node, "@index")
Selectors.matcher(::Type{Expanders.ContentsBlocks}, node, page, doc) = iscode(node, "@contents")
-Selectors.matcher(::Type{Expanders.ExampleBlocks}, node, page, doc) = iscode(node, r"^@example")
-Selectors.matcher(::Type{Expanders.REPLBlocks}, node, page, doc) = iscode(node, r"^@repl")
-Selectors.matcher(::Type{Expanders.SetupBlocks}, node, page, doc) = iscode(node, r"^@setup")
-Selectors.matcher(::Type{Expanders.RawBlocks}, node, page, doc) = iscode(node, r"^@raw")
+Selectors.matcher(::Type{Expanders.ExampleBlocks}, node, page, doc) = iscode(node, r"^@example")
+Selectors.matcher(::Type{Expanders.REPLBlocks}, node, page, doc) = iscode(node, r"^@repl")
+Selectors.matcher(::Type{Expanders.SetupBlocks}, node, page, doc) = iscode(node, r"^@setup")
+Selectors.matcher(::Type{Expanders.RawBlocks}, node, page, doc) = iscode(node, r"^@raw")
# Default Expander.
@@ -252,22 +256,21 @@ Selectors.runner(::Type{Expanders.NestedExpanderPipeline}, node, page, doc) = no
function Selectors.runner(::Type{Expanders.TrackHeaders}, node, page, doc)
header = node.element
# Get the header slug.
- text =
- if namedheader(node)
- # If the Header is wrappend in an [](@id) link, we remove the Link element from
- # the tree.
- link_node = first(node.children)
- MarkdownAST.unlink!(link_node)
- append!(node.children, link_node.children)
- match(NAMEDHEADER_REGEX, link_node.element.destination)[1]
- else
- # TODO: remove this hack (replace with mdflatten?)
- ast = MarkdownAST.@ast MarkdownAST.Document() do
- MarkdownAST.copy_tree(node)
- end
- md = convert(Markdown.MD, ast)
- sprint(Markdown.plain, Markdown.Paragraph(md.content[1].text))
+ text = if namedheader(node)
+        # If the Header is wrapped in an [](@id) link, we remove the Link element from
+ # the tree.
+ link_node = first(node.children)
+ MarkdownAST.unlink!(link_node)
+ append!(node.children, link_node.children)
+ match(NAMEDHEADER_REGEX, link_node.element.destination)[1]
+ else
+ # TODO: remove this hack (replace with mdflatten?)
+ ast = MarkdownAST.@ast MarkdownAST.Document() do
+ MarkdownAST.copy_tree(node)
end
+ md = convert(Markdown.MD, ast)
+ sprint(Markdown.plain, Markdown.Paragraph(md.content[1].text))
+ end
slug = Documenter.slugify(text)
# Add the header to the document's header map.
anchor = Documenter.anchor_add!(doc.internal.headers, header, slug, page.build)
@@ -293,7 +296,16 @@ function Selectors.runner(::Type{Expanders.MetaBlocks}, node, page, doc)
# wants to hide. We should probably warn, but it is common enough that
# we will silently skip for now.
if Documenter.isassign(ex)
- if !(ex.args[1] in (:CurrentModule, :DocTestSetup, :DocTestFilters, :EditURL, :Description, :Draft))
+ if !(
+ ex.args[1] in (
+ :CurrentModule,
+ :DocTestSetup,
+ :DocTestFilters,
+ :EditURL,
+ :Description,
+ :Draft
+ )
+ )
source = Documenter.locrepr(page.source, lines)
@warn(
"In $source: `@meta` block has an unsupported " *
@@ -303,13 +315,17 @@ function Selectors.runner(::Type{Expanders.MetaBlocks}, node, page, doc)
try
meta[ex.args[1]] = Core.eval(Main, ex.args[2])
catch err
- @docerror(doc, :meta_block,
+ @docerror(
+ doc,
+ :meta_block,
"""
failed to evaluate `$(strip(str))` in `@meta` block in $(Documenter.locrepr(page.source, lines))
```$(x.info)
$(x.code)
```
- """, exception = err)
+ """,
+ exception = err
+ )
end
end
end
@@ -369,34 +385,46 @@ function Selectors.runner(::Type{Expanders.DocsBlocks}, node, page, doc)
lines = Documenter.find_block_in_file(x.code, page.source)
@debug "Evaluating @docs block:\n$(x.code)"
for (ex, str) in Documenter.parseblock(x.code, doc, page)
- admonition = first(Documenter.mdparse("""
- !!! warning "Missing docstring."
+ admonition = first(
+ Documenter.mdparse(
+ """
+!!! warning "Missing docstring."
- Missing docstring for `$(strip(str))`. Check Documenter's build log for details.
- """, mode=:blocks))
+ Missing docstring for `$(strip(str))`. Check Documenter's build log for details.
+""",
+ mode=:blocks
+ )
+ )
binding = try
Documenter.DocSystem.binding(curmod, ex)
catch err
- @docerror(doc, :docs_block,
+ @docerror(
+ doc,
+ :docs_block,
"""
unable to get the binding for '$(strip(str))' in `@docs` block in $(Documenter.locrepr(page.source, lines)) from expression '$(repr(ex))' in module $(curmod)
```$(x.info)
$(x.code)
```
""",
- exception = err)
+ exception = err
+ )
push!(docsnodes, admonition)
continue
end
# Undefined `Bindings` get discarded.
- if !Documenter.DocSystem.iskeyword(binding) && !Documenter.DocSystem.defined(binding)
- @docerror(doc, :docs_block,
+ if !Documenter.DocSystem.iskeyword(binding) &&
+ !Documenter.DocSystem.defined(binding)
+ @docerror(
+ doc,
+ :docs_block,
"""
undefined binding '$(binding)' in `@docs` block in $(Documenter.locrepr(page.source, lines))
```$(x.info)
$(x.code)
```
- """)
+ """
+ )
push!(docsnodes, admonition)
continue
end
@@ -404,19 +432,22 @@ function Selectors.runner(::Type{Expanders.DocsBlocks}, node, page, doc)
object = make_object(binding, typesig, is_canonical, doc, page)
# We can't include the same object more than once in a document.
if haskey(doc.internal.objects, object)
- @docerror(doc, :docs_block,
+ @docerror(
+ doc,
+ :docs_block,
"""
duplicate docs found for '$(strip(str))' in `@docs` block in $(Documenter.locrepr(page.source, lines))
```$(x.info)
$(x.code)
```
- """)
+ """
+ )
push!(docsnodes, admonition)
continue
end
# Find the docs matching `binding` and `typesig`. Only search within the provided modules.
- docs = Documenter.DocSystem.getdocs(binding, typesig; modules = doc.blueprint.modules)
+ docs = Documenter.DocSystem.getdocs(binding, typesig; modules=doc.blueprint.modules)
# Include only docstrings from user-provided modules if provided.
if !isempty(doc.blueprint.modules)
@@ -425,13 +456,16 @@ function Selectors.runner(::Type{Expanders.DocsBlocks}, node, page, doc)
# Check that we aren't printing an empty docs list. Skip block when empty.
if isempty(docs)
- @docerror(doc, :docs_block,
+ @docerror(
+ doc,
+ :docs_block,
"""
no docs found for '$(strip(str))' in `@docs` block in $(Documenter.locrepr(page.source, lines))
```$(x.info)
$(x.code)
```
- """)
+ """
+ )
push!(docsnodes, admonition)
continue
end
@@ -462,7 +496,7 @@ function Selectors.runner(::Type{Expanders.AutoDocsBlocks}, node, page, doc)
x = node.element
is_canonical = parse_docs_args("autodocs", x.info)
curmod = get(page.globals.meta, :CurrentModule, Main)
- fields = Dict{Symbol, Any}()
+ fields = Dict{Symbol,Any}()
lines = Documenter.find_block_in_file(x.code, page.source)
@debug "Evaluating @autodocs block:\n$(x.code)"
for (ex, str) in Documenter.parseblock(x.code, doc, page)
@@ -480,13 +514,17 @@ function Selectors.runner(::Type{Expanders.AutoDocsBlocks}, node, page, doc)
)
end
catch err
- @docerror(doc, :autodocs_block,
+ @docerror(
+ doc,
+ :autodocs_block,
"""
failed to evaluate `$(strip(str))` in `@autodocs` block in $(Documenter.locrepr(page.source, lines))
```$(x.info)
$(x.code)
```
- """, exception = err)
+ """,
+ exception = err
+ )
end
end
end
@@ -509,42 +547,53 @@ function Selectors.runner(::Type{Expanders.AutoDocsBlocks}, node, page, doc)
Documenter.DocSystem.category(binding)
catch err
isa(err, UndefVarError) || rethrow(err)
- @docerror(doc, :autodocs_block,
- """
- @autodocs ($(Documenter.locrepr(page.source, lines))) encountered a bad docstring binding '$(binding)'
- ```$(x.info)
- $(x.code)
- ```
- This is likely due to a bug in the Julia docsystem relating to the handling of
- docstrings attached to methods of callable objects. See:
-
- https://github.com/JuliaLang/julia/issues/45174
-
- As a workaround, the docstrings for the functor methods could be included in the docstring
- of the type definition. This error can also be ignored by disabling strict checking for
- :autodocs_block in the makedocs call with e.g.
-
- warnonly = [:autodocs_block]
-
- However, the relevant docstrings will then not be included by the @autodocs block.
- """, exception = err)
+ @docerror(
+ doc,
+ :autodocs_block,
+ """
+ @autodocs ($(Documenter.locrepr(page.source, lines))) encountered a bad docstring binding '$(binding)'
+ ```$(x.info)
+ $(x.code)
+ ```
+ This is likely due to a bug in the Julia docsystem relating to the handling of
+ docstrings attached to methods of callable objects. See:
+
+ https://github.com/JuliaLang/julia/issues/45174
+
+ As a workaround, the docstrings for the functor methods could be included in the docstring
+ of the type definition. This error can also be ignored by disabling strict checking for
+ :autodocs_block in the makedocs call with e.g.
+
+ warnonly = [:autodocs_block]
+
+ However, the relevant docstrings will then not be included by the @autodocs block.
+ """,
+ exception = err
+ )
continue # skip this docstring
end
if category in order && included
# filter the elements after category/order has been evaluated
# to ensure that e.g. when `Order = [:type]` is given, the filter
# function really receives only types
- filtered = Base.invokelatest(filterfunc, Core.eval(binding.mod, binding.var))
+ filtered =
+ Base.invokelatest(filterfunc, Core.eval(binding.mod, binding.var))
if filtered
for (typesig, docstr) in multidoc.docs
path = normpath(docstr.data[:path])
object = make_object(binding, typesig, is_canonical, doc, page)
if isempty(pages)
- push!(results, (mod, path, category, object, isexported, docstr))
+ push!(
+ results,
+ (mod, path, category, object, isexported, docstr)
+ )
else
for p in pages
if endswith(path, p)
- push!(results, (mod, p, category, object, isexported, docstr))
+ push!(
+ results,
+ (mod, p, category, object, isexported, docstr)
+ )
break
end
end
@@ -562,23 +611,26 @@ function Selectors.runner(::Type{Expanders.AutoDocsBlocks}, node, page, doc)
comparison = function (a, b)
local t
(t = Documenter._compare(modulemap, 1, a, b)) == 0 || return t < 0 # module
- (t = Documenter._compare(pagesmap, 2, a, b)) == 0 || return t < 0 # page
- (t = Documenter._compare(ordermap, 3, a, b)) == 0 || return t < 0 # category
+ (t = Documenter._compare(pagesmap, 2, a, b)) == 0 || return t < 0 # page
+ (t = Documenter._compare(ordermap, 3, a, b)) == 0 || return t < 0 # category
string(a[4]) < string(b[4]) # name
end
- sort!(results; lt = comparison)
+ sort!(results; lt=comparison)
# Finalise docstrings.
docsnodes = Node[]
for (mod, path, category, object, isexported, docstr) in results
if haskey(doc.internal.objects, object)
- @docerror(doc, :autodocs_block,
+ @docerror(
+ doc,
+ :autodocs_block,
"""
duplicate docs found for '$(object.binding)' in $(Documenter.locrepr(page.source, lines))
```$(x.info)
$(x.code)
```
- """)
+ """
+ )
continue
end
markdown::Markdown.MD = Documenter.DocSystem.parsedoc(docstr)
@@ -595,13 +647,16 @@ function Selectors.runner(::Type{Expanders.AutoDocsBlocks}, node, page, doc)
push!(node.children, docsnode)
end
else
- @docerror(doc, :autodocs_block,
+ @docerror(
+ doc,
+ :autodocs_block,
"""
'@autodocs' missing 'Modules = ...' in $(Documenter.locrepr(page.source, lines))
```$(x.info)
$(x.code)
```
- """)
+ """
+ )
end
end
@@ -620,24 +675,33 @@ function Selectors.runner(::Type{Expanders.EvalBlocks}, node, page, doc)
end
sandbox = Module(:EvalBlockSandbox)
lines = Documenter.find_block_in_file(x.code, page.source)
- linenumbernode = LineNumberNode(lines === nothing ? 0 : lines.first,
- basename(page.source))
+ linenumbernode =
+ LineNumberNode(lines === nothing ? 0 : lines.first, basename(page.source))
@debug "Evaluating @eval block:\n$(x.code)"
cd(page.workdir) do
result = nothing
- for (ex, str) in Documenter.parseblock(x.code, doc, page; keywords = false,
- linenumbernode = linenumbernode)
+ for (ex, str) in Documenter.parseblock(
+ x.code,
+ doc,
+ page;
+ keywords=false,
+ linenumbernode=linenumbernode
+ )
try
result = Core.eval(sandbox, ex)
catch err
bt = Documenter.remove_common_backtrace(catch_backtrace())
- @docerror(doc, :eval_block,
+ @docerror(
+ doc,
+ :eval_block,
"""
failed to evaluate `@eval` block in $(Documenter.locrepr(page.source))
```$(x.info)
$(x.code)
```
- """, exception = (err, bt))
+ """,
+ exception = (err, bt)
+ )
end
end
result = if isnothing(result)
@@ -647,19 +711,23 @@ function Selectors.runner(::Type{Expanders.EvalBlocks}, node, page, doc)
else
# TODO: we could handle the cases where the user provides some of the Markdown library
# objects, like Paragraph.
- @docerror(doc, :eval_block, """
- Invalid type of object in @eval in $(Documenter.locrepr(page.source))
- ```$(x.info)
- $(x.code)
- ```
- Evaluated to `$(typeof(result))`, but should be one of
- - Nothing
- - Markdown.MD
- Falling back to textual code block representation.
-
- If you are seeing this warning/error after upgrading Documenter and this used to work,
- please open an issue on the Documenter issue tracker.
- """)
+ @docerror(
+ doc,
+ :eval_block,
+ """
+Invalid type of object in @eval in $(Documenter.locrepr(page.source))
+```$(x.info)
+$(x.code)
+```
+Evaluated to `$(typeof(result))`, but should be one of
+ - Nothing
+ - Markdown.MD
+Falling back to textual code block representation.
+
+If you are seeing this warning/error after upgrading Documenter and this used to work,
+please open an issue on the Documenter issue tracker.
+"""
+ )
MarkdownAST.@ast MarkdownAST.Document() do
MarkdownAST.CodeBlock("", sprint(show, MIME"text/plain"(), result))
end
@@ -739,17 +807,23 @@ function Selectors.runner(::Type{Expanders.ExampleBlocks}, node, page, doc)
result, buffer = nothing, IOBuffer()
if !continued # run the code
# check if there is any code waiting
- if haskey(page.globals.meta, :ContinuedCode) && haskey(page.globals.meta[:ContinuedCode], sym)
+ if haskey(page.globals.meta, :ContinuedCode) &&
+ haskey(page.globals.meta[:ContinuedCode], sym)
code = page.globals.meta[:ContinuedCode][sym] * '\n' * x.code
delete!(page.globals.meta[:ContinuedCode], sym)
else
code = x.code
end
- linenumbernode = LineNumberNode(lines === nothing ? 0 : lines.first,
- basename(page.source))
- for (ex, str) in Documenter.parseblock(code, doc, page; keywords = false,
- linenumbernode = linenumbernode)
- c = IOCapture.capture(rethrow = InterruptException, color = ansicolor) do
+ linenumbernode =
+ LineNumberNode(lines === nothing ? 0 : lines.first, basename(page.source))
+ for (ex, str) in Documenter.parseblock(
+ code,
+ doc,
+ page;
+ keywords=false,
+ linenumbernode=linenumbernode
+ )
+ c = IOCapture.capture(rethrow=InterruptException, color=ansicolor) do
cd(page.workdir) do
Core.eval(mod, ex)
end
@@ -759,13 +833,17 @@ function Selectors.runner(::Type{Expanders.ExampleBlocks}, node, page, doc)
print(buffer, c.output)
if c.error
bt = Documenter.remove_common_backtrace(c.backtrace)
- @docerror(doc, :example_block,
+ @docerror(
+ doc,
+ :example_block,
"""
failed to run `@example` block in $(Documenter.locrepr(page.source, lines))
```$(x.info)
$(x.code)
```
- """, exception = (c.value, bt))
+ """,
+ exception = (c.value, bt)
+ )
return
end
end
@@ -778,7 +856,7 @@ function Selectors.runner(::Type{Expanders.ExampleBlocks}, node, page, doc)
input = droplines(x.code)
# Generate different in different formats and let each writer select
- output = Base.invokelatest(Documenter.display_dict, result, context = :color => ansicolor)
+ output = Base.invokelatest(Documenter.display_dict, result, context=:color => ansicolor)
# Remove references to gensym'd module from text/plain
m = MIME"text/plain"()
if haskey(output, m)
@@ -790,7 +868,14 @@ function Selectors.runner(::Type{Expanders.ExampleBlocks}, node, page, doc)
if result === nothing
stdouterr = Documenter.sanitise(buffer)
stdouterr = remove_sandbox_from_output(stdouterr, mod)
- isempty(stdouterr) || push!(content, Node(Documenter.MultiOutputElement(Dict{MIME,Any}(MIME"text/plain"() => stdouterr))))
+ isempty(stdouterr) || push!(
+ content,
+ Node(
+ Documenter.MultiOutputElement(
+ Dict{MIME,Any}(MIME"text/plain"() => stdouterr)
+ )
+ )
+ )
elseif !isempty(output)
push!(content, Node(Documenter.MultiOutputElement(output)))
end
@@ -837,20 +922,25 @@ function Selectors.runner(::Type{Expanders.REPLBlocks}, node, page, doc)
multicodeblock = MarkdownAST.CodeBlock[]
linenumbernode = LineNumberNode(0, "REPL") # line unused, set to 0
@debug "Evaluating @repl block:\n$(x.code)"
- for (ex, str) in Documenter.parseblock(x.code, doc, page; keywords = false,
- linenumbernode = linenumbernode)
- input = droplines(str)
+ for (ex, str) in Documenter.parseblock(
+ x.code,
+ doc,
+ page;
+ keywords=false,
+ linenumbernode=linenumbernode
+ )
+ input = droplines(str)
# Use the REPL softscope for REPLBlocks,
# see https://github.com/JuliaLang/julia/pull/33864
ex = REPL.softscope(ex)
- c = IOCapture.capture(rethrow = InterruptException, color = ansicolor) do
+ c = IOCapture.capture(rethrow=InterruptException, color=ansicolor) do
cd(page.workdir) do
Core.eval(mod, ex)
end
end
Core.eval(mod, Expr(:global, Expr(:(=), :ans, QuoteNode(c.value))))
result = c.value
- buf = IOContext(IOBuffer(), :color=>ansicolor)
+ buf = IOContext(IOBuffer(), :color => ansicolor)
output = if !c.error
hide = REPL.ends_with_semicolon(input)
result_to_string(buf, hide ? nothing : c.value)
@@ -858,7 +948,10 @@ function Selectors.runner(::Type{Expanders.REPLBlocks}, node, page, doc)
error_to_string(buf, c.value, [])
end
if !isempty(input)
- push!(multicodeblock, MarkdownAST.CodeBlock("julia-repl", prepend_prompt(input)))
+ push!(
+ multicodeblock,
+ MarkdownAST.CodeBlock("julia-repl", prepend_prompt(input))
+ )
end
out = IOBuffer()
print(out, c.output) # c.output is std(out|err)
@@ -908,13 +1001,17 @@ function Selectors.runner(::Type{Expanders.SetupBlocks}, node, page, doc)
end
catch err
bt = Documenter.remove_common_backtrace(catch_backtrace())
- @docerror(doc, :setup_block,
+ @docerror(
+ doc,
+ :setup_block,
"""
failed to run `@setup` block in $(Documenter.locrepr(page.source))
```$(x.info)
$(x.code)
```
- """, exception=(err, bt))
+ """,
+ exception = (err, bt)
+ )
end
node.element = Documenter.SetupNode(x.info, x.code)
end
@@ -952,9 +1049,9 @@ function namedheader(node::Node)
end
# Remove any `# hide` lines, leading/trailing blank lines, and trailing whitespace.
-function droplines(code; skip = 0)
+function droplines(code; skip=0)
buffer = IOBuffer()
- for line in split(code, r"\r?\n")[(skip + 1):end]
+ for line in split(code, r"\r?\n")[(skip+1):end]
occursin(r"^(.*)#\s*hide$", line) && continue
println(buffer, rstrip(line))
end
@@ -964,7 +1061,7 @@ end
function prepend_prompt(input)
prompt = "julia> "
padding = " "^length(prompt)
- out = IOBuffer()
+ out = IOBuffer()
for (n, line) in enumerate(split(input, '\n'))
line = rstrip(line)
println(out, n == 1 ? prompt : padding, line)
diff --git a/src/html/HTMLWriter.jl b/src/html/HTMLWriter.jl
index f5226a5d58..8dd154a4d3 100644
--- a/src/html/HTMLWriter.jl
+++ b/src/html/HTMLWriter.jl
@@ -71,12 +71,17 @@ const ASSETS_SASS = joinpath(ASSETS, "scss")
const ASSETS_THEMES = joinpath(ASSETS, "themes")
struct HTMLAsset
- class :: Symbol
- uri :: String
- islocal :: Bool
- attributes::Dict{Symbol, String}
-
- function HTMLAsset(class::Symbol, uri::String, islocal::Bool, attributes::Dict{Symbol, String}=Dict{Symbol,String}())
+ class::Symbol
+ uri::String
+ islocal::Bool
+ attributes::Dict{Symbol,String}
+
+ function HTMLAsset(
+ class::Symbol,
+ uri::String,
+ islocal::Bool,
+ attributes::Dict{Symbol,String}=Dict{Symbol,String}()
+ )
if !islocal && match(r"^https?://", uri) === nothing
error("Remote asset URL must start with http:// or https://")
end
@@ -123,7 +128,7 @@ Documenter.HTML(assets = [
])
```
"""
-function asset(uri; class = nothing, islocal=false, attributes=Dict{Symbol,String}())
+function asset(uri; class=nothing, islocal=false, attributes=Dict{Symbol,String}())
if class === nothing
class = assetclass(uri)
(class === nothing) && error("""
@@ -137,9 +142,7 @@ end
function assetclass(uri)
# TODO: support actual proper URIs
ext = splitext(uri)[end]
- ext == ".ico" ? :ico :
- ext == ".css" ? :css :
- ext == ".js" ? :js : :unknown
+ ext == ".ico" ? :ico : ext == ".css" ? :css : ext == ".js" ? :js : :unknown
end
abstract type MathEngine end
@@ -162,13 +165,13 @@ setting `override` to `true`, in which case the default values are ignored and o
user-provided dictionary is used.
"""
struct KaTeX <: MathEngine
- config :: Dict{Symbol,Any}
- function KaTeX(config::Union{Dict,Nothing} = nothing, override=false)
+ config::Dict{Symbol,Any}
+ function KaTeX(config::Union{Dict,Nothing}=nothing, override=false)
default = Dict(
:delimiters => [
- Dict(:left => raw"$", :right => raw"$", display => false),
- Dict(:left => raw"$$", :right => raw"$$", display => true),
- Dict(:left => raw"\[", :right => raw"\]", display => true),
+ Dict(:left => raw"$", :right => raw"$", display => false),
+ Dict(:left => raw"$$", :right => raw"$$", display => true),
+ Dict(:left => raw"\[", :right => raw"\]", display => true),
]
)
new((config === nothing) ? default : override ? config : merge(default, config))
@@ -197,20 +200,16 @@ The URL of the MathJax JS file can be overridden using the `url` keyword argumen
use a particular minor version).
"""
struct MathJax2 <: MathEngine
- config :: Dict{Symbol,Any}
- url :: String
- function MathJax2(config::Union{Dict,Nothing} = nothing, override=false; url = "")
+ config::Dict{Symbol,Any}
+ url::String
+ function MathJax2(config::Union{Dict,Nothing}=nothing, override=false; url="")
default = Dict(
:tex2jax => Dict(
- "inlineMath" => [["\$","\$"], ["\\(","\\)"]],
+ "inlineMath" => [["\$", "\$"], ["\\(", "\\)"]],
"processEscapes" => true
),
:config => ["MMLorHTML.js"],
- :jax => [
- "input/TeX",
- "output/HTML-CSS",
- "output/NativeMML"
- ],
+ :jax => ["input/TeX", "output/HTML-CSS", "output/NativeMML"],
:extensions => [
"MathMenu.js",
"MathZoom.js",
@@ -221,11 +220,17 @@ struct MathJax2 <: MathEngine
],
:TeX => Dict(:equationNumbers => Dict(:autoNumber => "AMS"))
)
- new((config === nothing) ? default : override ? config : merge(default, config), url)
+ new(
+ (config === nothing) ? default : override ? config : merge(default, config),
+ url
+ )
end
end
-@deprecate MathJax(config::Union{Dict,Nothing} = nothing, override=false) MathJax2(config, override) false
+@deprecate MathJax(config::Union{Dict,Nothing}=nothing, override=false) MathJax2(
+ config,
+ override
+) false
@doc "deprecated – Use [`MathJax2`](@ref) instead" MathJax
"""
@@ -252,12 +257,12 @@ The URL of the MathJax JS file can be overridden using the `url` keyword argumen
use a particular minor version).
"""
struct MathJax3 <: MathEngine
- config :: Dict{Symbol,Any}
- url :: String
- function MathJax3(config::Union{Dict,Nothing} = nothing, override=false; url = "")
+ config::Dict{Symbol,Any}
+ url::String
+ function MathJax3(config::Union{Dict,Nothing}=nothing, override=false; url="")
default = Dict(
:tex => Dict(
- "inlineMath" => [["\$","\$"], ["\\(","\\)"]],
+ "inlineMath" => [["\$", "\$"], ["\\(", "\\)"]],
"tags" => "ams",
"packages" => ["base", "ams", "autoload"],
),
@@ -266,7 +271,10 @@ struct MathJax3 <: MathEngine
"processHtmlClass" => "tex2jax_process",
)
)
- new((config === nothing) ? default : override ? config : merge(default, config), url)
+ new(
+ (config === nothing) ? default : override ? config : merge(default, config),
+ url
+ )
end
end
@@ -436,73 +444,81 @@ their absolute URLs, can be included with the [`asset`](@ref) function.
[^1]: Adding an ICO asset is primarily useful for setting a custom `favicon`.
"""
struct HTML <: Documenter.Writer
- prettyurls :: Bool
- disable_git :: Bool
- edit_link :: Union{String, Symbol, Nothing}
- repolink :: Union{String, Nothing, Default{Nothing}}
- canonical :: Union{String, Nothing}
- assets :: Vector{HTMLAsset}
- analytics :: String
- collapselevel :: Int
- sidebar_sitename :: Bool
- highlights :: Vector{String}
- mathengine :: Union{MathEngine,Nothing}
- description :: Union{String,Nothing}
- footer :: Union{MarkdownAST.Node, Nothing}
- ansicolor :: Bool
- lang :: String
- warn_outdated :: Bool
- prerender :: Bool
- node :: Union{Cmd,String,Nothing}
- highlightjs :: Union{String,Nothing}
- size_threshold :: Int
- size_threshold_warn :: Int
- size_threshold_ignore :: Vector{String}
- example_size_threshold :: Int
+ prettyurls::Bool
+ disable_git::Bool
+ edit_link::Union{String,Symbol,Nothing}
+ repolink::Union{String,Nothing,Default{Nothing}}
+ canonical::Union{String,Nothing}
+ assets::Vector{HTMLAsset}
+ analytics::String
+ collapselevel::Int
+ sidebar_sitename::Bool
+ highlights::Vector{String}
+ mathengine::Union{MathEngine,Nothing}
+ description::Union{String,Nothing}
+ footer::Union{MarkdownAST.Node,Nothing}
+ ansicolor::Bool
+ lang::String
+ warn_outdated::Bool
+ prerender::Bool
+ node::Union{Cmd,String,Nothing}
+ highlightjs::Union{String,Nothing}
+ size_threshold::Int
+ size_threshold_warn::Int
+ size_threshold_ignore::Vector{String}
+ example_size_threshold::Int
function HTML(;
- prettyurls :: Bool = true,
- disable_git :: Bool = false,
- repolink :: Union{String, Nothing, Default} = Default(nothing),
- edit_link :: Union{String, Symbol, Nothing, Default} = Default(Documenter.git_remote_head_branch("HTML(edit_link = ...)", Documenter.currentdir())),
- canonical :: Union{String, Nothing} = nothing,
- assets :: Vector = String[],
- analytics :: String = "",
- collapselevel :: Integer = 2,
- sidebar_sitename :: Bool = true,
- highlights :: Vector{String} = String[],
- mathengine :: Union{MathEngine,Nothing} = KaTeX(),
- description :: Union{String, Nothing} = nothing,
- footer :: Union{String, Nothing} = "Powered by [Documenter.jl](https://github.com/JuliaDocs/Documenter.jl) and the [Julia Programming Language](https://julialang.org/).",
- ansicolor :: Bool = true,
- lang :: String = "en",
- warn_outdated :: Bool = true,
- prerender :: Bool = false,
- node :: Union{Cmd,String,Nothing} = nothing,
- highlightjs :: Union{String,Nothing} = nothing,
- size_threshold :: Union{Integer, Nothing} = 200 * 2^10, # 200 KiB
- size_threshold_warn :: Union{Integer, Nothing} = 100 * 2^10, # 100 KiB
- size_threshold_ignore :: Vector = String[],
- # The choice of the default here is that having ~10 figures on a page
- # seems reasonable, and that would lead to ~80 KiB, which is still fine
- # and leaves a buffer before hitting `size_threshold_warn`.
- example_size_threshold :: Union{Integer, Nothing} = 8 * 2^10, # 8 KiB
-
- # deprecated keywords
- edit_branch :: Union{String, Nothing, Default} = Default(nothing),
- )
+ prettyurls::Bool=true,
+ disable_git::Bool=false,
+ repolink::Union{String,Nothing,Default}=Default(nothing),
+ edit_link::Union{String,Symbol,Nothing,Default}=Default(
+ Documenter.git_remote_head_branch(
+ "HTML(edit_link = ...)",
+ Documenter.currentdir()
+ )
+ ),
+ canonical::Union{String,Nothing}=nothing,
+ assets::Vector=String[],
+ analytics::String="",
+ collapselevel::Integer=2,
+ sidebar_sitename::Bool=true,
+ highlights::Vector{String}=String[],
+ mathengine::Union{MathEngine,Nothing}=KaTeX(),
+ description::Union{String,Nothing}=nothing,
+ footer::Union{String,Nothing}="Powered by [Documenter.jl](https://github.com/JuliaDocs/Documenter.jl) and the [Julia Programming Language](https://julialang.org/).",
+ ansicolor::Bool=true,
+ lang::String="en",
+ warn_outdated::Bool=true,
+ prerender::Bool=false,
+ node::Union{Cmd,String,Nothing}=nothing,
+ highlightjs::Union{String,Nothing}=nothing,
+ size_threshold::Union{Integer,Nothing}=200 * 2^10, # 200 KiB
+ size_threshold_warn::Union{Integer,Nothing}=100 * 2^10, # 100 KiB
+ size_threshold_ignore::Vector=String[],
+ # The choice of the default here is that having ~10 figures on a page
+ # seems reasonable, and that would lead to ~80 KiB, which is still fine
+ # and leaves a buffer before hitting `size_threshold_warn`.
+ example_size_threshold::Union{Integer,Nothing}=8 * 2^10, # 8 KiB
+
+ # deprecated keywords
+ edit_branch::Union{String,Nothing,Default}=Default(nothing),
+ )
collapselevel >= 1 || throw(ArgumentError("collapselevel must be >= 1"))
if prerender
- prerender, node, highlightjs = prepare_prerendering(prerender, node, highlightjs, highlights)
+ prerender, node, highlightjs =
+ prepare_prerendering(prerender, node, highlightjs, highlights)
end
assets = map(assets) do asset
isa(asset, HTMLAsset) && return asset
- isa(asset, AbstractString) && return HTMLAsset(assetclass(asset), asset, true)
+ isa(asset, AbstractString) &&
+ return HTMLAsset(assetclass(asset), asset, true)
error("Invalid value in assets: $(asset) [$(typeof(asset))]")
end
# Handle edit_branch deprecation
if !isa(edit_branch, Default)
- isa(edit_link, Default) || error("Can't specify edit_branch (deprecated) and edit_link simultaneously")
+ isa(edit_link, Default) ||
+ error("Can't specify edit_branch (deprecated) and edit_link simultaneously")
@warn """
The edit_branch keyword is deprecated -- use edit_link instead.
Note: `edit_branch = nothing` must be changed to `edit_link = :commit`.
@@ -515,7 +531,11 @@ struct HTML <: Documenter.Writer
if footer !== nothing
footer = Markdown.parse(footer)
if !(length(footer.content) == 1 && footer.content[1] isa Markdown.Paragraph)
- throw(ArgumentError("footer must be a single-line markdown compatible string."))
+ throw(
+ ArgumentError(
+ "footer must be a single-line markdown compatible string."
+ )
+ )
end
footer = isnothing(footer) ? nothing : convert(Node, footer)
end
@@ -523,25 +543,59 @@ struct HTML <: Documenter.Writer
if isnothing(size_threshold)
size_threshold = typemax(Int)
elseif size_threshold <= 0
- throw(ArgumentError("size_threshold must be non-negative, got $(size_threshold)"))
+ throw(
+ ArgumentError("size_threshold must be non-negative, got $(size_threshold)")
+ )
end
if isnothing(size_threshold_warn)
size_threshold_warn = min(typemax(Int), size_threshold)
elseif size_threshold_warn <= 0
- throw(ArgumentError("size_threshold_warn must be non-negative, got $(size_threshold_warn)"))
+ throw(
+ ArgumentError(
+ "size_threshold_warn must be non-negative, got $(size_threshold_warn)"
+ )
+ )
elseif size_threshold_warn > size_threshold
- throw(ArgumentError("size_threshold_warn ($size_threshold_warn) must be smaller than size_threshold ($size_threshold)"))
+ throw(
+ ArgumentError(
+ "size_threshold_warn ($size_threshold_warn) must be smaller than size_threshold ($size_threshold)"
+ )
+ )
end
if isnothing(example_size_threshold)
example_size_threshold = typemax(Int)
elseif example_size_threshold < 0
- throw(ArgumentError("example_size_threshold must be non-negative, got $(example_size_threshold)"))
+ throw(
+ ArgumentError(
+ "example_size_threshold must be non-negative, got $(example_size_threshold)"
+ )
+ )
end
isa(edit_link, Default) && (edit_link = edit_link[])
- new(prettyurls, disable_git, edit_link, repolink, canonical, assets, analytics,
- collapselevel, sidebar_sitename, highlights, mathengine, description, footer,
- ansicolor, lang, warn_outdated, prerender, node, highlightjs,
- size_threshold, size_threshold_warn, size_threshold_ignore, example_size_threshold,
+ new(
+ prettyurls,
+ disable_git,
+ edit_link,
+ repolink,
+ canonical,
+ assets,
+ analytics,
+ collapselevel,
+ sidebar_sitename,
+ highlights,
+ mathengine,
+ description,
+ footer,
+ ansicolor,
+ lang,
+ warn_outdated,
+ prerender,
+ node,
+ highlightjs,
+ size_threshold,
+ size_threshold_warn,
+ size_threshold_ignore,
+ example_size_threshold,
)
end
end
@@ -570,7 +624,7 @@ function prepare_prerendering(prerender, node, highlightjs, highlights)
@debug "HTMLWriter: downloading highlightjs"
r = Documenter.JSDependencies.RequireJS([])
RD.highlightjs!(r, highlights)
- libs = sort!(collect(r.libraries); by = first) # puts highlight first
+ libs = sort!(collect(r.libraries); by=first) # puts highlight first
key = join((x.first for x in libs), ',')
highlightjs = get!(HLJSFILES, key) do
path, io = mktemp()
@@ -589,13 +643,13 @@ end
include("RD.jl")
struct SearchRecord
- src :: String
- page :: Documenter.Page
- fragment :: String
- category :: String
- title :: String
- page_title :: String
- text :: String
+ src::String
+ page::Documenter.Page
+ fragment::String
+ category::String
+ title::String
+ page_title::String
+ text::String
end
Base.@kwdef struct AtExampleFallbackWarning
@@ -609,19 +663,26 @@ end
other recursive functions.
"""
mutable struct HTMLContext
- doc :: Documenter.Document
- settings :: Union{HTML, Nothing}
- scripts :: Vector{String}
- documenter_js :: String
- themeswap_js :: String
- warner_js :: String
- search_index :: Vector{SearchRecord}
- search_index_js :: String
- search_navnode :: Documenter.NavNode
+ doc::Documenter.Document
+ settings::Union{HTML,Nothing}
+ scripts::Vector{String}
+ documenter_js::String
+ themeswap_js::String
+ warner_js::String
+ search_index::Vector{SearchRecord}
+ search_index_js::String
+ search_navnode::Documenter.NavNode
atexample_warnings::Vector{AtExampleFallbackWarning}
HTMLContext(doc, settings=nothing) = new(
- doc, settings, [], "", "", "", [], "",
+ doc,
+ settings,
+ [],
+ "",
+ "",
+ "",
+ [],
+ "",
Documenter.NavNode("search", "Search", nothing),
AtExampleFallbackWarning[],
)
@@ -629,24 +690,31 @@ end
struct DCtx
# ctx and navnode were recursively passed to all domify() methods
- ctx :: HTMLContext
- navnode :: Documenter.NavNode
+ ctx::HTMLContext
+ navnode::Documenter.NavNode
# The following fields were keyword arguments to mdconvert()
- droplinks :: Bool
- settings :: Union{HTML, Nothing}
- footnotes :: Union{Vector{Node{Nothing}},Nothing}
+ droplinks::Bool
+ settings::Union{HTML,Nothing}
+ footnotes::Union{Vector{Node{Nothing}},Nothing}
DCtx(ctx, navnode, droplinks=false) = new(ctx, navnode, droplinks, ctx.settings, [])
DCtx(
dctx::DCtx;
- navnode = dctx.navnode,
- droplinks = dctx.droplinks,
- settings = dctx.settings,
- footnotes = dctx.footnotes,
+ navnode=dctx.navnode,
+ droplinks=dctx.droplinks,
+ settings=dctx.settings,
+ footnotes=dctx.footnotes,
) = new(dctx.ctx, navnode, droplinks, settings, footnotes)
end
-function SearchRecord(ctx::HTMLContext, navnode; fragment="", title=nothing, category="page", text="")
+function SearchRecord(
+ ctx::HTMLContext,
+ navnode;
+ fragment="",
+ title=nothing,
+ category="page",
+ text=""
+)
page_title = mdflatten_pagetitle(DCtx(ctx, navnode))
if title === nothing
title = page_title
@@ -662,12 +730,20 @@ function SearchRecord(ctx::HTMLContext, navnode; fragment="", title=nothing, cat
)
end
-function SearchRecord(ctx::HTMLContext, navnode, node::Node, element::Documenter.AnchoredHeader)
+function SearchRecord(
+ ctx::HTMLContext,
+ navnode,
+ node::Node,
+ element::Documenter.AnchoredHeader
+)
a = element.anchor
- SearchRecord(ctx, navnode;
+ SearchRecord(
+ ctx,
+ navnode;
fragment=Documenter.anchor_fragment(a),
title=mdflatten(node), # AnchoredHeader has Heading as single child
- category="section")
+ category="section"
+ )
end
function SearchRecord(ctx, navnode, node::Node, ::MarkdownAST.AbstractElement)
@@ -678,7 +754,7 @@ function JSON.lower(rec::SearchRecord)
# Replace any backslashes in links, if building the docs on Windows
src = replace(rec.src, '\\' => '/')
ref = string(src, rec.fragment)
- Dict{String, String}(
+ Dict{String,String}(
"location" => ref,
"page" => rec.page_title,
"title" => rec.title,
@@ -700,10 +776,13 @@ function render(doc::Documenter.Document, settings::HTML=HTML())
if isempty(doc.blueprint.pages)
error("Aborting HTML build: no pages under src/")
elseif !haskey(doc.blueprint.pages, "index.md")
- @warn "Can't generate landing page (index.html): src/index.md missing" keys(doc.blueprint.pages)
+ @warn "Can't generate landing page (index.html): src/index.md missing" keys(
+ doc.blueprint.pages
+ )
end
- if isa(settings.repolink, Default) && (isnothing(doc.user.remote) || Remotes.repourl(doc.user.remote) === nothing)
+ if isa(settings.repolink, Default) &&
+ (isnothing(doc.user.remote) || Remotes.repourl(doc.user.remote) === nothing)
@warn """
Unable to determine the repository root URL for the navbar link.
This can happen when a string is passed to the `repo` keyword of `makedocs`.
@@ -725,7 +804,11 @@ function render(doc::Documenter.Document, settings::HTML=HTML())
@warn "not creating 'documenter.js', provided by the user."
else
r = JSDependencies.RequireJS([
- RD.jquery, RD.jqueryui, RD.headroom, RD.headroom_jquery, RD.minisearch,
+ RD.jquery,
+ RD.jqueryui,
+ RD.headroom,
+ RD.headroom_jquery,
+ RD.minisearch,
])
RD.mathengine!(r, settings.mathengine)
if !settings.prerender
@@ -746,7 +829,11 @@ function render(doc::Documenter.Document, settings::HTML=HTML())
size_limit_successes = map(collect(keys(doc.blueprint.pages))) do page
idx = findfirst(nn -> nn.page == page, doc.internal.navlist)
- nn = (idx === nothing) ? Documenter.NavNode(page, nothing, nothing) : doc.internal.navlist[idx]
+ nn = if (idx === nothing)
+ Documenter.NavNode(page, nothing, nothing)
+ else
+ doc.internal.navlist[idx]
+ end
@debug "Rendering $(page) [$(repr(idx))]"
render_page(ctx, nn)
end
@@ -759,7 +846,7 @@ function render(doc::Documenter.Document, settings::HTML=HTML())
"""
fallbacks = unique(w.fallback for w in ctx.atexample_warnings)
# We'll impose some regular order, but importantly we want 'nothing'-s on the top
- for fallback in sort(fallbacks, by = s -> isnothing(s) ? "" : s)
+ for fallback in sort(fallbacks, by=s -> isnothing(s) ? "" : s)
warnings = filter(w -> w.fallback == fallback, ctx.atexample_warnings)
n_warnings = length(warnings)
largest_size = maximum(w -> w.size_bytes, warnings)
@@ -792,9 +879,12 @@ end
struct HTMLSizeThresholdError <: Exception end
function Base.showerror(io::IO, ::HTMLSizeThresholdError)
- print(io, """
- HTMLSizeThresholdError: Some generated HTML files are above size_threshold.
- See logged errors for details.""")
+ print(
+ io,
+ """
+HTMLSizeThresholdError: Some generated HTML files are above size_threshold.
+See logged errors for details."""
+ )
end
"""
@@ -851,7 +941,16 @@ end
"""
Renders the main `` tag.
"""
-function render_html(ctx, navnode, head, sidebar, navbar, article, footer, scripts::Vector{DOM.Node}=DOM.Node[])
+function render_html(
+ ctx,
+ navnode,
+ head,
+ sidebar,
+ navbar,
+ article,
+ footer,
+ scripts::Vector{DOM.Node}=DOM.Node[]
+)
@tags html body div
DOM.HTMLDocument(
html[:lang=>ctx.settings.lang](
@@ -884,13 +983,14 @@ function render_settings(ctx)
)
)
- now_full, now_short = Dates.format(now(), dateformat"E d U Y HH:MM"), Dates.format(now(), dateformat"E d U Y")
+ now_full, now_short = Dates.format(now(), dateformat"E d U Y HH:MM"),
+ Dates.format(now(), dateformat"E d U Y")
buildinfo = p(
"This document was generated with ",
- a[:href => "https://github.com/JuliaDocs/Documenter.jl"]("Documenter.jl"),
+ a[:href=>"https://github.com/JuliaDocs/Documenter.jl"]("Documenter.jl"),
" version $(Documenter.DOCUMENTER_VERSION)",
" on ",
- span[".colophon-date", :title => now_full](now_short),
+ span[".colophon-date", :title=>now_full](now_short),
". ",
"Using Julia version $(Base.VERSION)."
)
@@ -902,9 +1002,7 @@ function render_settings(ctx)
p[".modal-card-title"]("Settings"),
button[".delete"]()
),
- section[".modal-card-body"](
- theme_selector, hr(), buildinfo
- ),
+ section[".modal-card-body"](theme_selector, hr(), buildinfo),
footer[".modal-card-foot"]()
)
)
@@ -924,27 +1022,22 @@ function render_head(ctx, navnode)
default_site_description(ctx)
end
- css_links = [
- RD.lato,
- RD.juliamono,
- RD.fontawesome_css...,
- RD.katex_css,
- ]
+ css_links = [RD.lato, RD.juliamono, RD.fontawesome_css..., RD.katex_css,]
head(
meta[:charset=>"UTF-8"],
- meta[:name => "viewport", :content => "width=device-width, initial-scale=1.0"],
+ meta[:name=>"viewport", :content=>"width=device-width, initial-scale=1.0"],
# Title tag and meta tags
title(page_title),
- meta[:name => "title", :content => page_title],
- meta[:property => "og:title", :content => page_title],
- meta[:property => "twitter:title", :content => page_title],
+ meta[:name=>"title", :content=>page_title],
+ meta[:property=>"og:title", :content=>page_title],
+ meta[:property=>"twitter:title", :content=>page_title],
# Description meta tags
- meta[:name => "description", :content => description],
- meta[:property => "og:description", :content => description],
- meta[:property => "twitter:description", :content => description],
+ meta[:name=>"description", :content=>description],
+ meta[:property=>"og:description", :content=>description],
+ meta[:property=>"twitter:description", :content=>description],
# Canonical URL tags
canonical_url_tags(ctx, navnode),
@@ -958,31 +1051,31 @@ function render_head(ctx, navnode)
# Stylesheets.
map(css_links) do each
- link[:href => each, :rel => "stylesheet", :type => "text/css"]
+ link[:href=>each, :rel=>"stylesheet", :type=>"text/css"]
end,
-
script("documenterBaseURL=\"$(relhref(src, "."))\""),
script[
- :src => RD.requirejs_cdn,
- Symbol("data-main") => relhref(src, ctx.documenter_js)
+ :src=>RD.requirejs_cdn,
+ Symbol("data-main")=>relhref(src, ctx.documenter_js)
],
- script[:src => relhref(src, ctx.search_index_js)],
-
- script[:src => relhref(src, "siteinfo.js")],
- script[:src => relhref(src, "../versions.js")],
+ script[:src=>relhref(src, ctx.search_index_js)],
+ script[:src=>relhref(src, "siteinfo.js")],
+ script[:src=>relhref(src, "../versions.js")],
# Themes. Note: we reverse the list to make sure that the default theme (first in
# the array) comes as the last tag.
map(Iterators.reverse(enumerate(THEMES))) do (i, theme)
- e = link[".docs-theme-link",
- :rel => "stylesheet", :type => "text/css",
- :href => relhref(src, "assets/themes/$(theme).css"),
- Symbol("data-theme-name") => theme,
+ e = link[
+ ".docs-theme-link",
+ :rel=>"stylesheet",
+ :type=>"text/css",
+ :href=>relhref(src, "assets/themes/$(theme).css"),
+ Symbol("data-theme-name")=>theme,
]
(i == 1) && push!(e.attributes, Symbol("data-theme-primary") => "")
(i == 2) && push!(e.attributes, Symbol("data-theme-primary-dark") => "")
return e
end,
- script[:src => relhref(src, ctx.themeswap_js)],
+ script[:src=>relhref(src, ctx.themeswap_js)],
# Custom user-provided assets.
asset_links(src, ctx.settings.assets),
)
@@ -995,9 +1088,9 @@ function canonical_url_tags(ctx, navnode)
return DOM.VOID
else
tags = DOM.Node[
- meta[:property => "og:url", :content => canonical],
- meta[:property => "twitter:url", :content => canonical],
- link[:rel => "canonical", :href => canonical]
+ meta[:property=>"og:url", :content=>canonical],
+ meta[:property=>"twitter:url", :content=>canonical],
+ link[:rel=>"canonical", :href=>canonical]
]
return tags
end
@@ -1015,9 +1108,9 @@ function preview_image_meta_tags(ctx)
preview = replace(preview, r"[/\\]+" => "/")
preview_url = rstrip(canonical_link, '/') * "/" * preview
tags = DOM.Node[
- meta[:property => "og:image", :content => preview_url],
- meta[:property => "twitter:image", :content => preview_url],
- meta[:property => "twitter:card", :content => "summary_large_image"]
+ meta[:property=>"og:image", :content=>preview_url],
+ meta[:property=>"twitter:image", :content=>preview_url],
+ meta[:property=>"twitter:card", :content=>"summary_large_image"]
]
return tags
end
@@ -1046,10 +1139,15 @@ function asset_links(src::AbstractString, assets::Vector{HTMLAsset})
for asset in assets
class = asset.class
url = asset.islocal ? relhref(src, asset.uri) : asset.uri
- node =
- class == :ico ? link[:href => url, :rel => "icon", :type => "image/x-icon", pairs(asset.attributes)...] :
- class == :css ? link[:href => url, :rel => "stylesheet", :type => "text/css", pairs(asset.attributes)...] :
- class == :js ? script[:src => url, pairs(asset.attributes)...] : continue # Skip non-js/css files.
+ node = if class == :ico
+ link[:href=>url, :rel=>"icon", :type=>"image/x-icon", pairs(asset.attributes)...]
+ elseif class == :css
+ link[:href=>url, :rel=>"stylesheet", :type=>"text/css", pairs(asset.attributes)...]
+ elseif class == :js
+ script[:src=>url, pairs(asset.attributes)...] # Skip non-js/css files.
+ else
+ continue # Skip non-js/css files.
+ end # Skip non-js/css files.
push!(links, node)
end
return links
@@ -1057,20 +1155,29 @@ end
function analytics_script(tracking_id::AbstractString)
@tags script
- isempty(tracking_id) ? DOM.VOID : [
- script[:async, :src => "https://www.googletagmanager.com/gtag/js?id=$(tracking_id)"](),
- script("""
- window.dataLayer = window.dataLayer || [];
- function gtag(){dataLayer.push(arguments);}
- gtag('js', new Date());
- gtag('config', '$(tracking_id)', {'page_path': location.pathname + location.search + location.hash});
- """)
+ if isempty(tracking_id)
+ DOM.VOID
+ else
+ [
+ script[:async, :src=>"https://www.googletagmanager.com/gtag/js?id=$(tracking_id)"](),
+ script(
+ """
+ window.dataLayer = window.dataLayer || [];
+ function gtag(){dataLayer.push(arguments);}
+ gtag('js', new Date());
+ gtag('config', '$(tracking_id)', {'page_path': location.pathname + location.search + location.hash});
+ """
+ )
]
+ end
end
function warning_script(src, ctx)
if ctx.settings.warn_outdated
- return Tag(:script)[Symbol(OUTDATED_VERSION_ATTR), :src => relhref(src, ctx.warner_js)]()
+ return Tag(:script)[
+ Symbol(OUTDATED_VERSION_ATTR),
+ :src=>relhref(src, ctx.warner_js)
+ ]()
end
return DOM.VOID
end
@@ -1079,11 +1186,12 @@ end
# ------------------------------------------------------------------------------
struct NavMenuContext
- htmlctx :: HTMLContext
- current :: Documenter.NavNode
- idstack :: Vector{Int}
+ htmlctx::HTMLContext
+ current::Documenter.NavNode
+ idstack::Vector{Int}
end
-NavMenuContext(ctx::HTMLContext, current::Documenter.NavNode) = NavMenuContext(ctx, current, [])
+NavMenuContext(ctx::HTMLContext, current::Documenter.NavNode) =
+ NavMenuContext(ctx, current, [])
function render_sidebar(ctx, navnode)
@tags a form img input nav div button select option span
@@ -1098,25 +1206,37 @@ function render_sidebar(ctx, navnode)
logo_dark = find_image_asset(ctx, "logo-dark")
if logo !== nothing
alt = isempty(ctx.doc.user.sitename) ? "Logo" : "$(ctx.doc.user.sitename) logo"
- logo_element = a[".docs-logo", :href => href]
+ logo_element = a[".docs-logo", :href=>href]
if logo_dark === nothing
- push!(logo_element.nodes, img[:src => relhref(src, logo), :alt => alt])
+ push!(logo_element.nodes, img[:src=>relhref(src, logo), :alt=>alt])
else
- push!(logo_element.nodes, img[".docs-light-only", :src => relhref(src, logo), :alt => alt])
- push!(logo_element.nodes, img[".docs-dark-only", :src => relhref(src, logo_dark), :alt => alt])
+ push!(
+ logo_element.nodes,
+ img[".docs-light-only", :src=>relhref(src, logo), :alt=>alt]
+ )
+ push!(
+ logo_element.nodes,
+ img[".docs-dark-only", :src=>relhref(src, logo_dark), :alt=>alt]
+ )
end
push!(navmenu.nodes, logo_element)
end
# Sitename
if ctx.settings.sidebar_sitename
- push!(navmenu.nodes, div[".docs-package-name"](
- span[".docs-autofit"](a[:href => href](ctx.doc.user.sitename))
- ))
+ push!(
+ navmenu.nodes,
+ div[".docs-package-name"](
+ span[".docs-autofit"](a[:href=>href](ctx.doc.user.sitename))
+ )
+ )
end
# Search box
- push!(navmenu.nodes,
- button["#documenter-search-query.docs-search-query.input.is-rounded.is-small.is-clickable.my-2.mx-auto.py-1.px-2"]("Search docs (Ctrl + /)")
+ push!(
+ navmenu.nodes,
+ button["#documenter-search-query.docs-search-query.input.is-rounded.is-small.is-clickable.my-2.mx-auto.py-1.px-2"](
+ "Search docs (Ctrl + /)"
+ )
)
# The menu itself
@@ -1132,7 +1252,7 @@ function render_sidebar(ctx, navnode)
vs_select = select["#documenter-version-selector"]
if !isempty(ctx.doc.user.version)
vs_class = "$(vs_class).visible"
- opt = option[:value => "#", :selected => "selected", ](ctx.doc.user.version)
+ opt = option[:value=>"#", :selected=>"selected",](ctx.doc.user.version)
vs_select = vs_select(opt)
end
vs_select = div[".select.is-fullwidth.is-size-7"](vs_select)
@@ -1166,7 +1286,8 @@ function navitem(nctx, nns::Vector)
end
pop!(nctx.idstack)
filter!(node -> node.name !== DOM.TEXT, nodes) # FIXME: why?
- ulclass = (length(nctx.idstack) >= nctx.htmlctx.settings.collapselevel) ? ".collapsed" : ""
+ ulclass =
+ (length(nctx.idstack) >= nctx.htmlctx.settings.collapselevel) ? ".collapsed" : ""
isempty(nodes) ? DOM.Node("") : DOM.Tag(:ul)[ulclass](nodes)
end
function navitem(nctx, nn::Documenter.NavNode)
@@ -1184,19 +1305,23 @@ function navitem(nctx, nn::Documenter.NavNode)
# construct this item
title = domify(dctx, pagetitle(dctx))
currentclass = (nn === current) ? ".is-active" : ""
- item = if length(nctx.idstack) >= ctx.settings.collapselevel && children.name !== DOM.TEXT
- menuid = "menuitem-$(join(nctx.idstack, '-'))"
- input_attr = ["#$(menuid).collapse-toggle", :type => "checkbox"]
- nn in Documenter.navpath(nctx.current) && push!(input_attr, :checked)
- li[currentclass](
- input[input_attr...],
- label[".tocitem", :for => menuid](span[".docs-label"](title), i[".docs-chevron"]),
- )
- elseif nn.page === nothing
- li[currentclass](span[".tocitem"](title))
- else
- li[currentclass](a[".tocitem", :href => navhref(ctx, nn, current)](title))
- end
+ item =
+ if length(nctx.idstack) >= ctx.settings.collapselevel && children.name !== DOM.TEXT
+ menuid = "menuitem-$(join(nctx.idstack, '-'))"
+ input_attr = ["#$(menuid).collapse-toggle", :type => "checkbox"]
+ nn in Documenter.navpath(nctx.current) && push!(input_attr, :checked)
+ li[currentclass](
+ input[input_attr...],
+ label[".tocitem", :for=>menuid](
+ span[".docs-label"](title),
+ i[".docs-chevron"]
+ ),
+ )
+ elseif nn.page === nothing
+ li[currentclass](span[".tocitem"](title))
+ else
+ li[currentclass](a[".tocitem", :href=>navhref(ctx, nn, current)](title))
+ end
# add the subsections (2nd level headings) from the page
if (nn === current) && current.page !== nothing
@@ -1204,7 +1329,7 @@ function navitem(nctx, nn::Documenter.NavNode)
internal_links = map(subs) do s
istoplevel, anchor, text = s
_li = istoplevel ? li[".toplevel"] : li[]
- _li(a[".tocitem", :href => anchor](span(domify(dctx, text.children))))
+ _li(a[".tocitem", :href=>anchor](span(domify(dctx, text.children))))
end
# Only create the ul.internal tag if there actually are in-page headers
length(internal_links) > 0 && push!(item.nodes, ul[".internal"](internal_links))
@@ -1222,7 +1347,7 @@ function render_navbar(ctx, navnode, edit_page_link::Bool)
# Hamburger on mobile
navbar_left = a[
"#documenter-sidebar-button.docs-sidebar-button.docs-navbar-link.fa-solid.fa-bars.is-hidden-desktop",
- :href => "#",
+ :href=>"#",
]
# The breadcrumb (navigation links on top)
@@ -1230,7 +1355,11 @@ function render_navbar(ctx, navnode, edit_page_link::Bool)
header_links = map(navpath) do nn
dctx = DCtx(ctx, nn, true)
title = domify(dctx, pagetitle(dctx))
- nn.page === nothing ? li(a[".is-disabled"](title)) : li(a[:href => navhref(ctx, nn, navnode)](title))
+ if nn.page === nothing
+ li(a[".is-disabled"](title))
+ else
+ li(a[:href=>navhref(ctx, nn, navnode)](title))
+ end
end
header_links[end] = header_links[end][".is-active"]
breadcrumb = nav[".breadcrumb"](
@@ -1247,7 +1376,8 @@ function render_navbar(ctx, navnode, edit_page_link::Bool)
# is not displayed. The user can also pass `repolink` to HTML to either disable it
# (repolink = nothing) or override the link URL (if set to a string). In the latter case,
# we try to figure out what icon and string we should use based on the URL.
- if !isnothing(ctx.settings.repolink) && (ctx.settings.repolink isa String || ctx.doc.user.remote isa Remotes.Remote)
+ if !isnothing(ctx.settings.repolink) &&
+ (ctx.settings.repolink isa String || ctx.doc.user.remote isa Remotes.Remote)
url, (host, logo) = if ctx.settings.repolink isa String
ctx.settings.repolink, host_logo(ctx.settings.repolink)
else # ctx.doc.user.remote isa Remotes.Remote
@@ -1256,10 +1386,13 @@ function render_navbar(ctx, navnode, edit_page_link::Bool)
# repourl() can sometimes return a nothing (Remotes.URL)
if !isnothing(url)
repo_title = "View the repository" * (isempty(host) ? "" : " on $host")
- push!(navbar_right.nodes,
- a[".docs-navbar-link", :href => url, :title => repo_title](
+ push!(
+ navbar_right.nodes,
+ a[".docs-navbar-link", :href=>url, :title=>repo_title](
span[".docs-icon.fa-brands"](logo),
- span[".docs-label.is-hidden-touch"](isempty(host) ? "Repository" : host)
+ span[".docs-label.is-hidden-touch"](
+ isempty(host) ? "Repository" : host
+ )
)
)
end
@@ -1267,24 +1400,33 @@ function render_navbar(ctx, navnode, edit_page_link::Bool)
# Add an edit link, with just an icon, but only on pages where edit_page_link is true.
# Some pages, like search, are special and do not have a source file to link to.
edit_page_link && edit_link(ctx, navnode) do logo, title, url
- push!(navbar_right.nodes,
- a[".docs-navbar-link", :href => url, :title => title](
+ push!(
+ navbar_right.nodes,
+ a[".docs-navbar-link", :href=>url, :title=>title](
span[".docs-icon.fa-solid"](logo)
)
)
end
# Settings cog
- push!(navbar_right.nodes, a[
- "#documenter-settings-button.docs-settings-button.docs-navbar-link.fa-solid.fa-gear",
- :href => "#", :title => "Settings",
- ])
+ push!(
+ navbar_right.nodes,
+ a[
+ "#documenter-settings-button.docs-settings-button.docs-navbar-link.fa-solid.fa-gear",
+ :href=>"#",
+ :title=>"Settings",
+ ]
+ )
# Collapse/Expand All articles toggle
- push!(navbar_right.nodes, a[
- "#documenter-article-toggle-button.docs-article-toggle-button.fa-solid.fa-chevron-up",
- :href=>"javascript:;", :title=>"Collapse all docstrings",
- ])
+ push!(
+ navbar_right.nodes,
+ a[
+ "#documenter-article-toggle-button.docs-article-toggle-button.fa-solid.fa-chevron-up",
+ :href=>"javascript:;",
+ :title=>"Collapse all docstrings",
+ ]
+ )
# Construct the main node that should be the first element in div.docs-main
header[".docs-navbar"](navbar_left, breadcrumb, navbar_right)
@@ -1298,7 +1440,8 @@ function edit_link(f, ctx, navnode)
# Let's fetch the edit path. Usually this is the source file of the page, but the user
# can override it specifying the EditURL option in an @meta block. Usually, it is a
# relative path pointing to a file, but can also be set to an absolute URL.
- editpath = get(getpage(ctx, navnode).globals.meta, :EditURL, getpage(ctx, navnode).source)
+ editpath =
+ get(getpage(ctx, navnode).globals.meta, :EditURL, getpage(ctx, navnode).source)
# If the user has set :EditURL to nothing, then the link will be disabled. Note: the
# .source field of a Page is always a String.
isnothing(editpath) && return
@@ -1344,20 +1487,26 @@ function edit_link(f, ctx, navnode)
end
# All these logos are from the .fa-brands (brands) class
-const host_logo_github = (host = "GitHub", logo = "\uf09b") # fa-github
-const host_logo_bitbucket = (host = "BitBucket", logo = "\uf171") # fa-bitbucket
-const host_logo_gitlab = (host = "GitLab", logo = "\uf296") # fa-gitlab
-const host_logo_azure = (host = "Azure DevOps", logo = "\uf3ca") # fa-microsoft; TODO: change to ADO logo when added to FontAwesome
-const host_logo_fallback = (host = "", logo = "\uf841") # fa-git-alt
+const host_logo_github = (host="GitHub", logo="\uf09b") # fa-github
+const host_logo_bitbucket = (host="BitBucket", logo="\uf171") # fa-bitbucket
+const host_logo_gitlab = (host="GitLab", logo="\uf296") # fa-gitlab
+const host_logo_azure = (host="Azure DevOps", logo="\uf3ca") # fa-microsoft; TODO: change to ADO logo when added to FontAwesome
+const host_logo_fallback = (host="", logo="\uf841") # fa-git-alt
host_logo(remote::Remotes.GitHub) = host_logo_github
host_logo(remote::Remotes.URL) = host_logo(remote.urltemplate)
host_logo(remote::Union{Remotes.Remote,Nothing}) = host_logo_fallback
function host_logo(remoteurl::String)
- occursin("github", remoteurl) ? host_logo_github :
- occursin("gitlab", remoteurl) ? host_logo_gitlab :
- occursin("bitbucket", remoteurl) ? host_logo_bitbucket :
- occursin("azure", remoteurl) ? host_logo_azure :
- host_logo_fallback
+ if occursin("github", remoteurl)
+ host_logo_github
+ elseif occursin("gitlab", remoteurl)
+ host_logo_gitlab
+ elseif occursin("bitbucket", remoteurl)
+ host_logo_bitbucket
+ elseif occursin("azure", remoteurl)
+ host_logo_azure
+ else
+ host_logo_fallback
+ end
end
function render_footer(ctx, navnode)
@@ -1367,13 +1516,19 @@ function render_footer(ctx, navnode)
if navnode.prev !== nothing
dctx = DCtx(ctx, navnode.prev, true)
title = domify(dctx, pagetitle(dctx))
- link = a[".docs-footer-prevpage", :href => navhref(ctx, navnode.prev, navnode)]("« ", title)
+ link = a[".docs-footer-prevpage", :href=>navhref(ctx, navnode.prev, navnode)](
+ "« ",
+ title
+ )
push!(navlinks, link)
end
if navnode.next !== nothing
dctx = DCtx(ctx, navnode.next, true)
title = domify(dctx, pagetitle(dctx))
- link = a[".docs-footer-nextpage", :href => navhref(ctx, navnode.next, navnode)](title, " »")
+ link = a[".docs-footer-nextpage", :href=>navhref(ctx, navnode.next, navnode)](
+ title,
+ " »"
+ )
push!(navlinks, link)
end
@@ -1409,17 +1564,18 @@ function render_article(ctx, navnode)
if !isempty(dctx.footnotes)
fnotes = map(dctx.footnotes) do f
# If there are any nested footnotes, they'll get ignored.
- dctx_footnote = DCtx(dctx, footnotes = nothing)
+ dctx_footnote = DCtx(dctx, footnotes=nothing)
fid = "footnote-$(f.element.id)"
citerefid = "citeref-$(f.element.id)"
- if length(f.children) == 1 && first(f.children).element isa MarkdownAST.Paragraph
+ if length(f.children) == 1 &&
+ first(f.children).element isa MarkdownAST.Paragraph
li["#$(fid).footnote"](
- a[".tag.is-link", :href => "#$(citerefid)"](f.element.id),
+ a[".tag.is-link", :href=>"#$(citerefid)"](f.element.id),
domify(dctx_footnote, first(f.children).children),
)
else
li["#$(fid).footnote"](
- a[".tag.is-link", :href => "#$(citerefid)"](f.element.id),
+ a[".tag.is-link", :href=>"#$(citerefid)"](f.element.id),
# passing an empty MD() as `parent` to give it block context
domify(dctx_footnote, f.children),
)
@@ -1444,13 +1600,18 @@ function expand_versions(dir, versions)
# filter and sort release folders
vnum(x) = VersionNumber(x)
version_folders = [x for x in available_folders if occursin(Base.VERSION_REGEX, x)]
- sort!(version_folders, lt = (x, y) -> vnum(x) < vnum(y), rev = true)
- release_folders = filter(x -> (v = vnum(x); v.prerelease == () && v.build == ()), version_folders)
+ sort!(version_folders, lt=(x, y) -> vnum(x) < vnum(y), rev=true)
+ release_folders =
+ filter(x -> (v = vnum(x); v.prerelease == () && v.build == ()), version_folders)
# pre_release_folders = filter(x -> (v = vnum(x); v.prerelease != () || v.build != ()), version_folders)
- major_folders = filter!(x -> (v = vnum(x); v.major != 0),
- unique(x -> (v = vnum(x); v.major), release_folders))
- minor_folders = filter!(x -> (v = vnum(x); !(v.major == 0 && v.minor == 0)),
- unique(x -> (v = vnum(x); (v.major, v.minor)), release_folders))
+ major_folders = filter!(
+ x -> (v = vnum(x); v.major != 0),
+ unique(x -> (v = vnum(x); v.major), release_folders)
+ )
+ minor_folders = filter!(
+ x -> (v = vnum(x); !(v.major == 0 && v.minor == 0)),
+ unique(x -> (v = vnum(x); (v.major, v.minor)), release_folders)
+ )
patch_folders = unique(x -> (v = vnum(x); (v.major, v.minor, v.patch)), release_folders)
filter!(x -> vnum(x) !== 0, major_folders)
@@ -1500,7 +1661,10 @@ function expand_versions(dir, versions)
# generate remaining symlinks
foreach(x -> push!(symlinks, "v$(vnum(x).major)" => x), major_folders)
foreach(x -> push!(symlinks, "v$(vnum(x).major).$(vnum(x).minor)" => x), minor_folders)
- foreach(x -> push!(symlinks, "v$(vnum(x).major).$(vnum(x).minor).$(vnum(x).patch)" => x), patch_folders)
+ foreach(
+ x -> push!(symlinks, "v$(vnum(x).major).$(vnum(x).minor).$(vnum(x).patch)" => x),
+ patch_folders
+ )
filter!(x -> x.first != x.second, unique!(symlinks))
# assert that none of the links point to another link
@@ -1515,7 +1679,7 @@ function expand_versions(dir, versions)
end
# write version file
-function generate_version_file(versionfile::AbstractString, entries, symlinks = [])
+function generate_version_file(versionfile::AbstractString, entries, symlinks=[])
open(versionfile, "w") do buf
println(buf, "var DOC_VERSIONS = [")
for folder in entries
@@ -1553,7 +1717,10 @@ function generate_redirect_file(redirectfile::AbstractString, entries)
open(redirectfile, "w") do buf
println(buf, comment)
- println(buf, "")
+ println(
+ buf,
+ ""
+ )
end
end
@@ -1616,18 +1783,19 @@ end
domify(dctx::DCtx, node::Node, ::MarkdownAST.Document) = domify(dctx, node.children)
function domify(dctx::DCtx, node::Node, ah::Documenter.AnchoredHeader)
- @assert length(node.children) == 1 && isa(first(node.children).element, MarkdownAST.Heading)
+ @assert length(node.children) == 1 &&
+ isa(first(node.children).element, MarkdownAST.Heading)
ctx, navnode = dctx.ctx, dctx.navnode
anchor = ah.anchor
# function domify(ctx, navnode, anchor::Anchor)
@tags a
frag = Documenter.anchor_fragment(anchor)
- legacy = anchor.nth == 1 ? (a[:id => lstrip(frag, '#')*"-1"],) : ()
+ legacy = anchor.nth == 1 ? (a[:id=>lstrip(frag, '#')*"-1"],) : ()
h = first(node.children)
- Tag(Symbol("h$(h.element.level)"))[:id => lstrip(frag, '#')](
- a[".docs-heading-anchor", :href => frag](domify(dctx, h.children)),
+ Tag(Symbol("h$(h.element.level)"))[:id=>lstrip(frag, '#')](
+ a[".docs-heading-anchor", :href=>frag](domify(dctx, h.children)),
legacy...,
- a[".docs-heading-anchor-permalink", :href => frag, :title => "Permalink"]
+ a[".docs-heading-anchor-permalink", :href=>frag, :title=>"Permalink"]
)
end
@@ -1645,7 +1813,7 @@ function push!(lb::ListBuilder, level, node)
if isempty(lb.es) || typeof(last(lb.es)) !== ListBuilder
push!(lb.es, ListBuilder())
end
- push!(last(lb.es), level-1, node)
+ push!(last(lb.es), level - 1, node)
end
end
@@ -1700,17 +1868,26 @@ function domify(dctx::DCtx, mdast_node::Node, node::Documenter.DocsNode)
@tags a code article header span
# push to search index
- rec = SearchRecord(ctx, navnode;
+ rec = SearchRecord(
+ ctx,
+ navnode;
fragment=Documenter.anchor_fragment(node.anchor),
title=string(node.object.binding),
category=Documenter.doccat(node.object),
- text = mdflatten(mdast_node))
+ text=mdflatten(mdast_node)
+ )
push!(ctx.search_index, rec)
article[".docstring"](
header(
- a[".docstring-article-toggle-button.fa-solid.fa-chevron-down", :href=>"javascript:;", :title=>"Collapse docstring"],
- a[".docstring-binding", :id=>node.anchor.id, :href=>"#$(node.anchor.id)"](code("$(node.object.binding)")),
+ a[
+ ".docstring-article-toggle-button.fa-solid.fa-chevron-down",
+ :href=>"javascript:;",
+ :title=>"Collapse docstring"
+ ],
+ a[".docstring-binding", :id=>node.anchor.id, :href=>"#$(node.anchor.id)"](
+ code("$(node.object.binding)")
+ ),
" — ", # —
span[".docstring-category"]("$(Documenter.doccat(node.object))")
),
@@ -1732,7 +1909,10 @@ function domify_doc(dctx::DCtx, node::Node)
if !ctx.settings.disable_git
url = Documenter.source_url(ctx.doc, result)
if url !== nothing
- push!(ret.nodes, a[".docs-sourcelink", :target=>"_blank", :href=>url]("source"))
+ push!(
+ ret.nodes,
+ a[".docs-sourcelink", :target=>"_blank", :href=>url]("source")
+ )
end
end
return ret
@@ -1761,7 +1941,11 @@ Prints a warning/error if the page goes over the `size_threshold` or `size_thres
limits, and in the former case also returns `false`, to report back to the caller that the
size threshold check failed.
"""
-function write_html(ctx::HTMLContext, navnode::Documenter.NavNode, page_html::DOM.HTMLDocument) :: Bool
+function write_html(
+ ctx::HTMLContext,
+ navnode::Documenter.NavNode,
+ page_html::DOM.HTMLDocument
+)::Bool
page_path = get_url(ctx, navnode)
buf = IOBuffer()
print(buf, page_html)
@@ -1813,7 +1997,7 @@ function format_units(size)
end
end
- return string(round(size, digits = 2), " (", unit, ")")
+ return string(round(size, digits=2), " (", unit, ")")
end
"""
@@ -1857,7 +2041,11 @@ size threshold, and returns the filename (that should be in the same directory a
corresponding HTML file). If the data is under the threshold, no file is created, and the
function returns `nothing`.
"""
-function write_data_file(dctx::DCtx, data::Union{Vector{UInt8},AbstractString}; suffix::AbstractString)
+function write_data_file(
+ dctx::DCtx,
+ data::Union{Vector{UInt8},AbstractString};
+ suffix::AbstractString
+)
ctx, navnode = dctx.ctx, dctx.navnode
# If we're under the threshold, we return `nothing`, indicating to the caller that
# they should inline the file instead.
@@ -1905,17 +2093,19 @@ function data_filename(dctx::DCtx, slug::AbstractString, suffix::AbstractString)
string(pagename, "-", slug)
end
# Now we need to find a valid file name, in case there are existing duplicates.
- filename = find_valid_data_file(joinpath(ctx.doc.user.build, dir), filename_prefix, suffix)
- return (;
- filename,
- path = joinpath(ctx.doc.user.build, dir, filename),
- )
+ filename =
+ find_valid_data_file(joinpath(ctx.doc.user.build, dir), filename_prefix, suffix)
+ return (; filename, path=joinpath(ctx.doc.user.build, dir, filename),)
end
-function find_valid_data_file(directory::AbstractString, prefix::AbstractString, suffix::AbstractString)
+function find_valid_data_file(
+ directory::AbstractString,
+ prefix::AbstractString,
+ suffix::AbstractString
+)
# We'll try 10_000 different filename.. if this doesn't work, then something is probably really
# badly wrong, and so we just crash.
- for i in 0:10_000
+ for i = 0:10_000
filename = if i == 0
string(prefix, suffix)
else
@@ -2016,7 +2206,8 @@ function pagetitle(dctx::DCtx)
[MarkdownAST.@ast("-")]
end
-mdflatten_pagetitle(dctx::DCtx) = sprint((io, ns) -> foreach(n -> mdflatten(io, n), ns), pagetitle(dctx))
+mdflatten_pagetitle(dctx::DCtx) =
+ sprint((io, ns) -> foreach(n -> mdflatten(io, n), ns), pagetitle(dctx))
"""
Returns an ordered list of tuples, `(toplevel, anchor, text)`, corresponding to level 1 and 2
@@ -2048,10 +2239,10 @@ function collect_subsections(page::MarkdownAST.Node)
return sections
end
-function domify_ansicoloredtext(text::AbstractString, class = "")
+function domify_ansicoloredtext(text::AbstractString, class="")
@tags pre
stack = DOM.Node[pre()] # this `pre` is dummy
- function cb(io::IO, printer, tag::String, attrs::Dict{Symbol, String})
+ function cb(io::IO, printer, tag::String, attrs::Dict{Symbol,String})
text = String(take!(io))
children = stack[end].nodes
isempty(text) || push!(children, Tag(Symbol("#RAW#"))(text))
@@ -2065,8 +2256,12 @@ function domify_ansicoloredtext(text::AbstractString, class = "")
return true
end
ansiclass = isempty(class) ? "ansi" : class * " ansi"
- printer = ANSIColoredPrinters.HTMLPrinter(IOBuffer(text), callback = cb,
- root_tag = "code", root_class = ansiclass)
+ printer = ANSIColoredPrinters.HTMLPrinter(
+ IOBuffer(text),
+ callback=cb,
+ root_tag="code",
+ root_class=ansiclass
+ )
show(IOBuffer(), MIME"text/html"(), printer)
return stack[1].nodes
end
@@ -2087,9 +2282,11 @@ function domify(dctx::DCtx, node::Node, e::MarkdownAST.Text)
return DOM.Node(text)
end
-domify(dctx::DCtx, node::Node, ::MarkdownAST.BlockQuote) = Tag(:blockquote)(domify(dctx, node.children))
+domify(dctx::DCtx, node::Node, ::MarkdownAST.BlockQuote) =
+ Tag(:blockquote)(domify(dctx, node.children))
-domify(dctx::DCtx, node::Node, ::MarkdownAST.Strong) = Tag(:strong)(domify(dctx, node.children))
+domify(dctx::DCtx, node::Node, ::MarkdownAST.Strong) =
+ Tag(:strong)(domify(dctx, node.children))
function domify(dctx::DCtx, node::Node, c::MarkdownAST.CodeBlock)
ctx, navnode, settings = dctx.ctx, dctx.navnode, dctx.settings
@@ -2099,7 +2296,8 @@ function domify(dctx::DCtx, node::Node, c::MarkdownAST.CodeBlock)
language = Documenter.codelang(language)
if language == "documenter-ansi" # From @repl blocks (through MultiCodeBlock)
return pre(domify_ansicoloredtext(c.code, "nohighlight hljs"))
- elseif settings !== nothing && settings.prerender &&
+ elseif settings !== nothing &&
+ settings.prerender &&
!(isempty(language) || language == "nohighlight")
r = hljs_prerender(c, settings)
r !== nothing && return r
@@ -2121,7 +2319,8 @@ function domify(dctx::DCtx, node::Node, mcb::Documenter.MultiCodeBlock)
push!(p.nodes, code)
# insert a
between output and the next input
if i != length(node.children) &&
- findnext(x -> x.element.info == mcb.language, collect(node.children), i + 1) == i + 1
+ findnext(x -> x.element.info == mcb.language, collect(node.children), i + 1) ==
+ i + 1
push!(p.nodes, br())
end
end
@@ -2147,7 +2346,7 @@ function hljs_prerender(c::MarkdownAST.CodeBlock, settings::HTML)
# return pre(code[".nohighlight $(lang) .hljs"](Tag(Symbol("#RAW#"))(str)))
return pre(code[".language-$(lang) .hljs"](Tag(Symbol("#RAW#"))(str)))
catch e
- @error "HTMLWriter: prerendering failed" exception=e stderr=String(take!(err))
+ @error "HTMLWriter: prerendering failed" exception = e stderr = String(take!(err))
end
return nothing
end
@@ -2159,7 +2358,7 @@ end
domify(dctx::DCtx, node::Node, ::MarkdownAST.ThematicBreak) = Tag(:hr)()
-const ImageElements = Union{MarkdownAST.Image, Documenter.LocalImage}
+const ImageElements = Union{MarkdownAST.Image,Documenter.LocalImage}
function domify(dctx::DCtx, node::Node, i::ImageElements)
ctx, navnode = dctx.ctx, dctx.navnode
alt = mdflatten(node.children)
@@ -2169,30 +2368,30 @@ function domify(dctx::DCtx, node::Node, i::ImageElements)
@tags video img a
if occursin(r"\.(webm|mp4|ogg|ogm|ogv|avi)$", url)
- video[:src => url, :controls => "true", :title => alt](
- a[:href => url](alt)
- )
+ video[:src=>url, :controls=>"true", :title=>alt](a[:href=>url](alt))
else
- img[:src => url, :alt => alt]
+ img[:src=>url, :alt=>alt]
end
end
domify(dctx::DCtx, node::Node, ::MarkdownAST.Emph) = Tag(:em)(domify(dctx, node.children))
-domify(dctx::DCtx, node::Node, m::MarkdownAST.DisplayMath) = Tag(:p)[".math-container"](string("\\[", m.math, "\\]"))
+domify(dctx::DCtx, node::Node, m::MarkdownAST.DisplayMath) =
+ Tag(:p)[".math-container"](string("\\[", m.math, "\\]"))
-domify(dctx::DCtx, node::Node, m::MarkdownAST.InlineMath) = Tag(:span)(string('$', m.math, '$'))
+domify(dctx::DCtx, node::Node, m::MarkdownAST.InlineMath) =
+ Tag(:span)(string('$', m.math, '$'))
domify(dctx::DCtx, node::Node, m::MarkdownAST.LineBreak) = Tag(:br)()
# TODO: Implement SoftBreak, Backslash (but they don't appear in standard library Markdown conversions)
-const LinkElements = Union{MarkdownAST.Link, Documenter.PageLink, Documenter.LocalLink}
+const LinkElements = Union{MarkdownAST.Link,Documenter.PageLink,Documenter.LocalLink}
function domify(dctx::DCtx, node::Node, link::LinkElements)
droplinks = dctx.droplinks
url = filehref(dctx, node, link)
# function mdconvert(link::Markdown.Link, parent; droplinks=false, kwargs...)
link_text = domify(dctx, node.children)
- droplinks ? link_text : Tag(:a)[:href => url](link_text)
+ droplinks ? link_text : Tag(:a)[:href=>url](link_text)
end
function domify(dctx::DCtx, node::Node, list::MarkdownAST.List)
@@ -2208,8 +2407,11 @@ function domify(dctx::DCtx, node::Node, ::MarkdownAST.Paragraph)
# See also: https://github.com/JuliaLang/julia/pull/26598
is_in_tight_list(node) ? content : Tag(:p)(content)
end
-is_in_tight_list(node::Node) = !isnothing(node.parent) && isa(node.parent.element, MarkdownAST.Item) &&
- !isnothing(node.parent.parent) && isa(node.parent.parent.element, MarkdownAST.List) &&
+is_in_tight_list(node::Node) =
+ !isnothing(node.parent) &&
+ isa(node.parent.element, MarkdownAST.Item) &&
+ !isnothing(node.parent.parent) &&
+ isa(node.parent.parent.element, MarkdownAST.List) &&
node.parent.parent.element.tight
function domify(dctx::DCtx, node::Node, t::MarkdownAST.Table)
@@ -2227,11 +2429,11 @@ function domify(dctx::DCtx, node::Node, t::MarkdownAST.Table)
end
table(
tr(map(enumerate(th_row.children)) do (i, x)
- th[:style => alignment_style[i]](domify(dctx, x.children))
+ th[:style=>alignment_style[i]](domify(dctx, x.children))
end),
map(tbody_rows) do x
tr(map(enumerate(x.children)) do (i, y) # each cell in a row
- td[:style => alignment_style[i]](domify(dctx, y.children))
+ td[:style=>alignment_style[i]](domify(dctx, y.children))
end)
end
)
@@ -2253,7 +2455,9 @@ end
function domify(dctx::DCtx, node::Node, f::MarkdownAST.FootnoteLink)
@tags sup a
- sup[".footnote-reference"](a["#citeref-$(f.id)", :href => "#footnote-$(f.id)"]("[$(f.id)]"))
+ sup[".footnote-reference"](
+ a["#citeref-$(f.id)", :href=>"#footnote-$(f.id)"]("[$(f.id)]")
+ )
end
function domify(dctx::DCtx, node::Node, f::MarkdownAST.FootnoteDefinition)
# As we run through the document to generate the document, we won't render the footnote
@@ -2276,50 +2480,55 @@ end
function domify(dctx::DCtx, node::Node, a::MarkdownAST.Admonition)
@tags header div details summary
- colorclass =
- (a.category == "danger") ? ".is-danger" :
- (a.category == "warning") ? ".is-warning" :
- (a.category == "note") ? ".is-info" :
- (a.category == "info") ? ".is-info" :
- (a.category == "tip") ? ".is-success" :
- (a.category == "compat") ? ".is-compat" : begin
- # If the admonition category is not one of the standard ones, we tag the
- # admonition div element with a `is-category-$(category)` class. However, we
- # first carefully sanitize the category name. Strictly speaking, this is not
- # necessary when were using the Markdown parser in the Julia standard library,
- # since it restricts the category to [a-z]+. But it is possible for the users to
- # construct their own Admonition objects with arbitrary category strings and
- # pass them onto Documenter.
- #
- # (1) remove all characters except A-Z, a-z, 0-9 and -
- cat_sanitized = replace(a.category, r"[^A-Za-z0-9-]" => "")
- # (2) remove any dashes from the beginning and end of the string
- cat_sanitized = replace(cat_sanitized, r"^[-]+" => "")
- cat_sanitized = replace(cat_sanitized, r"[-]+$" => "")
- # (3) reduce any duplicate dashes in the middle to single dashes
- cat_sanitized = replace(cat_sanitized, r"[-]+" => "-")
- cat_sanitized = lowercase(cat_sanitized)
- # (4) if nothing is left (or the category was empty to begin with), we don't
- # apply a class
- isempty(cat_sanitized) ? "" : ".is-category-$(cat_sanitized)"
- end
+ colorclass = if (a.category == "danger")
+ ".is-danger"
+ elseif (a.category == "warning")
+ ".is-warning"
+ elseif (a.category == "note")
+ ".is-info"
+ elseif (a.category == "info")
+ ".is-info"
+ elseif (a.category == "tip")
+ ".is-success"
+ elseif (a.category == "compat")
+ ".is-compat"
+ else
+ begin
+ # If the admonition category is not one of the standard ones, we tag the
+ # admonition div element with a `is-category-$(category)` class. However, we
+ # first carefully sanitize the category name. Strictly speaking, this is not
+ # necessary when were using the Markdown parser in the Julia standard library,
+ # since it restricts the category to [a-z]+. But it is possible for the users to
+ # construct their own Admonition objects with arbitrary category strings and
+ # pass them onto Documenter.
+ #
+ # (1) remove all characters except A-Z, a-z, 0-9 and -
+ cat_sanitized = replace(a.category, r"[^A-Za-z0-9-]" => "")
+ # (2) remove any dashes from the beginning and end of the string
+ cat_sanitized = replace(cat_sanitized, r"^[-]+" => "")
+ cat_sanitized = replace(cat_sanitized, r"[-]+$" => "")
+ # (3) reduce any duplicate dashes in the middle to single dashes
+ cat_sanitized = replace(cat_sanitized, r"[-]+" => "-")
+ cat_sanitized = lowercase(cat_sanitized)
+ # (4) if nothing is left (or the category was empty to begin with), we don't
+ # apply a class
+ isempty(cat_sanitized) ? "" : ".is-category-$(cat_sanitized)"
+ end
+ end
inner_div = div[".admonition-body"](domify(dctx, node.children))
if a.category == "details"
# details admonitions are rendered as blocks
- details[".admonition.is-details"](
- summary[".admonition-header"](a.title), inner_div
- )
+ details[".admonition.is-details"](summary[".admonition-header"](a.title), inner_div)
else
- div[".admonition$(colorclass)"](
- header[".admonition-header"](a.title), inner_div
- )
+ div[".admonition$(colorclass)"](header[".admonition-header"](a.title), inner_div)
end
end
# Select the "best" representation for HTML output.
domify(dctx::DCtx, node::Node, ::Documenter.MultiOutput) = domify(dctx, node.children)
-domify(dctx::DCtx, node::Node, moe::Documenter.MultiOutputElement) = Base.invokelatest(domify, dctx, node, moe.element)
+domify(dctx::DCtx, node::Node, moe::Documenter.MultiOutputElement) =
+ Base.invokelatest(domify, dctx, node, moe.element)
function domify(dctx::DCtx, node::Node, d::Dict{MIME,Any})
rawhtml(code) = Tag(Symbol("#RAW#"))(code)
@@ -2344,7 +2553,7 @@ function domify(dctx::DCtx, node::Node, d::Dict{MIME,Any})
dom = if length(svg) >= dctx.ctx.settings.example_size_threshold
filename = write_data_file(dctx, svg; suffix=".svg")
@assert !isnothing(filename)
- img[:src => filename, :alt => "Example block output"]
+ img[:src=>filename, :alt=>"Example block output"]
elseif svg_tag_match === nothing
# There is no svg tag so we don't do any more advanced
# processing and just return the svg as HTML.
@@ -2358,7 +2567,11 @@ function domify(dctx::DCtx, node::Node, d::Dict{MIME,Any})
svg_tag = svg_tag_match.match
xmlns_present = occursin("xmlns", svg_tag)
if !xmlns_present
- svg = replace(svg, "