diff --git a/.JuliaFormatter.toml b/.JuliaFormatter.toml new file mode 100644 index 0000000000..0661727bba --- /dev/null +++ b/.JuliaFormatter.toml @@ -0,0 +1,20 @@ +margin = 92 +indent = 4 +remove_extra_newlines = false +always_use_return = false +short_to_long_function_def = false +always_for_in = false +import_to_using = false +pipe_to_function_call = false +whitespace_ops_in_indices = false +whitespace_in_kwargs = false +whitespace_typedefs = false +annotate_untyped_fields_with_any = false +format_docstrings = false +conditional_to_if = true +normalize_line_endings = "unix" +align_assignment = true +align_struct_field = true +align_pair_arrow = true +align_matrix = true +trailing_comma = "nothing" diff --git a/docs/DocumenterShowcase.jl b/docs/DocumenterShowcase.jl index 8f0d29e1a3..f4a8a77b67 100644 --- a/docs/DocumenterShowcase.jl +++ b/docs/DocumenterShowcase.jl @@ -89,17 +89,20 @@ function hello(who) end struct SVGCircle - stroke :: String - fill :: String + stroke::String + fill::String end function Base.show(io, ::MIME"image/svg+xml", c::SVGCircle) - write(io, """ - - - - - - """) + write( + io, + """ + + + + + +""" + ) end "The type definition." @@ -109,6 +112,6 @@ struct Foo{T,S} end Foo() = Foo{Nothing,Nothing}() "Constructor `Foo{T}()` with one parametric argument." 
-Foo{T}() where T = Foo{T,Nothing}() +Foo{T}() where {T} = Foo{T,Nothing}() end # module diff --git a/docs/changelog.jl b/docs/changelog.jl index 4e83a1de94..a91b7f253c 100644 --- a/docs/changelog.jl +++ b/docs/changelog.jl @@ -3,5 +3,5 @@ using Changelog Changelog.generate( Changelog.CommonMark(), joinpath(@__DIR__, "..", "CHANGELOG.md"); - repo = "JuliaDocs/Documenter.jl", + repo="JuliaDocs/Documenter.jl", ) diff --git a/docs/instantiate.jl b/docs/instantiate.jl index f5d0508941..e0b676f4aa 100644 --- a/docs/instantiate.jl +++ b/docs/instantiate.jl @@ -11,12 +11,14 @@ cd(project_directory) do @info "DocumenterTools already cloned to dev/DocumenterTools" run(`git -C dev/DocumenterTools fetch origin`) else - run(`git clone -n https://github.com/JuliaDocs/DocumenterTools.jl.git dev/DocumenterTools`) + run( + `git clone -n https://github.com/JuliaDocs/DocumenterTools.jl.git dev/DocumenterTools` + ) end run(`git -C dev/DocumenterTools checkout documenter-v0.1.17+1.0.0`) Pkg.develop([ - PackageSpec(path = documenter_directory), - PackageSpec(path = "dev/DocumenterTools"), + PackageSpec(path=documenter_directory), + PackageSpec(path="dev/DocumenterTools"), ]) Pkg.instantiate() end diff --git a/docs/make.jl b/docs/make.jl index 856d62b7dc..d49d05074f 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -15,59 +15,63 @@ Changelog.generate( Changelog.Documenter(), joinpath(@__DIR__, "..", "CHANGELOG.md"), joinpath(@__DIR__, "src", "release-notes.md"); - repo = "JuliaDocs/Documenter.jl", + repo="JuliaDocs/Documenter.jl", ) makedocs( - modules = [Documenter, DocumenterTools, DocumenterShowcase], - format = if "pdf" in ARGS - Documenter.LaTeX(platform = "docker") + modules=[Documenter, DocumenterTools, DocumenterShowcase], + format=if "pdf" in ARGS + Documenter.LaTeX(platform="docker") else Documenter.HTML( # Use clean URLs, unless built as a "local" build - prettyurls = !("local" in ARGS), - canonical = "https://documenter.juliadocs.org/stable/", - assets = 
["assets/favicon.ico"], - analytics = "UA-136089579-2", - highlights = ["yaml"], - ansicolor = true, - size_threshold_ignore = ["release-notes.md"], + prettyurls=!("local" in ARGS), + canonical="https://documenter.juliadocs.org/stable/", + assets=["assets/favicon.ico"], + analytics="UA-136089579-2", + highlights=["yaml"], + ansicolor=true, + size_threshold_ignore=["release-notes.md"], ) end, - build = ("pdf" in ARGS) ? "build-pdf" : "build", - debug = ("pdf" in ARGS), - sitename = "Documenter.jl", - authors = "Michael Hatherly, Morten Piibeleht, and contributors.", - linkcheck = "linkcheck" in ARGS, - linkcheck_ignore = [ + build=("pdf" in ARGS) ? "build-pdf" : "build", + debug=("pdf" in ARGS), + sitename="Documenter.jl", + authors="Michael Hatherly, Morten Piibeleht, and contributors.", + linkcheck="linkcheck" in ARGS, + linkcheck_ignore=[ # We'll ignore links that point to GitHub's edit pages, as they redirect to the # login screen and cause a warning: r"https://github.com/([A-Za-z0-9_.-]+)/([A-Za-z0-9_.-]+)/edit(.*)", "https://nvd.nist.gov/vuln/detail/CVE-2018-16487", - ] ∪ (get(ENV, "GITHUB_ACTIONS", nothing) == "true" ? [ - # Extra ones we ignore only on CI. - # - # It seems that CTAN blocks GitHub Actions? - "https://ctan.org/pkg/minted", - ] : []), - pages = [ + ] ∪ ( + if get(ENV, "GITHUB_ACTIONS", nothing) == "true" + [ + # Extra ones we ignore only on CI. + # + # It seems that CTAN blocks GitHub Actions? 
+ "https://ctan.org/pkg/minted", + ] + else + [] + end + ), + pages=[ "Home" => "index.md", "Manual" => Any[ - "Guide" => "man/guide.md", + "Guide"=>"man/guide.md", "man/examples.md", "man/syntax.md", "man/doctests.md", "man/latex.md", - hide("man/hosting.md", [ - "man/hosting/walkthrough.md" - ]), + hide("man/hosting.md", ["man/hosting/walkthrough.md"]), "man/other-formats.md", ], "showcase.md", "Reference" => Any[ - "Public API" => "lib/public.md", + "Public API"=>"lib/public.md", "lib/remote-links.md", - "Semantic versioning" => "lib/semver.md", + "Semantic versioning"=>"lib/semver.md", ], "Developers" => [ "contributing.md", @@ -79,8 +83,8 @@ makedocs( ], "release-notes.md", ], - warnonly = ("strict=false" in ARGS), - doctest = ("doctest=only" in ARGS) ? :only : true, + warnonly=("strict=false" in ARGS), + doctest=("doctest=only" in ARGS) ? :only : true, ) if "pdf" in ARGS @@ -89,21 +93,23 @@ if "pdf" in ARGS let files = readdir(joinpath(@__DIR__, "build-pdf")) for f in files if startswith(f, "Documenter.jl") && endswith(f, ".pdf") - mv(joinpath(@__DIR__, "build-pdf", f), - joinpath(@__DIR__, "build-pdf", "commit", f)) + mv( + joinpath(@__DIR__, "build-pdf", f), + joinpath(@__DIR__, "build-pdf", "commit", f) + ) end end end deploydocs( - repo = "github.com/JuliaDocs/Documenter.jl.git", - target = "pdf/build-pdf/commit", - branch = "gh-pages-pdf", - forcepush = true, + repo="github.com/JuliaDocs/Documenter.jl.git", + target="pdf/build-pdf/commit", + branch="gh-pages-pdf", + forcepush=true, ) else deploydocs( - repo = "github.com/JuliaDocs/Documenter.jl.git", - target = "build", - push_preview = true, + repo="github.com/JuliaDocs/Documenter.jl.git", + target="build", + push_preview=true, ) end diff --git a/src/DocMeta.jl b/src/DocMeta.jl index fedf1a70b8..a5f63bdfe6 100644 --- a/src/DocMeta.jl +++ b/src/DocMeta.jl @@ -68,8 +68,16 @@ If `recursive` is set to `true`, it sets the same metadata value for all the sub If `warn` is 
`true`, it prints a warning when `key` already exists and it gets rewritten. """ function setdocmeta!(m::Module, key::Symbol, value; warn=true, recursive=false) - key in keys(VALIDMETA) || throw(ArgumentError("Invalid metadata key\nValid keys are: $(join(keys(VALIDMETA), ", "))")) - isa(value, VALIDMETA[key]) || throw(ArgumentError("Bad value type ($(typeof(value))) for metadata key $(key). Must be <: $(VALIDMETA[key])")) + key in keys(VALIDMETA) || throw( + ArgumentError( + "Invalid metadata key\nValid keys are: $(join(keys(VALIDMETA), ", "))" + ) + ) + isa(value, VALIDMETA[key]) || throw( + ArgumentError( + "Bad value type ($(typeof(value))) for metadata key $(key). Must be <: $(VALIDMETA[key])" + ) + ) if recursive for mod in Documenter.submodules(m) setdocmeta!(mod, key, value; warn=warn, recursive=false) diff --git a/src/DocSystem.jl b/src/DocSystem.jl index 0124d35870..468a4cfdf5 100644 --- a/src/DocSystem.jl +++ b/src/DocSystem.jl @@ -42,8 +42,10 @@ binding(f::Function) = binding(parentmodule(f), nameof(f)) # # Note that `IntrinsicFunction` is exported from `Base` in `0.4`, but not in `0.5`. # -let INTRINSICS = Dict(map(s -> getfield(Core.Intrinsics, s) => s, names(Core.Intrinsics, all=true))) - global binding(i::Core.IntrinsicFunction) = binding(Core.Intrinsics, INTRINSICS[i]::Symbol) +let INTRINSICS = + Dict(map(s -> getfield(Core.Intrinsics, s) => s, names(Core.Intrinsics, all=true))) + global binding(i::Core.IntrinsicFunction) = + binding(Core.Intrinsics, INTRINSICS[i]::Symbol) end # @@ -59,11 +61,15 @@ end # # Pseudo-eval of `Expr`s to find their equivalent `Binding`. # -binding(m::Module, x::Expr) = - Meta.isexpr(x, :.) ? binding(getmod(m, x.args[1]), x.args[2].value) : - Meta.isexpr(x, [:call, :macrocall, :curly]) ? binding(m, x.args[1]) : - Meta.isexpr(x, :where) ? binding(m, x.args[1].args[1]) : - error("`binding` cannot understand expression `$x`.") +binding(m::Module, x::Expr) = if Meta.isexpr(x, :.) 
+ binding(getmod(m, x.args[1]), x.args[2].value) +elseif Meta.isexpr(x, [:call, :macrocall, :curly]) + binding(m, x.args[1]) +elseif Meta.isexpr(x, :where) + binding(m, x.args[1].args[1]) +else + error("`binding` cannot understand expression `$x`.") +end # Helper methods for the above `binding` method. getmod(m::Module, x::Expr) = getfield(getmod(m, x.args[1]), x.args[2].value) @@ -114,7 +120,7 @@ The optional keyword arguments are used to add new data to the `DocStr`'s `.data` dictionary. """ function docstr(md::Markdown.MD; kws...) - data = Dict{Symbol, Any}( + data = Dict{Symbol,Any}( :path => md.meta[:path], :module => md.meta[:module], :linenumber => 0, @@ -146,7 +152,7 @@ function convertmeta(meta::IdDict{Any,Any}) if !haskey(CACHED, meta) docs = IdDict{Any,Any}() for (k, v) in meta - if !isa(k, Union{Number, AbstractString, IdDict{Any,Any}}) + if !isa(k, Union{Number,AbstractString,IdDict{Any,Any}}) docs[binding(k)] = multidoc(v) end end @@ -171,11 +177,11 @@ Find all `DocStr` objects that match the provided arguments exactly. Return a `Vector{DocStr}` ordered by definition order. """ function getspecificdocs( - binding::Docs.Binding, - typesig::Type = Union{}, - compare = (==), - modules = Docs.modules, - ) + binding::Docs.Binding, + typesig::Type=Union{}, + compare=(==), + modules=Docs.modules, +) # Fall back to searching all modules if user provides no modules. modules = isempty(modules) ? Docs.modules : modules # Keywords are special-cased within the docsystem. Handle those first. @@ -208,12 +214,12 @@ That is, if [`getspecificdocs`](@ref) fails, get docs for aliases of try getting docs for `<:`. """ function getdocs( - binding::Docs.Binding, - typesig::Type = Union{}; - compare = (==), - modules = Docs.modules, - aliases = true, - ) + binding::Docs.Binding, + typesig::Type=Union{}; + compare=(==), + modules=Docs.modules, + aliases=true, +) # First, we try to find the docs that _exactly_ match the binding. 
If you # have aliases, you can have a separate docstring attached to the alias. results = getspecificdocs(binding, typesig, compare, modules) @@ -244,7 +250,7 @@ searching for the `Binding` in the docsystem. Note that when conversion fails this method returns an empty `Vector{DocStr}`. """ -function getdocs(object::Any, typesig::Type = Union{}; kws...) +function getdocs(object::Any, typesig::Type=Union{}; kws...) binding = aliasof(object, object) binding === object ? DocStr[] : getdocs(binding, typesig; kws...) end @@ -287,7 +293,7 @@ in case `Base.Docs.parsedoc` fails with an exception. """ function parsedoc(docstr::DocStr) md = try - Base.Docs.parsedoc(docstr) :: Markdown.MD + Base.Docs.parsedoc(docstr)::Markdown.MD catch exception @error """ parsedoc failed to parse a docstring into Markdown. This indicates a problem with the docstring. diff --git a/src/Documenter.jl b/src/Documenter.jl index 3c9e066714..0c9d56ebf7 100644 --- a/src/Documenter.jl +++ b/src/Documenter.jl @@ -38,15 +38,24 @@ const DOCUMENTER_VERSION = let end # Potentially sensitive variables to be removed from environment when not needed -const NO_KEY_ENV = Dict( - "DOCUMENTER_KEY" => nothing, - "DOCUMENTER_KEY_PREVIEWS" => nothing, -) +const NO_KEY_ENV = Dict("DOCUMENTER_KEY" => nothing, "DOCUMENTER_KEY_PREVIEWS" => nothing,) # Names of possible internal errors -const ERROR_NAMES = [:autodocs_block, :cross_references, :docs_block, :doctest, - :eval_block, :example_block, :footnote, :linkcheck_remotes, :linkcheck, - :meta_block, :missing_docs, :parse_error, :setup_block] +const ERROR_NAMES = [ + :autodocs_block, + :cross_references, + :docs_block, + :doctest, + :eval_block, + :example_block, + :footnote, + :linkcheck_remotes, + :linkcheck, + :meta_block, + :missing_docs, + :parse_error, + :setup_block +] """ abstract type Plugin end @@ -97,8 +106,8 @@ import .LaTeXWriter: LaTeX # User Interface. 
# --------------- -export makedocs, deploydocs, hide, doctest, DocMeta, asset, Remotes, - KaTeX, MathJax, MathJax2, MathJax3 +export makedocs, + deploydocs, hide, doctest, DocMeta, asset, Remotes, KaTeX, MathJax, MathJax2, MathJax3 include("makedocs.jl") include("deployconfig.jl") diff --git a/src/anchors.jl b/src/anchors.jl index 35fe92797f..573e1c1a3b 100644 --- a/src/anchors.jl +++ b/src/anchors.jl @@ -24,7 +24,7 @@ mutable struct Anchor nth :: Int # Reverse-lookup of .object for MarkdownAST trees. This is intentionally # uninitialized until set in Documenter.markdownast() - node :: MarkdownAST.Node{Nothing} + node::MarkdownAST.Node{Nothing} Anchor(object) = new(object, 0, "", "", 1) end @@ -38,8 +38,8 @@ Tree structure representing anchors in a document and their relationships with e Each `id` maps to a `file` which in turn maps to a vector of `Anchor` objects. """ mutable struct AnchorMap - map :: Dict{String, Dict{String, Vector{Anchor}}} - count :: Int + map::Dict{String,Dict{String,Vector{Anchor}}} + count::Int AnchorMap() = new(Dict(), 0) end @@ -55,7 +55,7 @@ Either an actual [`Anchor`](@ref) object may be provided or any other object whi automatically wrapped in an [`Anchor`](@ref) before being added to the [`AnchorMap`](@ref). """ function anchor_add!(m::AnchorMap, anchor::Anchor, id, file) - filemap = get!(m.map, id, Dict{String, Vector{Anchor}}()) + filemap = get!(m.map, id, Dict{String,Vector{Anchor}}()) anchors = get!(filemap, file, Anchor[]) push!(anchors, anchor) anchor.order = m.count += 1 @@ -75,9 +75,10 @@ $(SIGNATURES) Does the given `id` exist within the [`AnchorMap`](@ref)? A `file` and integer `n` may also be provided to narrow the search for existence. 
""" -anchor_exists(m::AnchorMap, id, file, n) = anchor_exists(m, id, file) && 1 ≤ n ≤ length(m.map[id][file]) -anchor_exists(m::AnchorMap, id, file) = anchor_exists(m, id) && haskey(m.map[id], file) -anchor_exists(m::AnchorMap, id) = haskey(m.map, id) +anchor_exists(m::AnchorMap, id, file, n) = + anchor_exists(m, id, file) && 1 ≤ n ≤ length(m.map[id][file]) +anchor_exists(m::AnchorMap, id, file) = anchor_exists(m, id) && haskey(m.map[id], file) +anchor_exists(m::AnchorMap, id) = haskey(m.map, id) # Anchor uniqueness. # ------------------ @@ -89,12 +90,11 @@ Is the `id` unique within the given [`AnchorMap`](@ref)? May also specify the `f """ function anchor_isunique(m::AnchorMap, id) anchor_exists(m, id) && - length(m.map[id]) === 1 && - anchor_isunique(m, id, first(first(m.map[id]))) + length(m.map[id]) === 1 && + anchor_isunique(m, id, first(first(m.map[id]))) end function anchor_isunique(m::AnchorMap, id, file) - anchor_exists(m, id, file) && - length(m.map[id][file]) === 1 + anchor_exists(m, id, file) && length(m.map[id][file]) === 1 end # Get anchor. @@ -107,19 +107,13 @@ Returns the [`Anchor`](@ref) object matching `id`. `file` and `n` may also be pr `Anchor` is returned, or `nothing` in case of no match. """ function anchor(m::AnchorMap, id) - anchor_isunique(m, id) ? - anchor(m, id, first(first(m.map[id])), 1) : - nothing + anchor_isunique(m, id) ? anchor(m, id, first(first(m.map[id])), 1) : nothing end function anchor(m::AnchorMap, id, file) - anchor_isunique(m, id, file) ? - anchor(m, id, file, 1) : - nothing + anchor_isunique(m, id, file) ? anchor(m, id, file, 1) : nothing end function anchor(m::AnchorMap, id, file, n) - anchor_exists(m, id, file, n) ? - m.map[id][file][n] : - nothing + anchor_exists(m, id, file, n) ? 
m.map[id][file][n] : nothing end """ diff --git a/src/builder_pipeline.jl b/src/builder_pipeline.jl index af98526ae3..7bcd0e8f8f 100644 --- a/src/builder_pipeline.jl +++ b/src/builder_pipeline.jl @@ -6,57 +6,57 @@ These actions may involve creating directory structures, expanding templates, ru doctests, etc. """ module Builder - import ..Documenter - import ..Documenter.Selectors - - """ - The default document processing "pipeline", which consists of the following actions: - - - [`SetupBuildDirectory`](@ref) - - [`Doctest`](@ref) - - [`ExpandTemplates`](@ref) - - [`CheckDocument`](@ref) - - [`Populate`](@ref) - - [`RenderDocument`](@ref) - - """ - abstract type DocumentPipeline <: Selectors.AbstractSelector end - - """ - Creates the correct directory layout within the `build` folder and parses markdown files. - """ - abstract type SetupBuildDirectory <: DocumentPipeline end - - """ - Runs all the doctests in all docstrings and Markdown files. - """ - abstract type Doctest <: DocumentPipeline end - - """ - Executes a sequence of actions on each node of the parsed markdown files in turn. - """ - abstract type ExpandTemplates <: DocumentPipeline end - - """ - Finds and sets URLs for each `@ref` link in the document to the correct destinations. - """ - abstract type CrossReferences <: DocumentPipeline end - - """ - Checks that all documented objects are included in the document and runs doctests on all - valid Julia code blocks. - """ - abstract type CheckDocument <: DocumentPipeline end - - """ - Populates the `ContentsNode`s and `IndexNode`s with links. - """ - abstract type Populate <: DocumentPipeline end - - """ - Writes the document tree to the `build` directory. 
- """ - abstract type RenderDocument <: DocumentPipeline end +import ..Documenter +import ..Documenter.Selectors + +""" +The default document processing "pipeline", which consists of the following actions: + +- [`SetupBuildDirectory`](@ref) +- [`Doctest`](@ref) +- [`ExpandTemplates`](@ref) +- [`CheckDocument`](@ref) +- [`Populate`](@ref) +- [`RenderDocument`](@ref) + +""" +abstract type DocumentPipeline <: Selectors.AbstractSelector end + +""" +Creates the correct directory layout within the `build` folder and parses markdown files. +""" +abstract type SetupBuildDirectory <: DocumentPipeline end + +""" +Runs all the doctests in all docstrings and Markdown files. +""" +abstract type Doctest <: DocumentPipeline end + +""" +Executes a sequence of actions on each node of the parsed markdown files in turn. +""" +abstract type ExpandTemplates <: DocumentPipeline end + +""" +Finds and sets URLs for each `@ref` link in the document to the correct destinations. +""" +abstract type CrossReferences <: DocumentPipeline end + +""" +Checks that all documented objects are included in the document and runs doctests on all +valid Julia code blocks. +""" +abstract type CheckDocument <: DocumentPipeline end + +""" +Populates the `ContentsNode`s and `IndexNode`s with links. +""" +abstract type Populate <: DocumentPipeline end + +""" +Writes the document tree to the `build` directory. 
+""" +abstract type RenderDocument <: DocumentPipeline end end Selectors.order(::Type{Builder.SetupBuildDirectory}) = 1.0 @@ -67,15 +67,16 @@ Selectors.order(::Type{Builder.CheckDocument}) = 4.0 Selectors.order(::Type{Builder.Populate}) = 5.0 Selectors.order(::Type{Builder.RenderDocument}) = 6.0 -Selectors.matcher(::Type{T}, doc::Documenter.Document) where {T <: Builder.DocumentPipeline} = true +Selectors.matcher(::Type{T}, doc::Documenter.Document) where {T<:Builder.DocumentPipeline} = + true -Selectors.strict(::Type{T}) where {T <: Builder.DocumentPipeline} = false +Selectors.strict(::Type{T}) where {T<:Builder.DocumentPipeline} = false function Selectors.runner(::Type{Builder.SetupBuildDirectory}, doc::Documenter.Document) @info "SetupBuildDirectory: setting up build directory." # Frequently used fields. - build = doc.user.build + build = doc.user.build source = doc.user.source workdir = doc.user.workdir @@ -84,7 +85,7 @@ function Selectors.runner(::Type{Builder.SetupBuildDirectory}, doc::Documenter.D # We create the .user.build directory. # If .user.clean is set, we first clean the existing directory. - doc.user.clean && isdir(build) && rm(build; recursive = true) + doc.user.clean && isdir(build) && rm(build; recursive=true) isdir(build) || mkpath(build) # We'll walk over all the files in the .user.source directory. @@ -117,7 +118,7 @@ function Selectors.runner(::Type{Builder.SetupBuildDirectory}, doc::Documenter.D push!(mdpages, Documenter.srcpath(source, root, file)) Documenter.addpage!(doc, src, dst, wd) else - cp(src, dst; force = true) + cp(src, dst; force=true) end end end @@ -136,7 +137,7 @@ function Selectors.runner(::Type{Builder.SetupBuildDirectory}, doc::Documenter.D # Finally we populate the .next and .prev fields of the navnodes that point # to actual pages. 
- local prev::Union{Documenter.NavNode, Nothing} = nothing + local prev::Union{Documenter.NavNode,Nothing} = nothing for navnode in doc.internal.navlist navnode.prev = prev if prev !== nothing @@ -163,8 +164,8 @@ string sorting, except for prioritizing `index.md` (i.e. `index.md` always comes """ function lt_page(a, b) # note: length("index.md") == 8 - a = endswith(a, "index.md") ? chop(a; tail = 8) : a - b = endswith(b, "index.md") ? chop(b; tail = 8) : b + a = endswith(a, "index.md") ? chop(a; tail=8) : a + b = endswith(b, "index.md") ? chop(b; tail=8) : b return a < b end @@ -196,11 +197,14 @@ function walk_navpages(hps::Tuple, parent, doc) walk_navpages(hps..., parent, doc) end -walk_navpages(title::String, children::Vector, parent, doc) = walk_navpages(true, title, nothing, children, parent, doc) -walk_navpages(title::String, page, parent, doc) = walk_navpages(true, title, page, [], parent, doc) +walk_navpages(title::String, children::Vector, parent, doc) = + walk_navpages(true, title, nothing, children, parent, doc) +walk_navpages(title::String, page, parent, doc) = + walk_navpages(true, title, page, [], parent, doc) walk_navpages(p::Pair, parent, doc) = walk_navpages(p.first, p.second, parent, doc) -walk_navpages(ps::Vector, parent, doc) = [walk_navpages(p, parent, doc)::Documenter.NavNode for p in ps] +walk_navpages(ps::Vector, parent, doc) = + [walk_navpages(p, parent, doc)::Documenter.NavNode for p in ps] walk_navpages(src::String, parent, doc) = walk_navpages(true, nothing, src, [], parent, doc) function Selectors.runner(::Type{Builder.Doctest}, doc::Documenter.Document) @@ -209,7 +213,9 @@ function Selectors.runner(::Type{Builder.Doctest}, doc::Documenter.Document) _doctest(doc.blueprint, doc) num_errors = length(doc.internal.errors) if (doc.user.doctest === :only || is_strict(doc, :doctest)) && num_errors > 0 - error("`makedocs` encountered $(num_errors > 1 ? "$(num_errors) doctest errors" : "a doctest error"). 
Terminating build") + error( + "`makedocs` encountered $(num_errors > 1 ? "$(num_errors) doctest errors" : "a doctest error"). Terminating build" + ) end else @info "Doctest: skipped." @@ -250,9 +256,11 @@ function Selectors.runner(::Type{Builder.RenderDocument}, doc::Documenter.Docume fatal_errors = filter(is_strict(doc), doc.internal.errors) c = length(fatal_errors) if c > 0 - error("`makedocs` encountered $(c > 1 ? "errors" : "an error") [" - * join(Ref(":") .* string.(fatal_errors), ", ") - * "] -- terminating build before rendering.") + error( + "`makedocs` encountered $(c > 1 ? "errors" : "an error") [" * + join(Ref(":") .* string.(fatal_errors), ", ") * + "] -- terminating build before rendering." + ) else @info "RenderDocument: rendering document." Documenter.render(doc) diff --git a/src/cross_references.jl b/src/cross_references.jl index e31b0d6284..683840e732 100644 --- a/src/cross_references.jl +++ b/src/cross_references.jl @@ -41,7 +41,7 @@ function crossref(doc::Documenter.Document, page, mdast::MarkdownAST.Node) end function local_links!(node::MarkdownAST.Node, meta, page, doc) - @assert node.element isa Union{MarkdownAST.Link, MarkdownAST.Image} + @assert node.element isa Union{MarkdownAST.Link,MarkdownAST.Image} link_url = node.element.destination # If the link is an absolute URL, then there's nothing we need to do. We'll just # keep the Link object as is, and it should become an external hyperlink in the writer. 
@@ -59,9 +59,10 @@ function local_links!(node::MarkdownAST.Node, meta, page, doc) if isempty(path) if node.element isa MarkdownAST.Image @docerror( - doc, :cross_references, + doc, + :cross_references, "invalid local image: path missing in $(Documenter.locrepr(page.source))", - link=node + link = node ) return end @@ -69,9 +70,10 @@ function local_links!(node::MarkdownAST.Node, meta, page, doc) return elseif Sys.iswindows() && ':' in path @docerror( - doc, :cross_references, + doc, + :cross_references, "invalid local link/image: colons not allowed in paths on Windows in $(Documenter.locrepr(page.source))", - link=node + link = node ) return end @@ -80,9 +82,10 @@ function local_links!(node::MarkdownAST.Node, meta, page, doc) path = normpath(joinpath(dirname(Documenter.pagekey(doc, page)), path)) if startswith(path, "..") @docerror( - doc, :cross_references, + doc, + :cross_references, "invalid local link/image: path pointing to a file outside of build directory in $(Documenter.locrepr(page.source))", - link=node + link = node ) return elseif path in keys(doc.blueprint.pages) @@ -95,9 +98,10 @@ function local_links!(node::MarkdownAST.Node, meta, page, doc) if node.element isa MarkdownAST.Image if !isempty(fragment) @docerror( - doc, :cross_references, + doc, + :cross_references, "invalid local image: path contains a fragment in $(Documenter.locrepr(page.source))", - link=node + link = node ) end node.element = Documenter.LocalImage(path) @@ -107,9 +111,10 @@ function local_links!(node::MarkdownAST.Node, meta, page, doc) return else @docerror( - doc, :cross_references, + doc, + :cross_references, "invalid local link/image: file does not exist in $(Documenter.locrepr(page.source))", - link=node + link = node ) return end @@ -160,7 +165,7 @@ function xref(node::MarkdownAST.Node, meta, page, doc) # so that we wouldn't have to duplicate the @docerror call namedxref(node, slug, meta, page, doc) else - docsxref(node, slug, meta, page, doc; docref = docref) + docsxref(node, 
slug, meta, page, doc; docref=docref) end end @@ -189,7 +194,8 @@ function basicxref(node::MarkdownAST.Node, meta, page, doc) end end md = convert(Markdown.MD, ast) - text = strip(sprint(Markdown.plain, Markdown.Paragraph(md.content[1].content[1].text))) + text = + strip(sprint(Markdown.plain, Markdown.Paragraph(md.content[1].content[1].text))) if occursin(r"#[0-9]+", text) issue_xref(node, lstrip(text, '#'), meta, page, doc) else @@ -217,17 +223,32 @@ function namedxref(node::MarkdownAST.Node, slug, meta, page, doc) page = doc.blueprint.pages[pagekey] node.element = Documenter.PageLink(page, anchor_label(anchor)) else - @docerror(doc, :cross_references, "'$slug' is not unique in $(Documenter.locrepr(page.source)).") + @docerror( + doc, + :cross_references, + "'$slug' is not unique in $(Documenter.locrepr(page.source))." + ) end else - @docerror(doc, :cross_references, "reference for '$slug' could not be found in $(Documenter.locrepr(page.source)).") + @docerror( + doc, + :cross_references, + "reference for '$slug' could not be found in $(Documenter.locrepr(page.source))." + ) end end # Cross referencing docstrings. # ----------------------------- -function docsxref(node::MarkdownAST.Node, code, meta, page, doc; docref = find_docref(code, meta, page)) +function docsxref( + node::MarkdownAST.Node, + code, + meta, + page, + doc; + docref=find_docref(code, meta, page) +) @assert node.element isa MarkdownAST.Link # Add the link to list of local uncheck links. doc.internal.locallinks[node.element] = node.element.destination @@ -249,7 +270,11 @@ function docsxref(node::MarkdownAST.Node, code, meta, page, doc; docref = find_d page = doc.blueprint.pages[pagekey] node.element = Documenter.PageLink(page, slug) else - @docerror(doc, :cross_references, "no doc found for reference '[`$code`](@ref)' in $(Documenter.locrepr(page.source)).") + @docerror( + doc, + :cross_references, + "no doc found for reference '[`$code`](@ref)' in $(Documenter.locrepr(page.source))." 
+ ) end end @@ -264,7 +289,10 @@ function find_docref(code, meta, page) ex = Meta.parse(code) catch err isa(err, Meta.ParseError) || rethrow(err) - return (error = "unable to parse the reference '[`$code`](@ref)' in $(Documenter.locrepr(page.source)).", exception = nothing) + return ( + error="unable to parse the reference '[`$code`](@ref)' in $(Documenter.locrepr(page.source)).", + exception=nothing + ) end end mod = get(meta, :CurrentModule, Main) @@ -275,8 +303,8 @@ function find_docref(code, meta, page) binding = Documenter.DocSystem.binding(mod, ex) catch err return ( - error = "unable to get the binding for '[`$code`](@ref)' in $(Documenter.locrepr(page.source)) from expression '$(repr(ex))' in module $(mod)", - exception = (err, catch_backtrace()), + error="unable to get the binding for '[`$code`](@ref)' in $(Documenter.locrepr(page.source)) from expression '$(repr(ex))' in module $(mod)", + exception=(err, catch_backtrace()), ) return end @@ -286,13 +314,13 @@ function find_docref(code, meta, page) typesig = Core.eval(mod, Documenter.DocSystem.signature(ex, rstrip(code))) catch err return ( - error = "unable to evaluate the type signature for '[`$code`](@ref)' in $(Documenter.locrepr(page.source)) from expression '$(repr(ex))' in module $(mod)", - exception = (err, catch_backtrace()), + error="unable to evaluate the type signature for '[`$code`](@ref)' in $(Documenter.locrepr(page.source)) from expression '$(repr(ex))' in module $(mod)", + exception=(err, catch_backtrace()), ) return end - return (binding = binding, typesig = typesig) + return (binding=binding, typesig=typesig) end """ @@ -335,7 +363,7 @@ function find_object(binding, typesig) return Documenter.Object(binding, typesig) end end -function find_object(λ::Union{Function, DataType}, binding, typesig) +function find_object(λ::Union{Function,DataType}, binding, typesig) if hasmethod(λ, typesig) signature = getsig(λ, typesig) return Documenter.Object(binding, signature) @@ -343,10 +371,11 @@ 
function find_object(λ::Union{Function, DataType}, binding, typesig) return Documenter.Object(binding, typesig) end end -find_object(::Union{Function, DataType}, binding, ::Union{Union,Type{Union{}}}) = Documenter.Object(binding, Union{}) +find_object(::Union{Function,DataType}, binding, ::Union{Union,Type{Union{}}}) = + Documenter.Object(binding, Union{}) find_object(other, binding, typesig) = Documenter.Object(binding, typesig) -getsig(λ::Union{Function, DataType}, typesig) = Base.tuple_type_tail(which(λ, typesig).sig) +getsig(λ::Union{Function,DataType}, typesig) = Base.tuple_type_tail(which(λ, typesig).sig) # Issues/PRs cross referencing. @@ -355,9 +384,14 @@ getsig(λ::Union{Function, DataType}, typesig) = Base.tuple_type_tail(which(λ, function issue_xref(node::MarkdownAST.Node, num, meta, page, doc) @assert node.element isa MarkdownAST.Link # Update issue links starting with a hash, but only if our Remote supports it - issue_url = isnothing(doc.user.remote) ? nothing : Remotes.issueurl(doc.user.remote, num) + issue_url = + isnothing(doc.user.remote) ? nothing : Remotes.issueurl(doc.user.remote, num) if isnothing(issue_url) - @docerror(doc, :cross_references, "unable to generate issue reference for '[`#$num`](@ref)' in $(Documenter.locrepr(page.source)).") + @docerror( + doc, + :cross_references, + "unable to generate issue reference for '[`#$num`](@ref)' in $(Documenter.locrepr(page.source))." + ) else node.element.destination = issue_url end diff --git a/src/deployconfig.jl b/src/deployconfig.jl index 6c10795759..3c6787a281 100644 --- a/src/deployconfig.jl +++ b/src/deployconfig.jl @@ -70,11 +70,11 @@ and `devurl` arguments from [`deploydocs`](@ref). """ function deploy_folder(cfg::DeployConfig; kwargs...) @warn "Documenter.deploy_folder(::$(typeof(cfg)); kwargs...) not implemented. Skipping deployment." - return DeployDecision(; all_ok = false) + return DeployDecision(; all_ok=false) end function deploy_folder(::Nothing; kwargs...) 
@warn "Documenter could not auto-detect the building environment Skipping deployment." - return DeployDecision(; all_ok = false) + return DeployDecision(; all_ok=false) end @enum AuthenticationMethod SSH HTTPS @@ -151,26 +151,33 @@ struct Travis <: DeployConfig travis_event_type::String end function Travis() - travis_branch = get(ENV, "TRAVIS_BRANCH", "") - travis_pull_request = get(ENV, "TRAVIS_PULL_REQUEST", "") - travis_repo_slug = get(ENV, "TRAVIS_REPO_SLUG", "") - travis_tag = get(ENV, "TRAVIS_TAG", "") - travis_event_type = get(ENV, "TRAVIS_EVENT_TYPE", "") - return Travis(travis_branch, travis_pull_request, - travis_repo_slug, travis_tag, travis_event_type) + travis_branch = get(ENV, "TRAVIS_BRANCH", "") + travis_pull_request = get(ENV, "TRAVIS_PULL_REQUEST", "") + travis_repo_slug = get(ENV, "TRAVIS_REPO_SLUG", "") + travis_tag = get(ENV, "TRAVIS_TAG", "") + travis_event_type = get(ENV, "TRAVIS_EVENT_TYPE", "") + return Travis( + travis_branch, + travis_pull_request, + travis_repo_slug, + travis_tag, + travis_event_type + ) end # Check criteria for deployment -function deploy_folder(cfg::Travis; - repo, - repo_previews = repo, - branch = "gh-pages", - branch_previews = branch, - devbranch, - push_preview, - devurl, - tag_prefix = "", - kwargs...) +function deploy_folder( + cfg::Travis; + repo, + repo_previews=repo, + branch="gh-pages", + branch_previews=branch, + devbranch, + push_preview, + devurl, + tag_prefix="", + kwargs... 
+) io = IOBuffer() all_ok = true ## Determine build type; release, devbranch or preview @@ -185,17 +192,26 @@ function deploy_folder(cfg::Travis; ## The deploydocs' repo should match TRAVIS_REPO_SLUG repo_ok = occursin(cfg.travis_repo_slug, repo) all_ok &= repo_ok - println(io, "- $(marker(repo_ok)) ENV[\"TRAVIS_REPO_SLUG\"]=\"$(cfg.travis_repo_slug)\" occurs in repo=\"$(repo)\"") + println( + io, + "- $(marker(repo_ok)) ENV[\"TRAVIS_REPO_SLUG\"]=\"$(cfg.travis_repo_slug)\" occurs in repo=\"$(repo)\"" + ) if build_type === :release ## Do not deploy for PRs pr_ok = cfg.travis_pull_request == "false" - println(io, "- $(marker(pr_ok)) ENV[\"TRAVIS_PULL_REQUEST\"]=\"$(cfg.travis_pull_request)\" is \"false\"") + println( + io, + "- $(marker(pr_ok)) ENV[\"TRAVIS_PULL_REQUEST\"]=\"$(cfg.travis_pull_request)\" is \"false\"" + ) all_ok &= pr_ok tag_nobuild = version_tag_strip_build(cfg.travis_tag; tag_prefix) ## If a tag exist it should be a valid VersionNumber tag_ok = tag_nobuild !== nothing all_ok &= tag_ok - println(io, "- $(marker(tag_ok)) ENV[\"TRAVIS_TAG\"] contains a valid VersionNumber") + println( + io, + "- $(marker(tag_ok)) ENV[\"TRAVIS_TAG\"] contains a valid VersionNumber" + ) deploy_branch = branch deploy_repo = repo is_preview = false @@ -204,12 +220,18 @@ function deploy_folder(cfg::Travis; elseif build_type === :devbranch ## Do not deploy for PRs pr_ok = cfg.travis_pull_request == "false" - println(io, "- $(marker(pr_ok)) ENV[\"TRAVIS_PULL_REQUEST\"]=\"$(cfg.travis_pull_request)\" is \"false\"") + println( + io, + "- $(marker(pr_ok)) ENV[\"TRAVIS_PULL_REQUEST\"]=\"$(cfg.travis_pull_request)\" is \"false\"" + ) all_ok &= pr_ok ## deploydocs' devbranch should match TRAVIS_BRANCH branch_ok = !isempty(cfg.travis_tag) || cfg.travis_branch == devbranch all_ok &= branch_ok - println(io, "- $(marker(branch_ok)) ENV[\"TRAVIS_BRANCH\"] matches devbranch=\"$(devbranch)\"") + println( + io, + "- $(marker(branch_ok)) ENV[\"TRAVIS_BRANCH\"] matches 
devbranch=\"$(devbranch)\"" + ) deploy_branch = branch deploy_repo = repo is_preview = false @@ -219,10 +241,16 @@ function deploy_folder(cfg::Travis; pr_number = tryparse(Int, cfg.travis_pull_request) pr_ok = pr_number !== nothing all_ok &= pr_ok - println(io, "- $(marker(pr_ok)) ENV[\"TRAVIS_PULL_REQUEST\"]=\"$(cfg.travis_pull_request)\" is a number") + println( + io, + "- $(marker(pr_ok)) ENV[\"TRAVIS_PULL_REQUEST\"]=\"$(cfg.travis_pull_request)\" is a number" + ) btype_ok = push_preview all_ok &= btype_ok - println(io, "- $(marker(btype_ok)) `push_preview` keyword argument to deploydocs is `true`") + println( + io, + "- $(marker(btype_ok)) `push_preview` keyword argument to deploydocs is `true`" + ) deploy_branch = branch_previews deploy_repo = repo_previews is_preview = true @@ -236,10 +264,16 @@ function deploy_folder(cfg::Travis; ## Cron jobs should not deploy type_ok = cfg.travis_event_type != "cron" all_ok &= type_ok - println(io, "- $(marker(type_ok)) ENV[\"TRAVIS_EVENT_TYPE\"]=\"$(cfg.travis_event_type)\" is not \"cron\"") + println( + io, + "- $(marker(type_ok)) ENV[\"TRAVIS_EVENT_TYPE\"]=\"$(cfg.travis_event_type)\" is not \"cron\"" + ) print(io, "Deploying: $(marker(all_ok))") @info String(take!(io)) - if build_type === :devbranch && !branch_ok && devbranch == "master" && cfg.travis_branch == "main" + if build_type === :devbranch && + !branch_ok && + devbranch == "master" && + cfg.travis_branch == "main" @warn """ Possible deploydocs() misconfiguration: main vs master Documenter's configured primary development branch (`devbranch`) is "master", but the @@ -253,13 +287,15 @@ function deploy_folder(cfg::Travis; """ end if all_ok - return DeployDecision(; all_ok = true, - branch = deploy_branch, - is_preview = is_preview, - repo = deploy_repo, - subfolder = subfolder) + return DeployDecision(; + all_ok=true, + branch=deploy_branch, + is_preview=is_preview, + repo=deploy_repo, + subfolder=subfolder + ) else - return DeployDecision(; all_ok = false) + 
return DeployDecision(; all_ok=false) end end @@ -298,21 +334,23 @@ end function GitHubActions() github_repository = get(ENV, "GITHUB_REPOSITORY", "") # "JuliaDocs/Documenter.jl" github_event_name = get(ENV, "GITHUB_EVENT_NAME", "") # "push", "pull_request" or "cron" (?) - github_ref = get(ENV, "GITHUB_REF", "") # "refs/heads/$(branchname)" for branch, "refs/tags/$(tagname)" for tags + github_ref = get(ENV, "GITHUB_REF", "") # "refs/heads/$(branchname)" for branch, "refs/tags/$(tagname)" for tags return GitHubActions(github_repository, github_event_name, github_ref) end # Check criteria for deployment -function deploy_folder(cfg::GitHubActions; - repo, - repo_previews = repo, - branch = "gh-pages", - branch_previews = branch, - devbranch, - push_preview, - devurl, - tag_prefix = "", - kwargs...) +function deploy_folder( + cfg::GitHubActions; + repo, + repo_previews=repo, + branch="gh-pages", + branch_previews=branch, + devbranch, + push_preview, + devurl, + tag_prefix="", + kwargs... +) io = IOBuffer() all_ok = true ## Determine build type @@ -323,22 +361,34 @@ function deploy_folder(cfg::GitHubActions; else build_type = :devbranch end - println(io, "Deployment criteria for deploying $(build_type) build from GitHub Actions:") + println( + io, + "Deployment criteria for deploying $(build_type) build from GitHub Actions:" + ) ## The deploydocs' repo should match GITHUB_REPOSITORY repo_ok = occursin(cfg.github_repository, repo) all_ok &= repo_ok - println(io, "- $(marker(repo_ok)) ENV[\"GITHUB_REPOSITORY\"]=\"$(cfg.github_repository)\" occurs in repo=\"$(repo)\"") + println( + io, + "- $(marker(repo_ok)) ENV[\"GITHUB_REPOSITORY\"]=\"$(cfg.github_repository)\" occurs in repo=\"$(repo)\"" + ) if build_type === :release ## Do not deploy for PRs event_ok = in(cfg.github_event_name, ["push", "workflow_dispatch", "schedule"]) all_ok &= event_ok - println(io, "- $(marker(event_ok)) ENV[\"GITHUB_EVENT_NAME\"]=\"$(cfg.github_event_name)\" is \"push\", \"workflow_dispatch\" or 
\"schedule\"") + println( + io, + "- $(marker(event_ok)) ENV[\"GITHUB_EVENT_NAME\"]=\"$(cfg.github_event_name)\" is \"push\", \"workflow_dispatch\" or \"schedule\"" + ) ## If a tag exist it should be a valid VersionNumber m = match(r"^refs\/tags\/(.*)$", cfg.github_ref) tag_nobuild = version_tag_strip_build(m.captures[1]; tag_prefix) tag_ok = tag_nobuild !== nothing all_ok &= tag_ok - println(io, "- $(marker(tag_ok)) ENV[\"GITHUB_REF\"]=\"$(cfg.github_ref)\" contains a valid VersionNumber") + println( + io, + "- $(marker(tag_ok)) ENV[\"GITHUB_REF\"]=\"$(cfg.github_ref)\" contains a valid VersionNumber" + ) deploy_branch = branch deploy_repo = repo is_preview = false @@ -348,12 +398,18 @@ function deploy_folder(cfg::GitHubActions; ## Do not deploy for PRs event_ok = in(cfg.github_event_name, ["push", "workflow_dispatch", "schedule"]) all_ok &= event_ok - println(io, "- $(marker(event_ok)) ENV[\"GITHUB_EVENT_NAME\"]=\"$(cfg.github_event_name)\" is \"push\", \"workflow_dispatch\" or \"schedule\"") + println( + io, + "- $(marker(event_ok)) ENV[\"GITHUB_EVENT_NAME\"]=\"$(cfg.github_event_name)\" is \"push\", \"workflow_dispatch\" or \"schedule\"" + ) ## deploydocs' devbranch should match the current branch m = match(r"^refs\/heads\/(.*)$", cfg.github_ref) branch_ok = m === nothing ? 
false : String(m.captures[1]) == devbranch all_ok &= branch_ok - println(io, "- $(marker(branch_ok)) ENV[\"GITHUB_REF\"] matches devbranch=\"$(devbranch)\"") + println( + io, + "- $(marker(branch_ok)) ENV[\"GITHUB_REF\"] matches devbranch=\"$(devbranch)\"" + ) deploy_branch = branch deploy_repo = repo is_preview = false @@ -366,13 +422,20 @@ function deploy_folder(cfg::GitHubActions; all_ok &= pr_ok println(io, "- $(marker(pr_ok)) ENV[\"GITHUB_REF\"] corresponds to a PR number") if pr_ok - pr_origin_matches_repo = verify_github_pull_repository(cfg.github_repository, pr_number) + pr_origin_matches_repo = + verify_github_pull_repository(cfg.github_repository, pr_number) all_ok &= pr_origin_matches_repo - println(io, "- $(marker(pr_origin_matches_repo)) PR originates from the same repository") + println( + io, + "- $(marker(pr_origin_matches_repo)) PR originates from the same repository" + ) end btype_ok = push_preview all_ok &= btype_ok - println(io, "- $(marker(btype_ok)) `push_preview` keyword argument to deploydocs is `true`") + println( + io, + "- $(marker(btype_ok)) `push_preview` keyword argument to deploydocs is `true`" + ) deploy_branch = branch_previews deploy_repo = repo_previews is_preview = true @@ -393,11 +456,17 @@ function deploy_folder(cfg::GitHubActions; elseif token_ok println(io, "- $(marker(token_ok)) ENV[\"GITHUB_TOKEN\"] exists and is non-empty") else - println(io, "- $(marker(auth_ok)) ENV[\"DOCUMENTER_KEY\"] or ENV[\"GITHUB_TOKEN\"] exists and is non-empty") + println( + io, + "- $(marker(auth_ok)) ENV[\"DOCUMENTER_KEY\"] or ENV[\"GITHUB_TOKEN\"] exists and is non-empty" + ) end print(io, "Deploying: $(marker(all_ok))") @info String(take!(io)) - if build_type === :devbranch && !branch_ok && devbranch == "master" && cfg.github_ref == "refs/heads/main" + if build_type === :devbranch && + !branch_ok && + devbranch == "master" && + cfg.github_ref == "refs/heads/main" @warn """ Possible deploydocs() misconfiguration: main vs master Documenter's 
configured primary development branch (`devbranch`) is "master", but the @@ -411,13 +480,15 @@ function deploy_folder(cfg::GitHubActions; """ end if all_ok - return DeployDecision(; all_ok = true, - branch = deploy_branch, - is_preview = is_preview, - repo = deploy_repo, - subfolder = subfolder) + return DeployDecision(; + all_ok=true, + branch=deploy_branch, + is_preview=is_preview, + repo=deploy_repo, + subfolder=subfolder + ) else - return DeployDecision(; all_ok = false) + return DeployDecision(; all_ok=false) end end @@ -454,7 +525,7 @@ function post_status(::GitHubActions; type, repo::String, subfolder=nothing, kwa if haskey(event, "pull_request") && haskey(event["pull_request"], "head") && haskey(event["pull_request"]["head"], "sha") - sha = event["pull_request"]["head"]["sha"] + sha = event["pull_request"]["head"]["sha"] end elseif get(ENV, "GITHUB_EVENT_NAME", nothing) == "push" sha = get(ENV, "GITHUB_SHA", nothing) @@ -466,7 +537,12 @@ function post_status(::GitHubActions; type, repo::String, subfolder=nothing, kwa end end -function post_github_status(type::S, deploydocs_repo::S, sha::S, subfolder=nothing) where S <: String +function post_github_status( + type::S, + deploydocs_repo::S, + sha::S, + subfolder=nothing +) where {S<:String} try Sys.which("curl") === nothing && return ## Extract owner and repository name @@ -483,7 +559,7 @@ function post_github_status(type::S, deploydocs_repo::S, sha::S, subfolder=nothi push!(cmd.exec, "-H", "Authorization: token $(auth)") push!(cmd.exec, "-H", "User-Agent: Documenter.jl") push!(cmd.exec, "-H", "Content-Type: application/json") - json = Dict{String,Any}("context" => "documenter/deploy", "state"=>type) + json = Dict{String,Any}("context" => "documenter/deploy", "state" => type) if type == "pending" json["description"] = "Documentation build in progress" elseif type == "success" @@ -505,7 +581,7 @@ function post_github_status(type::S, deploydocs_repo::S, sha::S, subfolder=nothi # Run the command (silently) io = 
IOBuffer() res = run(pipeline(cmd; stdout=io, stderr=devnull)) - @debug "Response of curl POST request" response=String(take!(io)) + @debug "Response of curl POST request" response = String(take!(io)) catch @debug "Failed to post status" end @@ -540,9 +616,9 @@ end function run_and_capture(cmd) stdout_buffer, stderr_buffer = IOBuffer(), IOBuffer() - run(pipeline(cmd; stdout = stdout_buffer, stderr = stderr_buffer)) + run(pipeline(cmd; stdout=stdout_buffer, stderr=stderr_buffer)) stdout, stderr = String(take!(stdout_buffer)), String(take!(stderr_buffer)) - return (; stdout = stdout, stderr = stderr) + return (; stdout=stdout, stderr=stderr) end ########## @@ -597,13 +673,13 @@ end function deploy_folder( cfg::GitLab; repo, - repo_previews = repo, + repo_previews=repo, devbranch, push_preview, devurl, - branch = "gh-pages", - branch_previews = branch, - tag_prefix = "", + branch="gh-pages", + branch_previews=branch, + tag_prefix="", kwargs..., ) io = IOBuffer() @@ -682,14 +758,14 @@ function deploy_folder( if all_ok return DeployDecision(; - all_ok = true, - branch = deploy_branch, - repo = deploy_repo, - subfolder = subfolder, - is_preview = is_preview, + all_ok=true, + branch=deploy_branch, + repo=deploy_repo, + subfolder=subfolder, + is_preview=is_preview, ) else - return DeployDecision(; all_ok = false) + return DeployDecision(; all_ok=false) end end @@ -740,13 +816,13 @@ end function deploy_folder( cfg::Buildkite; repo, - repo_previews = repo, + repo_previews=repo, devbranch, push_preview, devurl, - branch = "gh-pages", - branch_previews = branch, - tag_prefix = "", + branch="gh-pages", + branch_previews=branch, + tag_prefix="", kwargs..., ) io = IOBuffer() @@ -820,7 +896,10 @@ function deploy_folder( print(io, "Deploying to folder $(repr(subfolder)): $(marker(all_ok))") @info String(take!(io)) - if build_type === :devbranch && !branch_ok && devbranch == "master" && cfg.commit_branch == "main" + if build_type === :devbranch && + !branch_ok && + devbranch == 
"master" && + cfg.commit_branch == "main" @warn """ Possible deploydocs() misconfiguration: main vs master Documenter's configured primary development branch (`devbranch`) is "master", but the @@ -836,14 +915,14 @@ function deploy_folder( if all_ok return DeployDecision(; - all_ok = true, - branch = deploy_branch, - repo = deploy_repo, - subfolder = subfolder, - is_preview = is_preview, + all_ok=true, + branch=deploy_branch, + repo=deploy_repo, + subfolder=subfolder, + is_preview=is_preview, ) else - return DeployDecision(; all_ok = false) + return DeployDecision(; all_ok=false) end end @@ -962,7 +1041,8 @@ function Woodpecker() else woodpecker_ci_version = VersionNumber(ENV["CI_SYSTEM_VERSION"]) @warn "Current Woodpecker version is $(woodpecker_ci_version). Make sure this is correct." - if ENV["CI"] == "drone" && (v"1.0.0" > VersionNumber(ENV["CI_SYSTEM_VERSION"]) >= v"0.15.0") + if ENV["CI"] == "drone" && + (v"1.0.0" > VersionNumber(ENV["CI_SYSTEM_VERSION"]) >= v"0.15.0") @warn """Woodpecker prior version 1.0.0 is backward compatible to Drone but *there will be breaking changes in the future*. 
Please update to a newer version """ @@ -979,14 +1059,28 @@ function Woodpecker() woodpecker_repo = get(ENV, "CI_REPO", "") woodpecker_event_name = get(ENV, "CI_BUILD_EVENT", "") woodpecker_ref = get(ENV, "CI_COMMIT_REF", "") - return Woodpecker(woodpecker_ci_version, woodpecker_forge_url, woodpecker_repo, woodpecker_tag, woodpecker_event_name, woodpecker_ref) + return Woodpecker( + woodpecker_ci_version, + woodpecker_forge_url, + woodpecker_repo, + woodpecker_tag, + woodpecker_event_name, + woodpecker_ref + ) else woodpecker_forge_url = get(ENV, "CI_FORGE_URL", "") woodpecker_tag = get(ENV, "CI_COMMIT_TAG", "") woodpecker_repo = get(ENV, "CI_REPO", "") # repository full name / woodpecker_event_name = get(ENV, "CI_PIPELINE_EVENT", "") # build event (push, pull_request, tag, deployment) woodpecker_ref = get(ENV, "CI_COMMIT_REF", "") # commit ref - return Woodpecker(woodpecker_ci_version, woodpecker_forge_url, woodpecker_repo, woodpecker_tag, woodpecker_event_name, woodpecker_ref) + return Woodpecker( + woodpecker_ci_version, + woodpecker_forge_url, + woodpecker_repo, + woodpecker_tag, + woodpecker_event_name, + woodpecker_ref + ) end end @@ -1000,7 +1094,8 @@ function deploy_folder( push_preview, devurl, tag_prefix="", - kwargs...) + kwargs... 
+) io = IOBuffer() all_ok = true if cfg.woodpecker_event_name == "pull_request" @@ -1019,7 +1114,10 @@ function deploy_folder( repo_ok = occursin(cfg.woodpecker_repo, repo) all_ok &= repo_ok - println(io, "- $(marker(repo_ok)) ENV[\"CI_REPO\"]=\"$(cfg.woodpecker_repo)\" occursin in repo=\"$(repo)\"") + println( + io, + "- $(marker(repo_ok)) ENV[\"CI_REPO\"]=\"$(cfg.woodpecker_repo)\" occursin in repo=\"$(repo)\"" + ) ci_event_env_name = if haskey(ENV, "CI_PIPELINE_EVENT") "ENV[\"CI_PIPELINE_EVENT\"]" @@ -1028,13 +1126,20 @@ function deploy_folder( end if build_type === :release - event_ok = in(cfg.woodpecker_event_name, ["push", "pull_request", "deployment", "tag"]) + event_ok = + in(cfg.woodpecker_event_name, ["push", "pull_request", "deployment", "tag"]) all_ok &= event_ok - println(io, "- $(marker(event_ok)) $(ci_event_env_name)=\"$(cfg.woodpecker_event_name)\" is \"push\", \"deployment\" or \"tag\"") + println( + io, + "- $(marker(event_ok)) $(ci_event_env_name)=\"$(cfg.woodpecker_event_name)\" is \"push\", \"deployment\" or \"tag\"" + ) tag_nobuild = version_tag_strip_build(cfg.woodpecker_tag; tag_prefix) tag_ok = tag_nobuild !== nothing all_ok &= tag_ok - println(io, "- $(marker(tag_ok)) ENV[\"CI_COMMIT_TAG\"]=\"$(cfg.woodpecker_tag)\" contains a valid VersionNumber") + println( + io, + "- $(marker(tag_ok)) ENV[\"CI_COMMIT_TAG\"]=\"$(cfg.woodpecker_tag)\" contains a valid VersionNumber" + ) deploy_branch = branch deploy_repo = repo is_preview = false @@ -1042,14 +1147,21 @@ function deploy_folder( subfolder = tag_nobuild elseif build_type === :devbranch ## Do not deploy for PRs - event_ok = in(cfg.woodpecker_event_name, ["push", "pull_request", "deployment", "tag"]) + event_ok = + in(cfg.woodpecker_event_name, ["push", "pull_request", "deployment", "tag"]) all_ok &= event_ok - println(io, "- $(marker(event_ok)) $(ci_event_env_name)=\"$(cfg.woodpecker_event_name)\" is \"push\", \"deployment\", or \"tag\"") + println( + io, + "- $(marker(event_ok)) 
$(ci_event_env_name)=\"$(cfg.woodpecker_event_name)\" is \"push\", \"deployment\", or \"tag\"" + ) ## deploydocs' devbranch should match the current branch m = match(r"^refs\/heads\/(.*)$", cfg.woodpecker_ref) branch_ok = (m === nothing) ? false : String(m.captures[1]) == devbranch all_ok &= branch_ok - println(io, "- $(marker(branch_ok)) ENV[\"CI_COMMIT_REF\"] matches devbranch=\"$(devbranch)\"") + println( + io, + "- $(marker(branch_ok)) ENV[\"CI_COMMIT_REF\"] matches devbranch=\"$(devbranch)\"" + ) deploy_branch = branch deploy_repo = repo is_preview = false @@ -1058,17 +1170,34 @@ function deploy_folder( else # build_type === :preview m = match(r"refs\/pull\/(\d+)\/merge", cfg.woodpecker_ref) pr_number1 = tryparse(Int, (m === nothing) ? "" : m.captures[1]) - pr_number2 = tryparse(Int, get(ENV, "CI_COMMIT_PULL_REQUEST", nothing) === nothing ? "" : ENV["CI_COMMIT_PULL_REQUEST"]) + pr_number2 = tryparse( + Int, + if get(ENV, "CI_COMMIT_PULL_REQUEST", nothing) === nothing + "" + else + ENV["CI_COMMIT_PULL_REQUEST"] + end + ) # Check if both are Ints. If both are Ints, then check if they are equal, otherwise, return false - pr_numbers_ok = all(x -> x isa Int, [pr_number1, pr_number2]) ? 
(pr_number1 == pr_number2) : false + pr_numbers_ok = if all(x -> x isa Int, [pr_number1, pr_number2]) + (pr_number1 == pr_number2) + else + false + end is_pull_request_ok = cfg.woodpecker_event_name == "pull_request" pr_ok = pr_numbers_ok == is_pull_request_ok all_ok &= pr_ok println(io, "- $(marker(pr_numbers_ok)) ENV[\"CI_COMMIT_REF\"] corresponds to a PR") - println(io, "- $(marker(is_pull_request_ok)) $(ci_event_env_name) matches built type: `pull_request`") + println( + io, + "- $(marker(is_pull_request_ok)) $(ci_event_env_name) matches built type: `pull_request`" + ) btype_ok = push_preview all_ok &= btype_ok - println(io, "- $(marker(btype_ok)) `push_preview` keyword argument to deploydocs is `true`") + println( + io, + "- $(marker(btype_ok)) `push_preview` keyword argument to deploydocs is `true`" + ) deploy_branch = branch_previews deploy_repo = repo_previews is_preview = true @@ -1084,21 +1213,31 @@ function deploy_folder( if key_ok println(io, "- $(marker(key_ok)) ENV[\"DOCUMENTER_KEY\"] exists and is non-empty") elseif token_ok - println(io, "- $(marker(token_ok)) ENV[\"PROJECT_ACCESS_TOKEN\"] exists and is non-empty") + println( + io, + "- $(marker(token_ok)) ENV[\"PROJECT_ACCESS_TOKEN\"] exists and is non-empty" + ) else - println(io, "- $(marker(auth_ok)) ENV[\"DOCUMENTER_KEY\"] or ENV[\"PROJECT_ACCESS_TOKEN\"] exists and is non-empty") + println( + io, + "- $(marker(auth_ok)) ENV[\"DOCUMENTER_KEY\"] or ENV[\"PROJECT_ACCESS_TOKEN\"] exists and is non-empty" + ) end print(io, "Deploying: $(marker(all_ok))") @info String(take!(io)) - if build_type === :devbranch && !branch_ok && devbranch == "master" && cfg.woodpecker_ref == "refs/heads/main" + if build_type === :devbranch && + !branch_ok && + devbranch == "master" && + cfg.woodpecker_ref == "refs/heads/main" @warn """ Possible deploydocs() misconfiguration: main vs master. Current branch (from \$CI_COMMIT_REF) is "main". 
""" end if all_ok - return DeployDecision(; all_ok=true, + return DeployDecision(; + all_ok=true, branch=deploy_branch, is_preview=is_preview, repo=deploy_repo, diff --git a/src/deploydocs.jl b/src/deploydocs.jl index 8b67869ac5..3652603f55 100644 --- a/src/deploydocs.jl +++ b/src/deploydocs.jl @@ -179,29 +179,24 @@ using the [`deploydocs`](@ref) function to automatically generate docs and push GitHub. """ function deploydocs(; - root = currentdir(), - target = "build", - dirname = "", - - repo = error("no 'repo' keyword provided."), - branch = "gh-pages", - - repo_previews = repo, - branch_previews = branch, - - deps = nothing, - make = nothing, - - devbranch = nothing, - devurl = "dev", - versions = ["stable" => "v^", "v#.#", devurl => devurl], - forcepush::Bool = false, - deploy_config = auto_detect_deploy_system(), - push_preview::Bool = false, - tag_prefix = "", - - archive = nothing, # experimental and undocumented - ) + root=currentdir(), + target="build", + dirname="", + repo = error("no 'repo' keyword provided."), + branch = "gh-pages", + repo_previews = repo, + branch_previews = branch, + deps = nothing, + make = nothing, + devbranch=nothing, + devurl="dev", + versions=["stable" => "v^", "v#.#", devurl => devurl], + forcepush::Bool=false, + deploy_config=auto_detect_deploy_system(), + push_preview::Bool=false, + tag_prefix="", + archive=nothing, # experimental and undocumented +) # Try to figure out default branch (see #1443 and #1727) if devbranch === nothing @@ -215,15 +210,17 @@ function deploydocs(; ispath(archive) && error("Output archive exists: $archive") end - deploy_decision = deploy_folder(deploy_config; - branch=branch, - branch_previews=branch_previews, - devbranch=devbranch, - devurl=devurl, - push_preview=push_preview, - repo=repo, - repo_previews=repo_previews, - tag_prefix) + deploy_decision = deploy_folder( + deploy_config; + branch=branch, + branch_previews=branch_previews, + devbranch=devbranch, + devurl=devurl, + 
push_preview=push_preview, + repo=repo, + repo_previews=repo_previews, + tag_prefix + ) if deploy_decision.all_ok deploy_branch = deploy_decision.branch deploy_repo = deploy_decision.repo @@ -271,11 +268,20 @@ function deploydocs(; @debug "pushing new documentation to remote: '$deploy_repo:$deploy_branch'." mktempdir() do temp git_push( - root, temp, deploy_repo; - branch=deploy_branch, dirname=dirname, target=target, - sha=sha, deploy_config=deploy_config, subfolder=deploy_subfolder, - devurl=devurl, versions=versions, forcepush=forcepush, - is_preview=deploy_is_preview, archive=archive, + root, + temp, + deploy_repo; + branch=deploy_branch, + dirname=dirname, + target=target, + sha=sha, + deploy_config=deploy_config, + subfolder=deploy_subfolder, + devurl=devurl, + versions=versions, + forcepush=forcepush, + is_preview=deploy_is_preview, + archive=archive, ) end end @@ -293,11 +299,21 @@ Handles pushing changes to the remote documentation branch. The documentation are placed in the folder specified by `subfolder`. """ function git_push( - root, temp, repo; - branch="gh-pages", dirname="", target="site", sha="", devurl="dev", - versions, forcepush=false, deploy_config, subfolder, - is_preview::Bool = false, archive, - ) + root, + temp, + repo; + branch="gh-pages", + dirname="", + target="site", + sha="", + devurl="dev", + versions, + forcepush=false, + deploy_config, + subfolder, + is_preview::Bool=false, + archive, +) dirname = isempty(dirname) ? temp : joinpath(temp, dirname) isdir(dirname) || mkpath(dirname) @@ -358,7 +374,11 @@ function git_push( # Create the versions.js file containing a list of `entries`. # This must always happen after the folder copying. - HTMLWriter.generate_version_file(joinpath(dirname, "versions.js"), entries, symlinks) + HTMLWriter.generate_version_file( + joinpath(dirname, "versions.js"), + entries, + symlinks + ) # Create the index.html file to redirect ./stable or ./dev. # This must always happen after the folder copying. 
@@ -371,15 +391,23 @@ function git_push( if i === nothing rm_and_add_symlink(kv.second, kv.first) else - throw(ArgumentError(string("link `$(kv)` cannot overwrite ", - "`devurl = $(devurl)` with the same name."))) + throw( + ArgumentError( + string( + "link `$(kv)` cannot overwrite ", + "`devurl = $(devurl)` with the same name." + ) + ) + ) end end end end # Add, commit, and push the docs to the remote. - run(`$(git()) add -A -- ':!.documenter-identity-file.tmp' ':!**/.documenter-identity-file.tmp'`) + run( + `$(git()) add -A -- ':!.documenter-identity-file.tmp' ':!**/.documenter-identity-file.tmp'` + ) if !success(`$(git()) diff --cached --exit-code`) if !isnothing(archive) run(`$(git()) commit -m "build based on $sha"`) @@ -423,16 +451,18 @@ function git_push( try mktemp() do sshconfig, io - print(io, - """ - Host $host - StrictHostKeyChecking no - User $user - HostName $host - IdentityFile "$keyfile" - IdentitiesOnly yes - BatchMode yes - """) + print( + io, + """ + Host $host + StrictHostKeyChecking no + User $user + HostName $host + IdentityFile "$keyfile" + IdentitiesOnly yes + BatchMode yes + """ + ) close(io) chmod(sshconfig, 0o600) # git config core.sshCommand requires git 2.10.0, but @@ -443,7 +473,7 @@ function git_push( end post_status(deploy_config; repo=repo, type="success", subfolder=subfolder) catch e - @error "Failed to push:" exception=(e, catch_backtrace()) + @error "Failed to push:" exception = (e, catch_backtrace()) post_status(deploy_config; repo=repo, type="error") rethrow(e) finally @@ -457,7 +487,7 @@ function git_push( cd(() -> withenv(git_commands, NO_KEY_ENV...), temp) post_status(deploy_config; repo=repo, type="success", subfolder=subfolder) catch e - @error "Failed to push:" exception=(e, catch_backtrace()) + @error "Failed to push:" exception = (e, catch_backtrace()) post_status(deploy_config; repo=repo, type="error") rethrow(e) end @@ -467,7 +497,7 @@ end function rm_and_add_symlink(target, link) if ispath(link) || islink(link) @warn 
"removing `$(link)` and linking `$(link)` to `$(target)`." - rm(link; force = true, recursive = true) + rm(link; force=true, recursive=true) end symlink(target, link) end @@ -480,7 +510,8 @@ Disassemble repo address into user, host, and path to repo. If no user is given, """ function user_host_upstream(repo) # If the repo path contains the protocol, throw immediately an error. - occursin(r"^[a-z]+://", repo) && error("The repo path $(repo) should not contain the protocol") + occursin(r"^[a-z]+://", repo) && + error("The repo path $(repo) should not contain the protocol") #= the regex has three parts: (?:([^@]*)@)? matches any number of characters up to the first "@", if present, capturing only the characters before the "@" - this captures the username diff --git a/src/docchecks.jl b/src/docchecks.jl index 89b490fbc5..5d56233352 100644 --- a/src/docchecks.jl +++ b/src/docchecks.jl @@ -27,10 +27,13 @@ function missingdocs(doc::Document) end end println(b) - print(b, """ - These are docstrings in the checked modules (configured with the modules keyword) - that are not included in canonical @docs or @autodocs blocks. - """) + print( + b, + """ + These are docstrings in the checked modules (configured with the modules keyword) + that are not included in canonical @docs or @autodocs blocks. + """ + ) @docerror(doc, :missing_docs, String(take!(b))) end return n @@ -48,13 +51,17 @@ function missingbindings(doc::Document) # module bindings that come from Docs.meta() always appear to be of the form # Docs.Binding(Mod.SubMod, :SubMod) (since Julia 0.7). We therefore "normalize" # module bindings before we search in the list returned by allbindings(). - binding = if DocSystem.defined(object.binding) && !DocSystem.iskeyword(object.binding) - m = DocSystem.resolve(object.binding) - isa(m, Module) && nameof(object.binding.mod) != object.binding.var ? 
- Docs.Binding(m, nameof(m)) : object.binding - else - object.binding - end + binding = + if DocSystem.defined(object.binding) && !DocSystem.iskeyword(object.binding) + m = DocSystem.resolve(object.binding) + if isa(m, Module) && nameof(object.binding.mod) != object.binding.var + Docs.Binding(m, nameof(m)) + else + object.binding + end + else + object.binding + end if haskey(bindings, binding) signatures = bindings[binding] if object.signature ≡ Union{} || length(signatures) ≡ 1 @@ -68,14 +75,14 @@ function missingbindings(doc::Document) end function allbindings(checkdocs::Symbol, mods) - out = Dict{Binding, Set{Type}}() + out = Dict{Binding,Set{Type}}() for m in mods allbindings(checkdocs, m, out) end out end -function allbindings(checkdocs::Symbol, mod::Module, out = Dict{Binding, Set{Type}}()) +function allbindings(checkdocs::Symbol, mod::Module, out=Dict{Binding,Set{Type}}()) for (binding, doc) in meta(mod) # The keys of the docs meta dictionary should always be Docs.Binding objects in # practice. However, the key type is Any, so it is theoretically possible that @@ -117,9 +124,9 @@ function footnotes(doc::Document) # # For all ids the final result should be `(N, 1)` where `N > 1`, i.e. one or more # footnote references and a single footnote body. - footnotes = Dict{Page, Dict{String, Tuple{Int, Int}}}() + footnotes = Dict{Page,Dict{String,Tuple{Int,Int}}}() for (src, page) in doc.blueprint.pages - orphans = Dict{String, Tuple{Int, Int}}() + orphans = Dict{String,Tuple{Int,Int}}() for node in AbstractTrees.PreOrderDFS(page.mdast) footnote(node.element, orphans) end @@ -129,15 +136,27 @@ function footnotes(doc::Document) for (id, (ids, bodies)) in orphans # Multiple footnote bodies. if bodies > 1 - @docerror(doc, :footnote, "footnote '$id' has $bodies bodies in $(locrepr(page.source)).") + @docerror( + doc, + :footnote, + "footnote '$id' has $bodies bodies in $(locrepr(page.source))." + ) end # No footnote references for an id. 
if ids === 0 - @docerror(doc, :footnote, "unused footnote named '$id' in $(locrepr(page.source)).") + @docerror( + doc, + :footnote, + "unused footnote named '$id' in $(locrepr(page.source))." + ) end # No footnote bodies for an id. if bodies === 0 - @docerror(doc, :footnote, "no footnotes found for '$id' in $(locrepr(page.source)).") + @docerror( + doc, + :footnote, + "no footnotes found for '$id' in $(locrepr(page.source))." + ) end end end @@ -158,7 +177,13 @@ footnote(other, orphans::Dict) = true # Link Checks. # ------------ -hascurl() = (try; success(`curl --version`); catch err; false; end) +hascurl() = ( + try + success(`curl --version`) + catch err + false + end +) """ $(SIGNATURES) @@ -184,14 +209,23 @@ function linkcheck(mdast::MarkdownAST.Node, doc::Document) end end -function linkcheck(node::MarkdownAST.Node, element::MarkdownAST.AbstractElement, doc::Document) +function linkcheck( + node::MarkdownAST.Node, + element::MarkdownAST.AbstractElement, + doc::Document +) # The linkcheck is only active for specific `element` types # (`MarkdownAST.Link`, most importantly), which are defined below as more # specific methods return nothing end -function linkcheck(node::MarkdownAST.Node, link::MarkdownAST.Link, doc::Document; method::Symbol=:HEAD) +function linkcheck( + node::MarkdownAST.Node, + link::MarkdownAST.Link, + doc::Document; + method::Symbol=:HEAD +) # first, make sure we're not supposed to ignore this link for r in doc.user.linkcheck_ignore @@ -212,12 +246,16 @@ function linkcheck(node::MarkdownAST.Node, link::MarkdownAST.Link, doc::Document # Mozilla developer docs, but only is it's a HTTP(S) request. # # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent#chrome_ua_string - fakebrowser = startswith(uppercase(link.destination), "HTTP") ? 
[ + fakebrowser = if startswith(uppercase(link.destination), "HTTP") + [ "--user-agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36", "-H", "accept-encoding: gzip, deflate, br", - ] : "" + ] + else + "" + end cmd = `curl $(method === :HEAD ? "-sI" : "-s") --proto =http,https,ftp,ftps $(fakebrowser) $(link.destination) --max-time $timeout -o $null_file --write-out "%{http_code} %{url_effective} %{redirect_url}"` local result @@ -234,11 +272,16 @@ function linkcheck(node::MarkdownAST.Node, link::MarkdownAST.Link, doc::Document status, scheme, location = matched.captures status = parse(Int, status) scheme = uppercase(scheme) - protocol = startswith(scheme, "HTTP") ? :HTTP : - startswith(scheme, "FTP") ? :FTP : :UNKNOWN + protocol = if startswith(scheme, "HTTP") + :HTTP + elseif startswith(scheme, "FTP") + :FTP + else + :UNKNOWN + end if (protocol === :HTTP && (status < 300 || status == 302)) || - (protocol === :FTP && (200 <= status < 300 || status == 350)) + (protocol === :FTP && (200 <= status < 300 || status == 350)) if location !== nothing @debug "linkcheck '$(link.destination)' status: $(status), redirects to '$(location)'" else @@ -255,7 +298,11 @@ function linkcheck(node::MarkdownAST.Node, link::MarkdownAST.Link, doc::Document @debug "linkcheck '$(link.destination)' status: $(status), retrying without `-I`" return linkcheck(node, link, doc; method=:GET) else - @docerror(doc, :linkcheck, "linkcheck '$(link.destination)' status: $(status).") + @docerror( + doc, + :linkcheck, + "linkcheck '$(link.destination)' status: $(status)." + ) end else @docerror(doc, :linkcheck, "invalid result returned by $cmd:", result) @@ -305,7 +352,9 @@ function tag(repo, tag_ref) if status == 404 return nothing elseif status != 200 - error("Unexpected error code $(status) '$(repo)' while getting tag '$(tag_ref)'.") + error( + "Unexpected error code $(status) '$(repo)' while getting tag '$(tag_ref)'." 
+ ) end end return result @@ -314,7 +363,9 @@ end function gitcommit(repo, commit_tag) status, result = gh_get_json("/repos/$(repo)/git/commits/$(commit_tag)") if status != 200 - error("Unexpected error code $(status) '$(repo)' while getting commit '$(commit_tag)'.") + error( + "Unexpected error code $(status) '$(repo)' while getting commit '$(commit_tag)'." + ) end return result end @@ -362,11 +413,19 @@ function githubcheck(doc::Document) tag_guess = remote_repo.commit tag_ref = tag(repo, tag_guess) if tag_ref === nothing - @docerror(doc, :linkcheck_remotes, "linkcheck (remote) '$(repo)' error while getting tag '$(tag_guess)'. $(GITHUB_ERROR_ADVICE)") + @docerror( + doc, + :linkcheck_remotes, + "linkcheck (remote) '$(repo)' error while getting tag '$(tag_guess)'. $(GITHUB_ERROR_ADVICE)" + ) return end if tag_ref["object"]["type"] != "commit" - @docerror(doc, :linkcheck_remotes, "linkcheck (remote) '$(repo)' tag '$(tag_guess)' does not point to a commit. $(GITHUB_ERROR_ADVICE)") + @docerror( + doc, + :linkcheck_remotes, + "linkcheck (remote) '$(repo)' tag '$(tag_guess)' does not point to a commit. $(GITHUB_ERROR_ADVICE)" + ) return end commit_sha = tag_ref["object"]["sha"] diff --git a/src/docs_precompile/make.jl b/src/docs_precompile/make.jl index 611881bb32..ecfd84ac70 100644 --- a/src/docs_precompile/make.jl +++ b/src/docs_precompile/make.jl @@ -1,12 +1,10 @@ using Documenter, Logging with_logger(NullLogger()) do -makedocs( - sitename = "TestPkg", - pages = Any[ - "Home" => "index.md", - ], - build = mktempdir(), - remotes = nothing, -) + makedocs( + sitename="TestPkg", + pages=Any["Home"=>"index.md",], + build=mktempdir(), + remotes=nothing, + ) end diff --git a/src/doctest.jl b/src/doctest.jl index 8c8640a77d..fb41df2df7 100644 --- a/src/doctest.jl +++ b/src/doctest.jl @@ -23,12 +23,12 @@ function doctest(package::Module; manual=true, testset=nothing, kwargs...) 
end source = nothing if manual === true - source = normpath(joinpath(dirname(pathof(package)), "..", "docs", "src")) - isdir(source) || throw(ArgumentError(""" - Package $(package) does not have a documentation source directory at standard location. - Searched at: $(source) - If ... - """)) + source = normpath(joinpath(dirname(pathof(package)), "..", "docs", "src")) + isdir(source) || throw(ArgumentError(""" + Package $(package) does not have a documentation source directory at standard location. + Searched at: $(source) + If ... + """)) end testset = (testset === nothing) ? "Doctests: $(package)" : testset doctest(source, [package]; testset=testset, kwargs...) @@ -64,13 +64,13 @@ manual pages can be disabled if `source` is set to `nothing`. fixing fails. """ function doctest( - source::Union{AbstractString,Nothing}, - modules::AbstractVector{Module}; - fix = false, - testset = "Doctests", - doctestfilters = Regex[], - plugins = Plugin[], - ) + source::Union{AbstractString,Nothing}, + modules::AbstractVector{Module}; + fix=false, + testset="Doctests", + doctestfilters=Regex[], + plugins=Plugin[], +) function all_doctests() dir = mktempdir() try @@ -80,12 +80,12 @@ function doctest( mkdir(source) end makedocs(; - root = dir, - source = source, - sitename = "", - doctest = fix ? :fix : :only, - modules = modules, - doctestfilters = doctestfilters, + root=dir, + source=source, + sitename="", + doctest=fix ? 
:fix : :only, + modules=modules, + doctestfilters=doctestfilters, # When doctesting, we don't really want to get bogged down with issues # related to determining the remote repositories for edit URLs and such remotes = nothing, @@ -93,13 +93,14 @@ function doctest( ) true catch err - @error "Doctesting failed" exception=(err, catch_backtrace()) + @error "Doctesting failed" exception = (err, catch_backtrace()) false finally try rm(dir; recursive=true) catch e - @warn "Documenter was unable to clean up the temporary directory $(dir)" exception = e + @warn "Documenter was unable to clean up the temporary directory $(dir)" exception = + e end end end diff --git a/src/doctests.jl b/src/doctests.jl index 685cc73acb..ef00e611be 100644 --- a/src/doctests.jl +++ b/src/doctests.jl @@ -5,15 +5,16 @@ # ------------------------- mutable struct MutableMD2CodeBlock - language :: String - code :: String + language::String + code::String end -MutableMD2CodeBlock(block :: MarkdownAST.CodeBlock) = MutableMD2CodeBlock(block.info, block.code) +MutableMD2CodeBlock(block::MarkdownAST.CodeBlock) = + MutableMD2CodeBlock(block.info, block.code) struct DocTestContext - file :: String - doc :: Documenter.Document - meta :: Dict{Symbol, Any} + file::String + doc::Documenter.Document + meta::Dict{Symbol,Any} DocTestContext(file::String, doc::Documenter.Document) = new(file, doc, Dict()) end @@ -77,13 +78,18 @@ end function parse_metablock(ctx::DocTestContext, block::MarkdownAST.CodeBlock) @assert startswith(block.info, "@meta") - meta = Dict{Symbol, Any}() + meta = Dict{Symbol,Any}() for (ex, str) in Documenter.parseblock(block.code, ctx.doc, ctx.file) if Documenter.isassign(ex) try meta[ex.args[1]] = Core.eval(Main, ex.args[2]) catch err - @docerror(ctx.doc, :meta_block, "Failed to evaluate `$(strip(str))` in `@meta` block.", exception = err) + @docerror( + ctx.doc, + :meta_block, + "Failed to evaluate `$(strip(str))` in `@meta` block.", + exception = err + ) end end end @@ -105,7 +111,7 @@ 
function _doctest(ctx::DocTestContext, block_immutable::MarkdownAST.CodeBlock) lang = block_immutable.info if startswith(lang, "jldoctest") # Define new module or reuse an old one from this page if we have a named doctest. - name = match(r"jldoctest[ ]?(.*)$", split(lang, ';', limit = 2)[1])[1] + name = match(r"jldoctest[ ]?(.*)$", split(lang, ';', limit=2)[1])[1] sandbox = Documenter.get_sandbox_module!(ctx.meta, "doctest", name) # Normalise line endings. @@ -122,7 +128,9 @@ function _doctest(ctx::DocTestContext, block_immutable::MarkdownAST.CodeBlock) e isa Meta.ParseError || rethrow(e) file = ctx.meta[:CurrentFile] lines = Documenter.find_block_in_file(block.code, file) - @docerror(ctx.doc, :doctest, + @docerror( + ctx.doc, + :doctest, """ Unable to parse doctest keyword arguments in $(Documenter.locrepr(file, lines)) Use ```jldoctest name; key1 = value1, key2 = value2 @@ -130,14 +138,18 @@ function _doctest(ctx::DocTestContext, block_immutable::MarkdownAST.CodeBlock) ```$(lang) $(block.code) ``` - """, parse_error = e) + """, + parse_error = e + ) return false end for kwarg in kwargs.args if !(isa(kwarg, Expr) && kwarg.head === :(=) && isa(kwarg.args[1], Symbol)) file = ctx.meta[:CurrentFile] lines = Documenter.find_block_in_file(block.code, file) - @docerror(ctx.doc, :doctest, + @docerror( + ctx.doc, + :doctest, """ invalid syntax for doctest keyword arguments in $(Documenter.locrepr(file, lines)) Use ```jldoctest name; key1 = value1, key2 = value2 @@ -145,7 +157,8 @@ function _doctest(ctx::DocTestContext, block_immutable::MarkdownAST.CodeBlock) ```$(lang) $(block.code) ``` - """) + """ + ) return false end d[kwarg.args[1]] = Core.eval(sandbox, kwarg.args[2]) @@ -153,14 +166,20 @@ function _doctest(ctx::DocTestContext, block_immutable::MarkdownAST.CodeBlock) end ctx.meta[:LocalDocTestArguments] = d - for expr in [get(ctx.meta, :DocTestSetup, []); get(ctx.meta[:LocalDocTestArguments], :setup, [])] + for expr in [ + get(ctx.meta, :DocTestSetup, []) + 
get(ctx.meta[:LocalDocTestArguments], :setup, []) + ] Meta.isexpr(expr, :block) && (expr.head = :toplevel) try Core.eval(sandbox, expr) catch e push!(ctx.doc.internal.errors, :doctest) - @error("could not evaluate expression from doctest setup.", - expression = expr, exception = e) + @error( + "could not evaluate expression from doctest setup.", + expression = expr, + exception = e + ) return false end end @@ -171,7 +190,9 @@ function _doctest(ctx::DocTestContext, block_immutable::MarkdownAST.CodeBlock) elseif occursin(r"^# output\s+$"m, block.code) file = ctx.meta[:CurrentFile] lines = Documenter.find_block_in_file(block.code, file) - @docerror(ctx.doc, :doctest, + @docerror( + ctx.doc, + :doctest, """ invalid doctest block in $(Documenter.locrepr(file, lines)) Requires `# output` without trailing whitespace @@ -179,11 +200,14 @@ function _doctest(ctx::DocTestContext, block_immutable::MarkdownAST.CodeBlock) ```$(lang) $(block.code) ``` - """) + """ + ) else file = ctx.meta[:CurrentFile] lines = Documenter.find_block_in_file(block.code, file) - @docerror(ctx.doc, :doctest, + @docerror( + ctx.doc, + :doctest, """ invalid doctest block in $(Documenter.locrepr(file, lines)) Requires `julia> ` or `# output` @@ -191,7 +215,8 @@ function _doctest(ctx::DocTestContext, block_immutable::MarkdownAST.CodeBlock) ```$(lang) $(block.code) ``` - """) + """ + ) end delete!(ctx.meta, :LocalDocTestArguments) end @@ -220,14 +245,16 @@ function eval_repl(block, sandbox, meta::Dict, doc::Documenter.Document, page) src_lines = Documenter.find_block_in_file(block.code, meta[:CurrentFile]) for (input, output) in repl_splitter(block.code) result = Result(block, input, output, meta[:CurrentFile]) - for (ex, str) in Documenter.parseblock(input, doc, page; keywords = false, raise=false) + for (ex, str) in + Documenter.parseblock(input, doc, page; keywords=false, raise=false) # Input containing a semi-colon gets suppressed in the final output. 
- @debug "Evaluating REPL line from doctest at $(Documenter.locrepr(result.file, src_lines))" unparsed_string = str parsed_expression = ex + @debug "Evaluating REPL line from doctest at $(Documenter.locrepr(result.file, src_lines))" unparsed_string = + str parsed_expression = ex result.hide = REPL.ends_with_semicolon(str) # Use the REPL softscope for REPL jldoctests, # see https://github.com/JuliaLang/julia/pull/33864 ex = REPL.softscope(ex) - c = IOCapture.capture(rethrow = InterruptException) do + c = IOCapture.capture(rethrow=InterruptException) do Core.eval(sandbox, ex) end Core.eval(sandbox, Expr(:global, Expr(:(=), :ans, QuoteNode(c.value)))) @@ -249,12 +276,12 @@ function eval_script(block, sandbox, meta::Dict, doc::Documenter.Document, page) # # # to mark `input`/`output` separation. - input, output = split(block.code, r"^# output$"m, limit = 2) - input = rstrip(input, '\n') + input, output = split(block.code, r"^# output$"m, limit=2) + input = rstrip(input, '\n') output = lstrip(output, '\n') result = Result(block, input, output, meta[:CurrentFile]) - for (ex, str) in Documenter.parseblock(input, doc, page; keywords = false, raise=false) - c = IOCapture.capture(rethrow = InterruptException) do + for (ex, str) in Documenter.parseblock(input, doc, page; keywords=false, raise=false) + c = IOCapture.capture(rethrow=InterruptException) do Core.eval(sandbox, ex) end result.value = c.value @@ -273,7 +300,7 @@ function filter_doctests(filters, strings) # removed before comparing the inputs and outputs of a doctest. However, it can # also be a regex => substitution pair in which case the match gets replaced by # the substitution string. 
- r, s = if isa(rs, Pair{Regex,T} where T <: AbstractString) + r, s = if isa(rs, Pair{Regex,T} where {T<:AbstractString}) rs elseif isa(rs, Regex) || isa(rs, AbstractString) rs, "" @@ -298,15 +325,18 @@ function checkresult(sandbox::Module, result::Result, meta::Dict, doc::Documente # To avoid dealing with path/line number issues in backtraces we use `[...]` to # mark ignored output from an error message. Only the text prior to it is used to # test for doctest success/failure. - head = replace(split(result.output, "\n[...]"; limit = 2)[1], mod_regex => "") + head = replace(split(result.output, "\n[...]"; limit=2)[1], mod_regex => "") head = replace(head, mod_regex_nodot => "Main") - str = error_to_string(outio, result.value, result.bt) - str = replace(str, mod_regex => "") - str = replace(str, mod_regex_nodot => "Main") + str = error_to_string(outio, result.value, result.bt) + str = replace(str, mod_regex => "") + str = replace(str, mod_regex_nodot => "Main") filteredstr, filteredhead = filter_doctests(filters, (str, head)) @debug debug_report( - result=result, filters = filters, expected_filtered = filteredhead, - evaluated = rstrip(str), evaluated_filtered = filteredstr + result=result, + filters=filters, + expected_filtered=filteredhead, + evaluated=rstrip(str), + evaluated_filtered=filteredstr ) # Since checking for the prefix of an error won't catch the empty case we need # to check that manually with `isempty`. 
@@ -327,8 +357,11 @@ function checkresult(sandbox::Module, result::Result, meta::Dict, doc::Documente str = rstrip(replace(str, mod_regex_nodot => "Main")) filteredstr, filteredoutput = filter_doctests(filters, (str, output)) @debug debug_report( - result=result, filters = filters, expected_filtered = filteredoutput, - evaluated = rstrip(str), evaluated_filtered = filteredstr + result=result, + filters=filters, + expected_filtered=filteredoutput, + evaluated=rstrip(str), + evaluated_filtered=filteredstr ) if filteredstr != filteredoutput if doc.user.doctest === :fix @@ -366,8 +399,11 @@ function debug_report(; result, expected_filtered, evaluated, evaluated_filtered """ if !isempty(filters) r *= "\n" - r *= (length(filters) == 1) ? "1 doctest filter was applied:\n\n" : + r *= if (length(filters) == 1) + "1 doctest filter was applied:\n\n" + else "$(length(filters)) doctest filters were applied:\n\n" + end for rs in filters r *= " $rs\n" end @@ -395,7 +431,8 @@ end # Display doctesting results. function result_to_string(buf, value) - value === nothing || Base.invokelatest(show, IOContext(buf, :limit => true), MIME"text/plain"(), value) + value === nothing || + Base.invokelatest(show, IOContext(buf, :limit => true), MIME"text/plain"(), value) return sanitise(buf) end @@ -404,7 +441,7 @@ function error_to_string(buf, er, bt) bt = Documenter.remove_common_backtrace(bt, backtrace()) # Remove everything below the last eval call (which should be the one in IOCapture.capture) index = findlast(ptr -> Base.ip_matches_func(ptr, :eval), bt) - bt = (index === nothing) ? bt : bt[1:(index - 1)] + bt = (index === nothing) ? bt : bt[1:(index-1)] # Print a REPL-like error message. 
print(buf, "ERROR: ") Base.invokelatest(showerror, buf, er, bt) @@ -426,26 +463,31 @@ function report(result::Result, str, doc::Documenter.Document) diff = TextDiff.Diff{TextDiff.Words}(result.output, rstrip(str)) lines = Documenter.find_block_in_file(result.block.code, result.file) line = lines === nothing ? nothing : first(lines) - @error(""" - doctest failure in $(Documenter.locrepr(result.file, lines)) + @error( + """ + doctest failure in $(Documenter.locrepr(result.file, lines)) - ```$(result.block.language) - $(result.block.code) - ``` + ```$(result.block.language) + $(result.block.code) + ``` - Subexpression: + Subexpression: - $(result.input) + $(result.input) - Evaluated output: + Evaluated output: - $(rstrip(str)) + $(rstrip(str)) - Expected output: + Expected output: - $(result.output) + $(result.output) - """, diff, _file=result.file, _line=line) + """, + diff, + _file = result.file, + _line = line + ) end function fix_doctest(result::Result, str, doc::Documenter.Document) @@ -454,7 +496,7 @@ function fix_doctest(result::Result, str, doc::Documenter.Document) # read the file containing the code block content = read(filename, String) # output stream - io = IOBuffer(sizehint = sizeof(content)) + io = IOBuffer(sizehint=sizeof(content)) # first look for the entire code block # make a regex of the code that matches leading whitespace rcode = "(\\h*)" * replace(Documenter.regex_escape(code), "\\n" => "\\n\\h*") @@ -488,7 +530,8 @@ function fix_doctest(result::Result, str, doc::Documenter.Document) newcode *= str newcode *= code[nextind(code, last(inputidx)):end] else - newcode *= replace(code[nextind(code, last(inputidx)):end], result.output => str, count = 1) + newcode *= + replace(code[nextind(code, last(inputidx)):end], result.output => str, count=1) end # replace internal code block with the non-indented new code, needed if we come back # looking to replace output in the same code block later @@ -509,8 +552,8 @@ const PROMPT_REGEX = r"^julia> (.*)$" const 
SOURCE_REGEX = r"^ (.*)$" function repl_splitter(code) - lines = split(string(code, "\n"), '\n') - input = String[] + lines = split(string(code, "\n"), '\n') + input = String[] output = String[] buffer = IOBuffer() # temporary buffer for doctest inputs and outputs found_first_prompt = false diff --git a/src/documents.jl b/src/documents.jl index 33e72ce035..d7f38b7a08 100644 --- a/src/documents.jl +++ b/src/documents.jl @@ -13,7 +13,7 @@ abstract type AbstractDocumenterBlock <: MarkdownAST.AbstractBlock end """ mutable struct Globals mod :: Module - meta :: Dict{Symbol, Any} + meta :: Dict{Symbol,Any} end Globals() = Globals(Main, Dict()) @@ -21,21 +21,21 @@ Globals() = Globals(Main, Dict()) Represents a single markdown file. """ struct Page - source :: String - build :: String - workdir :: Union{Symbol,String} + source::String + build::String + workdir::Union{Symbol,String} """ Ordered list of raw toplevel markdown nodes from the parsed page contents. This vector should be considered immutable. """ - elements :: Vector + elements::Vector """ Each element in `.elements` maps to an "expanded" element. This may be itself if the element does not need expanding or some other object, such as a `DocsNode` in the case of `@docs` code blocks. """ - globals :: Globals - mdast :: MarkdownAST.Node{Nothing} + globals::Globals + mdast::MarkdownAST.Node{Nothing} end function Page(source::AbstractString, build::AbstractString, workdir::AbstractString) # The Markdown standard library parser is sensitive to line endings: @@ -59,7 +59,8 @@ function Page(source::AbstractString, build::AbstractString, workdir::AbstractSt end # FIXME -- special overload for parseblock -parseblock(code::AbstractString, doc, page::Documenter.Page; kwargs...) = parseblock(code, doc, page.source; kwargs...) +parseblock(code::AbstractString, doc, page::Documenter.Page; kwargs...) = + parseblock(code, doc, page.source; kwargs...) # Document blueprints. 
# -------------------- @@ -67,8 +68,8 @@ parseblock(code::AbstractString, doc, page::Documenter.Page; kwargs...) = parseb # Should contain all the information that is necessary to build a document. # Currently has enough information to just run doctests. struct DocumentBlueprint - pages :: Dict{String, Page} # Markdown files only. - modules :: Set{Module} # Which modules to check for missing docs? + pages::Dict{String,Page} # Markdown files only. + modules::Set{Module} # Which modules to check for missing docs? end @@ -78,24 +79,25 @@ end ## IndexNode. struct IndexNode <: AbstractDocumenterBlock - pages :: Vector{String} # Which pages to include in the index? Set by user. - modules :: Vector{Module} # Which modules to include? Set by user. - order :: Vector{Symbol} # What order should docs be listed in? Set by user. - build :: String # Path to the file where this index will appear. - source :: String # Path to the file where this index was written. - elements :: Vector # (object, doc, page, mod, cat)-tuple for constructing links. - codeblock :: MarkdownAST.CodeBlock # original code block - - function IndexNode(codeblock; - # TODO: Fix difference between uppercase and lowercase naming of keys. - # Perhaps deprecate the uppercase versions? Same with `ContentsNode`. - Pages = [], - Modules = [], - Order = [:module, :constant, :type, :function, :macro], - build = error("missing value for `build` in `IndexNode`."), - source = error("missing value for `source` in `IndexNode`."), - others... - ) + pages :: Vector{String} # Which pages to include in the index? Set by user. + modules :: Vector{Module} # Which modules to include? Set by user. + order :: Vector{Symbol} # What order should docs be listed in? Set by user. + build :: String # Path to the file where this index will appear. + source :: String # Path to the file where this index was written. + elements :: Vector # (object, doc, page, mod, cat)-tuple for constructing links. 
+ codeblock :: MarkdownAST.CodeBlock # original code block + + function IndexNode( + codeblock; + # TODO: Fix difference between uppercase and lowercase naming of keys. + # Perhaps deprecate the uppercase versions? Same with `ContentsNode`. + Pages=[], + Modules=[], + Order=[:module, :constant, :type, :function, :macro], + build=error("missing value for `build` in `IndexNode`."), + source=error("missing value for `source` in `IndexNode`."), + others... + ) if !isempty(others) @warn( "In file $source: the following unsupported keyword " * @@ -110,21 +112,22 @@ end ## ContentsNode. struct ContentsNode <: AbstractDocumenterBlock - pages :: Vector{String} # Which pages should be included in contents? Set by user. - mindepth :: Int # Minimum header level that should be displayed. Set by user. - depth :: Int # Down to which level should headers be displayed? Set by user. - build :: String # Same as for `IndexNode`s. - source :: String # Same as for `IndexNode`s. - elements :: Vector # (order, page, anchor)-tuple for constructing links. - codeblock :: MarkdownAST.CodeBlock # original code block - - function ContentsNode(codeblock; - Pages = [], - Depth = 1:2, - build = error("missing value for `build` in `ContentsNode`."), - source = error("missing value for `source` in `ContentsNode`."), - others... - ) + pages :: Vector{String} # Which pages should be included in contents? Set by user. + mindepth :: Int # Minimum header level that should be displayed. Set by user. + depth :: Int # Down to which level should headers be displayed? Set by user. + build :: String # Same as for `IndexNode`s. + source :: String # Same as for `IndexNode`s. + elements :: Vector # (order, page, anchor)-tuple for constructing links. + codeblock :: MarkdownAST.CodeBlock # original code block + + function ContentsNode( + codeblock; + Pages=[], + Depth=1:2, + build=error("missing value for `build` in `ContentsNode`."), + source=error("missing value for `source` in `ContentsNode`."), + others... 
+ ) if Depth isa Integer Depth = 1:Depth end @@ -142,8 +145,8 @@ end ## Other nodes struct MetaNode <: AbstractDocumenterBlock - codeblock :: MarkdownAST.CodeBlock - dict :: Dict{Symbol, Any} + codeblock::MarkdownAST.CodeBlock + dict::Dict{Symbol,Any} end struct MethodNode @@ -152,26 +155,26 @@ struct MethodNode end struct DocsNode <: AbstractDocumenterBlock - anchor :: Anchor - object :: Object - page :: Documenter.Page + anchor :: Anchor + object :: Object + page :: Documenter.Page # MarkdownAST support. # TODO: should be the docstring components (i.e. .mdasts) be stored as child nodes? mdasts :: Vector{MarkdownAST.Node{Nothing}} results :: Vector{Base.Docs.DocStr} - metas :: Vector{Dict{Symbol, Any}} + metas :: Vector{Dict{Symbol,Any}} function DocsNode(anchor, object, page) new(anchor, object, page, [], [], []) end end struct DocsNodes - nodes :: Vector{Union{DocsNode,Markdown.Admonition}} + nodes::Vector{Union{DocsNode,Markdown.Admonition}} end struct EvalNode <: AbstractDocumenterBlock - codeblock :: MarkdownAST.CodeBlock - result :: Union{MarkdownAST.Node, Nothing} + codeblock::MarkdownAST.CodeBlock + result::Union{MarkdownAST.Node,Nothing} end struct RawNode <: AbstractDocumenterBlock @@ -184,14 +187,14 @@ end # In the MarkdownAST representation, the dictionaries get converted into # MultiOutputElement elements. struct MultiOutput <: AbstractDocumenterBlock - codeblock :: MarkdownAST.CodeBlock + codeblock::MarkdownAST.CodeBlock end # For @repl blocks we store the inputs and outputs as separate Markdown.Code # objects, and then combine them in the writer. When converting to MarkdownAST, # those separate code blocks become child nodes. 
struct MultiCodeBlock <: AbstractDocumenterBlock - codeblock :: MarkdownAST.CodeBlock + codeblock::MarkdownAST.CodeBlock language::String content::Vector{Markdown.Code} end @@ -236,19 +239,20 @@ mutable struct NavNode `nothing` if the `NavNode` is a non-page node of the navigation tree, otherwise the string should be a valid key in `doc.blueprint.pages` """ - page :: Union{String, Nothing} + page::Union{String,Nothing} """ If not `nothing`, specifies the text that should be displayed in navigation links etc. instead of the automatically determined text. """ - title_override :: Union{String, Nothing} - parent :: Union{NavNode, Nothing} + title_override :: Union{String,Nothing} + parent :: Union{NavNode,Nothing} children :: Vector{NavNode} visible :: Bool - prev :: Union{NavNode, Nothing} - next :: Union{NavNode, Nothing} + prev :: Union{NavNode,Nothing} + next :: Union{NavNode,Nothing} end -NavNode(page, title_override, parent) = NavNode(page, title_override, parent, [], true, nothing, nothing) +NavNode(page, title_override, parent) = + NavNode(page, title_override, parent, [], true, nothing, nothing) # This method ensures that we do not print the whole navtree in case we ever happen to print # a NavNode in some debug output somewhere. function Base.show(io::IO, n::NavNode) @@ -261,8 +265,8 @@ Constructs a list of the ancestors of the `navnode` (including the `navnode` its ordered so that the root of the navigation tree is the first and `navnode` itself is the last item. """ -navpath(navnode::NavNode) = navnode.parent === nothing ? [navnode] : - push!(navpath(navnode.parent), navnode) +navpath(navnode::NavNode) = + navnode.parent === nothing ? [navnode] : push!(navpath(navnode.parent), navnode) # Inner Document Fields. 
@@ -284,7 +288,8 @@ function RemoteRepository(root::AbstractString, remote::Remotes.Remote) RemoteRepository(realpath(root), remote, repo_commit(root)) catch e e isa RepoCommitError || rethrow() - @error "Unable to determine the commit for the remote repository:\n$(e.msg)" e.directory exception = e.err_bt + @error "Unable to determine the commit for the remote repository:\n$(e.msg)" e.directory exception = + e.err_bt throw(ArgumentError(""" Unable to determine the commit for the remote repository at $(root) => $(remote)""")) @@ -296,27 +301,28 @@ $(SIGNATURES) Returns the first 5 characters of the current Git commit hash of the remote. """ -shortcommit(remoteref::RemoteRepository) = (length(remoteref.commit) > 5) ? remoteref.commit[1:5] : remoteref.commit +shortcommit(remoteref::RemoteRepository) = + (length(remoteref.commit) > 5) ? remoteref.commit[1:5] : remoteref.commit """ User-specified values used to control the generation process. """ struct User - root :: String # An absolute path to the root directory of the document. - source :: String # Parent directory is `.root`. Where files are read from. - build :: String # Parent directory is also `.root`. Where files are written to. - workdir :: Union{Symbol,String} # Parent directory is also `.root`. Where code is executed from. - format :: Vector{Writer} # What format to render the final document with? - clean :: Bool # Empty the `build` directory before starting a new build? - doctest :: Union{Bool,Symbol} # Run doctests? + root::String # An absolute path to the root directory of the document. + source::String # Parent directory is `.root`. Where files are read from. + build::String # Parent directory is also `.root`. Where files are written to. + workdir::Union{Symbol,String} # Parent directory is also `.root`. Where code is executed from. + format::Vector{Writer} # What format to render the final document with? + clean::Bool # Empty the `build` directory before starting a new build? 
+ doctest::Union{Bool,Symbol} # Run doctests? linkcheck::Bool # Check external links.. linkcheck_ignore::Vector{Union{String,Regex}} # ..and then ignore (some of) them. linkcheck_timeout::Real # ..but only wait this many seconds for each one. checkdocs::Symbol # Check objects missing from `@docs` blocks. `:none`, `:exports`, or `:all`. doctestfilters::Vector{Regex} # Filtering for doctests warnonly::Vector{Symbol} # List of docerror groups that should only warn, rather than cause a build failure - pages :: Vector{Any} # Ordering of document pages specified by the user. - pagesonly :: Bool # Discard any .md pages from processing that are not in .pages + pages::Vector{Any} # Ordering of document pages specified by the user. + pagesonly::Bool # Discard any .md pages from processing that are not in .pages expandfirst::Vector{String} # List of pages that get "expanded" before others # Remote Git repository information # @@ -333,32 +339,32 @@ struct User # While the initial list in .remotes is populated when we construct the Document # object, we also dynamically add links to the .remotes array as we check different # files, by looking at .git directories. - remote :: Union{Remotes.Remote,Nothing} - remotes :: Union{Vector{RemoteRepository},Nothing} - sitename:: String - authors :: String - version :: String # version string used in the version selector by default + remote::Union{Remotes.Remote,Nothing} + remotes::Union{Vector{RemoteRepository},Nothing} + sitename::String + authors::String + version::String # version string used in the version selector by default highlightsig::Bool # assume leading unlabeled code blocks in docstrings to be Julia. - draft :: Bool + draft::Bool end """ Private state used to control the generation process. """ struct Internal - assets :: String # Path where asset files will be copied to. - navtree :: Vector{NavNode} # A vector of top-level navigation items. 
- navlist :: Vector{NavNode} # An ordered list of `NavNode`s that point to actual pages - headers :: AnchorMap # See `modules/Anchors.jl`. Tracks `Markdown.Header` objects. - docs :: AnchorMap # See `modules/Anchors.jl`. Tracks `@docs` docstrings. - bindings:: IdDict{Any,Any} # Tracks insertion order of object per-binding. - objects :: IdDict{Any,Any} # Tracks which `Objects` are included in the `Document`. - contentsnodes :: Vector{ContentsNode} - indexnodes :: Vector{IndexNode} - locallinks :: IdDict{MarkdownAST.Link, String} + assets::String # Path where asset files will be copied to. + navtree::Vector{NavNode} # A vector of top-level navigation items. + navlist::Vector{NavNode} # An ordered list of `NavNode`s that point to actual pages + headers::AnchorMap # See `modules/Anchors.jl`. Tracks `Markdown.Header` objects. + docs::AnchorMap # See `modules/Anchors.jl`. Tracks `@docs` docstrings. + bindings::IdDict{Any,Any} # Tracks insertion order of object per-binding. + objects::IdDict{Any,Any} # Tracks which `Objects` are included in the `Document`. + contentsnodes::Vector{ContentsNode} + indexnodes::Vector{IndexNode} + locallinks::IdDict{MarkdownAST.Link,String} errors::Set{Symbol} - src_to_uuid::Dict{String, Base.UUID} # These two are used to cache information from Pkg - uuid_to_version_info::Dict{Base.UUID, Tuple{VersionNumber, String}} + src_to_uuid::Dict{String,Base.UUID} # These two are used to cache information from Pkg + uuid_to_version_info::Dict{Base.UUID,Tuple{VersionNumber,String}} end # Document. @@ -368,40 +374,40 @@ end Represents an entire document. """ struct Document - user :: User # Set by the user via `makedocs`. - internal :: Internal # Computed values. - plugins :: Dict{DataType, Plugin} - blueprint :: DocumentBlueprint + user::User # Set by the user via `makedocs`. + internal::Internal # Computed values. 
+ plugins::Dict{DataType,Plugin} + blueprint::DocumentBlueprint end function Document(; - root :: AbstractString = currentdir(), - source :: AbstractString = "src", - build :: AbstractString = "build", - workdir :: Union{Symbol, AbstractString} = :build, - format :: Any = HTML(), - clean :: Bool = true, - doctest :: Union{Bool,Symbol} = true, - linkcheck:: Bool = false, - linkcheck_ignore :: Vector = [], - linkcheck_timeout :: Real = 10, - checkdocs::Symbol = :all, - doctestfilters::Vector{Regex}= Regex[], - warnonly :: Union{Bool,Symbol,Vector{Symbol}} = Symbol[], - modules :: ModVec = Module[], - pages :: Vector = Any[], - pagesonly:: Bool = false, - expandfirst :: Vector = String[], - plugins :: Vector = Plugin[], - repo :: Union{Remotes.Remote, AbstractString} = "", - remotes :: Union{Dict, Nothing} = Dict(), - sitename :: AbstractString = "", - authors :: AbstractString = "", - version :: AbstractString = "", - highlightsig::Bool = true, - draft::Bool = false, - others... - ) + root::AbstractString=currentdir(), + source::AbstractString="src", + build::AbstractString="build", + workdir::Union{Symbol,AbstractString}=:build, + format::Any=HTML(), + clean::Bool=true, + doctest::Union{Bool,Symbol}=true, + linkcheck::Bool=false, + linkcheck_ignore::Vector=[], + linkcheck_timeout::Real=10, + checkdocs::Symbol=:all, + doctestfilters::Vector{Regex}=Regex[], + warnonly::Union{Bool,Symbol,Vector{Symbol}}=Symbol[], + modules::ModVec=Module[], + pages::Vector=Any[], + pagesonly::Bool=false, + expandfirst::Vector=String[], + plugins::Vector=Plugin[], + repo::Union{Remotes.Remote,AbstractString}="", + remotes::Union{Dict,Nothing}=Dict(), + sitename::AbstractString="", + authors::AbstractString="", + version::AbstractString="", + highlightsig::Bool=true, + draft::Bool=false, + others... 
+) if !isempty(others) msg = "makedocs() got passed invalid keyword arguments:" @@ -474,25 +480,25 @@ function Document(; IdDict{Any,Any}(), [], [], - Dict{Markdown.Link, String}(), + Dict{Markdown.Link,String}(), Set{Symbol}(), - Dict{String, String}(), - Dict{String, Tuple{String, String}}() + Dict{String,String}(), + Dict{String,Tuple{String,String}}() ) - plugin_dict = Dict{DataType, Plugin}() + plugin_dict = Dict{DataType,Plugin}() for plugin in plugins - plugin isa Plugin || - throw(ArgumentError("$(typeof(plugin)) in `plugins=` is not a subtype of `Documenter.Plugin`.")) + plugin isa Plugin || throw( + ArgumentError( + "$(typeof(plugin)) in `plugins=` is not a subtype of `Documenter.Plugin`." + ) + ) haskey(plugin_dict, typeof(plugin)) && throw(ArgumentError("only one copy of $(typeof(plugin)) may be passed.")) plugin_dict[typeof(plugin)] = plugin end - blueprint = DocumentBlueprint( - Dict{String, Page}(), - submodules(modules), - ) + blueprint = DocumentBlueprint(Dict{String,Page}(), submodules(modules),) Document(user, internal, plugin_dict, blueprint) end @@ -505,7 +511,9 @@ function interpret_repo_and_remotes(; root, repo, remotes) # argument of makedocs) path = joinpath(root, path) if !isdir(path) - throw(ArgumentError(("Invalid local path in remotes (not a directory): $(path)"))) + throw( + ArgumentError(("Invalid local path in remotes (not a directory): $(path)")) + ) end path = realpath(path) # We'll also check that there are no duplicate entries. 
@@ -517,7 +525,7 @@ function interpret_repo_and_remotes(; root, repo, remotes) """)) end # Now we actually check the remotes themselves - remote = if remoteref isa Tuple{Remotes.Remote, AbstractString} + remote = if remoteref isa Tuple{Remotes.Remote,AbstractString} RemoteRepository(path, remoteref[1], remoteref[2]) elseif remoteref isa Remotes.Remote RemoteRepository(path, remoteref) @@ -531,17 +539,18 @@ function interpret_repo_and_remotes(; root, repo, remotes) end # We'll normalize repo to be a `Remotes.Remote` object (or nothing if omitted) - repo_normalized::Union{Remotes.Remote, Nothing} = if isa(repo, AbstractString) && isempty(repo) - # If the user does not provide the `repo` argument, we'll try to automatically - # detect the remote repository later. But for now, we'll set it to `nothing`. - nothing - elseif repo isa AbstractString - # Use the old template string parsing logic if a string was passed. - Remotes.URL(repo) - else - # Otherwise it should be some Remote object, so we'll just use that. - repo - end + repo_normalized::Union{Remotes.Remote,Nothing} = + if isa(repo, AbstractString) && isempty(repo) + # If the user does not provide the `repo` argument, we'll try to automatically + # detect the remote repository later. But for now, we'll set it to `nothing`. + nothing + elseif repo isa AbstractString + # Use the old template string parsing logic if a string was passed. + Remotes.URL(repo) + else + # Otherwise it should be some Remote object, so we'll just use that. + repo + end # Now we sort out the interaction between `repo` and `remotes`. 
Our goal is to make sure that we have a # value in both remotes for the repository root, and also that we get the correct value for the main @@ -562,8 +571,9 @@ function interpret_repo_and_remotes(; root, repo, remotes) end return false end - makedocs_root_repo::Union{String, Nothing} = find_root_parent(is_git_repo_root, root) - makedocs_root_remote::Union{Remotes.Remote, Nothing} = isnothing(makedocs_root_repo) ? nothing : getremote(makedocs_root_repo) + makedocs_root_repo::Union{String,Nothing} = find_root_parent(is_git_repo_root, root) + makedocs_root_remote::Union{Remotes.Remote,Nothing} = + isnothing(makedocs_root_repo) ? nothing : getremote(makedocs_root_repo) @debug "interpret_repo_and_remotes" remotes_checked repo_normalized makedocs_root_remoteref makedocs_root_repo makedocs_root_remote if !isnothing(makedocs_root_remoteref) && !isnothing(makedocs_root_repo) # If both are set, then there is potential for conflict. @@ -587,7 +597,7 @@ function interpret_repo_and_remotes(; root, repo, remotes) # with remotes. In that case, the remote in `remotes` takes precedence as well. @debug "Remotes: `remotes` takes precedence over automatically determined remote" makedocs_root_remoteref makedocs_root_repo makedocs_root_remote repo_normalized makedocs_root_remote = makedocs_root_remoteref.remote - elseif startswith(makedocs_root_remoteref.root, makedocs_root_repo) + elseif startswith(makedocs_root_remoteref.root, makedocs_root_repo) # In this case we determined that root of the repository is more specific than # whatever we found in remotes. So the main remote will be determined from the Git # repository. This will be a no-op, except that `repo` argument may override the @@ -599,7 +609,10 @@ function interpret_repo_and_remotes(; root, repo, remotes) @debug "Remotes: repo not set, using automatically determined remote" makedocs_root_remoteref makedocs_root_repo makedocs_root_remote repo_normalized end # Since this path was not in remotes, we also need to add it there. 
- addremote!(remotes_checked, RemoteRepository(makedocs_root_repo, makedocs_root_remote)) + addremote!( + remotes_checked, + RemoteRepository(makedocs_root_repo, makedocs_root_remote) + ) else # The final case is where the two repo paths have different roots, which should never # happen. @@ -640,7 +653,10 @@ function interpret_repo_and_remotes(; root, repo, remotes) throw(ArgumentError(err)) end # Since this path was not in remotes, we also need to add it there. - addremote!(remotes_checked, RemoteRepository(makedocs_root_repo, makedocs_root_remote)) + addremote!( + remotes_checked, + RemoteRepository(makedocs_root_repo, makedocs_root_remote) + ) else # Finally, if we're neither in a git repo, and nothing is in remotes, err = "Unable to automatically determine remote for main repo." @@ -705,12 +721,13 @@ function addremote!(remotes::Vector{RemoteRepository}, remoteref::RemoteReposito sortremotes!(remotes) return nothing end -addremote!(doc::Document, remoteref::RemoteRepository) = addremote!(doc.user.remotes, remoteref) +addremote!(doc::Document, remoteref::RemoteRepository) = + addremote!(doc.user.remotes, remoteref) # We'll sort the remotes, first, to make sure that the longer paths come first, # so that we could match them first. How the individual paths are sorted is pretty # unimportant, but we just want to make sure they are sorted in some well-defined # order. 
-sortremotes!(remotes::Vector{RemoteRepository}) = sort!(remotes, lt = lt_remotepair) +sortremotes!(remotes::Vector{RemoteRepository}) = sort!(remotes, lt=lt_remotepair) function lt_remotepair(r1::RemoteRepository, r2::RemoteRepository) if length(r1.root) == length(r2.root) return r1.root < r2.root @@ -846,7 +863,7 @@ function relpath_from_remote_root(doc::Document, path::AbstractString) else # When root_remote is set, so should be root_directory @assert !isnothing(root_directory) - return (; repo = root_remote, relpath = relpath(path, root_directory)) + return (; repo=root_remote, relpath=relpath(path, root_directory)) end end @@ -870,9 +887,8 @@ function edit_url(doc::Document, path; rev::Union{AbstractString,Nothing}) return repofile(remoteref.repo.remote, rev, remoteref.relpath) end -source_url(doc::Document, docstring) = source_url( - doc, docstring.data[:module], docstring.data[:path], linerange(docstring) -) +source_url(doc::Document, docstring) = + source_url(doc, docstring.data[:module], docstring.data[:path], linerange(docstring)) function source_url(doc::Document, mod::Module, file::AbstractString, linerange) # If the user has disable remote links, we abort immediately @@ -892,10 +908,15 @@ function source_url(doc::Document, mod::Module, file::AbstractString, linerange) isfile(file) || return nothing remoteref = relpath_from_remote_root(doc, file) if isnothing(remoteref) - throw(MissingRemoteError(; path = file, linerange, mod)) + throw(MissingRemoteError(; path=file, linerange, mod)) end @debug "source_url" mod file linerange remoteref - return repofile(remoteref.repo.remote, remoteref.repo.commit, remoteref.relpath, linerange) + return repofile( + remoteref.repo.remote, + remoteref.repo.commit, + remoteref.relpath, + linerange + ) end """ @@ -906,7 +927,7 @@ object of type `T` was an element of the `plugins` list passed to [`makedocs`](@ that object will be returned. Otherwise, a new `T` object will be created using the default constructor `T()`. 
Subsequent calls to `getplugin(doc, T)` return the same object. """ -function getplugin(doc::Document, plugin_type::Type{T}) where T <: Plugin +function getplugin(doc::Document, plugin_type::Type{T}) where {T<:Plugin} if !haskey(doc.plugins, plugin_type) doc.plugins[plugin_type] = plugin_type() end @@ -916,7 +937,12 @@ end ## Methods -function addpage!(doc::Document, src::AbstractString, dst::AbstractString, wd::AbstractString) +function addpage!( + doc::Document, + src::AbstractString, + dst::AbstractString, + wd::AbstractString +) page = Page(src, dst, wd) # page's identifier is the path relative to the `doc.user.source` directory name = pagekey(doc, page) @@ -931,10 +957,7 @@ end # relative to doc.user.root). This function calculates the key corresponding # to a page. pagekey(doc::Document, page::Page) = normpath( - relpath( - joinpath(doc.user.root, page.source), - joinpath(doc.user.root, doc.user.source) - ) + relpath(joinpath(doc.user.root, page.source), joinpath(doc.user.root, doc.user.source)) ) """ @@ -960,8 +983,11 @@ function populate!(index::IndexNode, document::Document) page = relpath(doc.page.build, dirname(index.build)) mod = object.binding.mod # Include *all* signatures, whether they are `Union{}` or not. 
- cat = Symbol(lowercase(doccat(object.binding, Union{}))) - if is_canonical(object) && _isvalid(page, index.pages) && _isvalid(mod, index.modules) && _isvalid(cat, index.order) + cat = Symbol(lowercase(doccat(object.binding, Union{}))) + if is_canonical(object) && + _isvalid(page, index.pages) && + _isvalid(mod, index.modules) && + _isvalid(cat, index.order) push!(index.elements, (object, doc, page, mod, cat)) end end @@ -969,13 +995,13 @@ function populate!(index::IndexNode, document::Document) pagesmap = precedence(index.pages) modulesmap = precedence(index.modules) ordermap = precedence(index.order) - comparison = function(a, b) - (x = _compare(pagesmap, 3, a, b)) == 0 || return x < 0 # page + comparison = function (a, b) + (x = _compare(pagesmap, 3, a, b)) == 0 || return x < 0 # page (x = _compare(modulesmap, 4, a, b)) == 0 || return x < 0 # module - (x = _compare(ordermap, 5, a, b)) == 0 || return x < 0 # category + (x = _compare(ordermap, 5, a, b)) == 0 || return x < 0 # category string(a[1].binding) < string(b[1].binding) # object name end - sort!(index.elements, lt = comparison) + sort!(index.elements, lt=comparison) return index end @@ -995,11 +1021,11 @@ function populate!(contents::ContentsNode, document::Document) end # Sorting contents links. 
pagesmap = precedence(contents.pages) - comparison = function(a, b) + comparison = function (a, b) (x = _compare(pagesmap, 2, a, b)) == 0 || return x < 0 # page a[1] < b[1] # anchor order end - sort!(contents.elements, lt = comparison) + sort!(contents.elements, lt=comparison) return contents end @@ -1018,8 +1044,9 @@ doctest_replace!(docsnode::DocsNode) = foreach(doctest_replace!, docsnode.mdasts function doctest_replace!(block::MarkdownAST.CodeBlock) startswith(block.info, "jldoctest") || return # suppress output for `#output`-style doctests with `output=false` kwarg - if occursin(r"^# output$"m, block.code) && occursin(r";.*output\h*=\h*false", block.info) - input = first(split(block.code, "# output\n", limit = 2)) + if occursin(r"^# output$"m, block.code) && + occursin(r";.*output\h*=\h*false", block.info) + input = first(split(block.code, "# output\n", limit=2)) block.code = rstrip(input) end # correct the language field @@ -1029,7 +1056,7 @@ doctest_replace!(@nospecialize _) = nothing function buildnode(T::Type, block, doc, page) mod = get(page.globals.meta, :CurrentModule, Main) - dict = Dict{Symbol, Any}(:source => page.source, :build => page.build) + dict = Dict{Symbol,Any}(:source => page.source, :build => page.build) for (ex, str) in parseblock(block.code, doc, page) if isassign(ex) cd(dirname(page.source)) do @@ -1052,7 +1079,7 @@ precedence(vec) = Dict(zip(vec, 1:length(vec))) # Conversion to MarkdownAST, for writers struct AnchoredHeader <: AbstractDocumenterBlock - anchor :: Anchor + anchor::Anchor end MarkdownAST.iscontainer(::AnchoredHeader) = true @@ -1061,25 +1088,26 @@ MarkdownAST.iscontainer(::AnchoredHeader) = true # In addition, the child node can also be an Admonition in case there was an error # in splicing in a docstring. 
struct DocsNodesBlock <: AbstractDocumenterBlock - codeblock :: MarkdownAST.CodeBlock + codeblock::MarkdownAST.CodeBlock end MarkdownAST.iscontainer(::DocsNodesBlock) = true MarkdownAST.can_contain(::DocsNodesBlock, ::MarkdownAST.AbstractElement) = false -MarkdownAST.can_contain(::DocsNodesBlock, ::Union{DocsNode, MarkdownAST.Admonition}) = true +MarkdownAST.can_contain(::DocsNodesBlock, ::Union{DocsNode,MarkdownAST.Admonition}) = true MarkdownAST.iscontainer(::MultiCodeBlock) = true MarkdownAST.can_contain(::MultiCodeBlock, ::MarkdownAST.Code) = true struct MultiOutputElement <: AbstractDocumenterBlock - element :: Any + element::Any end MarkdownAST.iscontainer(::MultiOutput) = true -MarkdownAST.can_contain(::MultiOutput, ::Union{MultiOutputElement,MarkdownAST.CodeBlock}) = true +MarkdownAST.can_contain(::MultiOutput, ::Union{MultiOutputElement,MarkdownAST.CodeBlock}) = + true # In the SetupBlocks expander, we map @setup nodes to Markdown.MD() objects struct SetupNode <: AbstractDocumenterBlock - name :: String - code :: String + name::String + code::String end # Override the show for DocumenterBlockTypes so that we would not print too much @@ -1087,10 +1115,14 @@ end Base.show(io::IO, node::AbstractDocumenterBlock) = print(io, typeof(node), "([...])") # Extend MDFlatten.mdflatten to support the Documenter-specific elements -MDFlatten.mdflatten(io, node::MarkdownAST.Node, ::AnchoredHeader) = MDFlatten.mdflatten(io, node.children) -MDFlatten.mdflatten(io, node::MarkdownAST.Node, e::SetupNode) = MDFlatten.mdflatten(io, node, MarkdownAST.CodeBlock(e.name, e.code)) -MDFlatten.mdflatten(io, node::MarkdownAST.Node, e::RawNode) = MDFlatten.mdflatten(io, node, MarkdownAST.CodeBlock("@raw $(e.name)", e.text)) -MDFlatten.mdflatten(io, node::MarkdownAST.Node, e::AbstractDocumenterBlock) = MDFlatten.mdflatten(io, node, e.codeblock) +MDFlatten.mdflatten(io, node::MarkdownAST.Node, ::AnchoredHeader) = + MDFlatten.mdflatten(io, node.children) +MDFlatten.mdflatten(io, 
node::MarkdownAST.Node, e::SetupNode) = + MDFlatten.mdflatten(io, node, MarkdownAST.CodeBlock(e.name, e.code)) +MDFlatten.mdflatten(io, node::MarkdownAST.Node, e::RawNode) = + MDFlatten.mdflatten(io, node, MarkdownAST.CodeBlock("@raw $(e.name)", e.text)) +MDFlatten.mdflatten(io, node::MarkdownAST.Node, e::AbstractDocumenterBlock) = + MDFlatten.mdflatten(io, node, e.codeblock) function MDFlatten.mdflatten(io, ::MarkdownAST.Node, e::DocsNode) # this special case separates top level blocks with newlines for node in e.mdasts @@ -1100,8 +1132,10 @@ function MDFlatten.mdflatten(io, ::MarkdownAST.Node, e::DocsNode) print(io, "\n\n\n\n") end end -MDFlatten.mdflatten(io, node::MarkdownAST.Node, ::PageLink) = MDFlatten.mdflatten(io, node.children) -MDFlatten.mdflatten(io, node::MarkdownAST.Node, ::LocalLink) = MDFlatten.mdflatten(io, node.children) +MDFlatten.mdflatten(io, node::MarkdownAST.Node, ::PageLink) = + MDFlatten.mdflatten(io, node.children) +MDFlatten.mdflatten(io, node::MarkdownAST.Node, ::LocalLink) = + MDFlatten.mdflatten(io, node.children) function MDFlatten.mdflatten(io, node::MarkdownAST.Node, ::LocalImage) print(io, "(Image: ") MDFlatten.mdflatten(io, node.children) diff --git a/src/expander_pipeline.jl b/src/expander_pipeline.jl index dcaa707765..4b9b2b9b81 100644 --- a/src/expander_pipeline.jl +++ b/src/expander_pipeline.jl @@ -31,12 +31,8 @@ Similar to `expand()`, but recursively calls itself on all descendants of `node` and applies `NestedExpanderPipeline` instead of `ExpanderPipeline`. 
""" function expand_recursively(node, page, doc) - if typeof(node.element) in ( - MarkdownAST.Admonition, - MarkdownAST.BlockQuote, - MarkdownAST.Item, - MarkdownAST.List, - ) + if typeof(node.element) in + (MarkdownAST.Admonition, MarkdownAST.BlockQuote, MarkdownAST.Item, MarkdownAST.List,) for child in node.children Selectors.dispatch(Expanders.NestedExpanderPipeline, child, page, doc) expand_recursively(child, page, doc) @@ -47,7 +43,8 @@ end # run some checks after expanding the page function pagecheck(page) # make sure there is no "continued code" lingering around - if haskey(page.globals.meta, :ContinuedCode) && !isempty(page.globals.meta[:ContinuedCode]) + if haskey(page.globals.meta, :ContinuedCode) && + !isempty(page.globals.meta[:ContinuedCode]) @warn "code from a continued @example block unused in $(Documenter.locrepr(page.source))." end end @@ -59,162 +56,169 @@ function create_draft_result!(node::Node; blocktype="code") codeblock.info = "julia" node.element = Documenter.MultiOutput(codeblock) push!(node.children, Node(codeblock)) - push!(node.children, Node(Documenter.MultiOutputElement( - Dict{MIME,Any}(MIME"text/plain"() => "<< $(blocktype)-block not executed in draft mode >>") - ))) + push!( + node.children, + Node( + Documenter.MultiOutputElement( + Dict{MIME,Any}( + MIME"text/plain"() => "<< $(blocktype)-block not executed in draft mode >>" + ) + ) + ) + ) end # Expander Pipeline. 
module Expanders - import ..Documenter # for docstring references - import ..Documenter.Selectors - - """ - The default node expander "pipeline", which consists of the following expanders: - - - [`TrackHeaders`](@ref) - - [`MetaBlocks`](@ref) - - [`DocsBlocks`](@ref) - - [`AutoDocsBlocks`](@ref) - - [`EvalBlocks`](@ref) - - [`IndexBlocks`](@ref) - - [`ContentsBlocks`](@ref) - - [`ExampleBlocks`](@ref) - - [`SetupBlocks`](@ref) - - [`REPLBlocks`](@ref) - - """ - abstract type ExpanderPipeline <: Selectors.AbstractSelector end - - """ - The subset of [node expanders](@ref ExpanderPipeline) which also apply in nested contexts. - - See also [`expand_recursively`](@ref Documenter.expand_recursively). - """ - abstract type NestedExpanderPipeline <: ExpanderPipeline end - - """ - Tracks all `Markdown.Header` nodes found in the parsed markdown files and stores an - [`Anchor`](@ref Documenter.Anchor) object for each one. - """ - abstract type TrackHeaders <: ExpanderPipeline end - - """ - Parses each code block where the language is `@meta` and evaluates the key/value pairs found - within the block, i.e. - - ````markdown - ```@meta - CurrentModule = Documenter - DocTestSetup = quote - using Documenter - end - ``` - ```` - """ - abstract type MetaBlocks <: ExpanderPipeline end - - """ - Parses each code block where the language is `@docs` and evaluates the expressions found - within the block. Replaces the block with the docstrings associated with each expression. - - ````markdown - ```@docs - Documenter - makedocs - deploydocs - ``` - ```` - """ - abstract type DocsBlocks <: ExpanderPipeline end - - """ - Parses each code block where the language is `@autodocs` and replaces it with all the - docstrings that match the provided key/value pairs `Modules = ...` and `Order = ...`. 
- - ````markdown - ```@autodocs - Modules = [Foo, Bar] - Order = [:function, :type] - ``` - ```` - """ - abstract type AutoDocsBlocks <: ExpanderPipeline end - - """ - Parses each code block where the language is `@eval` and evaluates it's content. Replaces - the block with the value resulting from the evaluation. This can be useful for inserting - generated content into a document such as plots. - - ````markdown - ```@eval - using PyPlot - x = linspace(-π, π) - y = sin(x) - plot(x, y, color = "red") - savefig("plot.svg") - Markdown.parse("![Plot](plot.svg)") - ``` - ```` - """ - abstract type EvalBlocks <: NestedExpanderPipeline end - - abstract type RawBlocks <: NestedExpanderPipeline end - - """ - Parses each code block where the language is `@index` and replaces it with an index of all - docstrings spliced into the document. The pages that are included can be set using a - key/value pair `Pages = [...]` such as - - ````markdown - ```@index - Pages = ["foo.md", "bar.md"] - ``` - ```` - """ - abstract type IndexBlocks <: ExpanderPipeline end - - """ - Parses each code block where the language is `@contents` and replaces it with a nested list - of all `Header` nodes in the generated document. The pages and depth of the list can be set - using `Pages = [...]` and `Depth = N` where `N` is and integer. - - ````markdown - ```@contents - Pages = ["foo.md", "bar.md"] - Depth = 1 - ``` - ```` - The default `Depth` value is `2`. - """ - abstract type ContentsBlocks <: ExpanderPipeline end - - """ - Parses each code block where the language is `@example` and evaluates the parsed Julia code - found within. The resulting value is then inserted into the final document after the source - code. - - ````markdown - ```@example - a = 1 - b = 2 - a + b - ``` - ```` - """ - abstract type ExampleBlocks <: NestedExpanderPipeline end - - """ - Similar to the [`ExampleBlocks`](@ref) expander, but inserts a Julia REPL prompt before each - toplevel expression in the final document. 
- """ - abstract type REPLBlocks <: NestedExpanderPipeline end - - """ - Similar to the [`ExampleBlocks`](@ref) expander, but hides all output in the final document. - """ - abstract type SetupBlocks <: NestedExpanderPipeline end +import ..Documenter # for docstring references +import ..Documenter.Selectors + +""" +The default node expander "pipeline", which consists of the following expanders: + +- [`TrackHeaders`](@ref) +- [`MetaBlocks`](@ref) +- [`DocsBlocks`](@ref) +- [`AutoDocsBlocks`](@ref) +- [`EvalBlocks`](@ref) +- [`IndexBlocks`](@ref) +- [`ContentsBlocks`](@ref) +- [`ExampleBlocks`](@ref) +- [`SetupBlocks`](@ref) +- [`REPLBlocks`](@ref) + +""" +abstract type ExpanderPipeline <: Selectors.AbstractSelector end + +""" +The subset of [node expanders](@ref ExpanderPipeline) which also apply in nested contexts. + +See also [`expand_recursively`](@ref Documenter.expand_recursively). +""" +abstract type NestedExpanderPipeline <: ExpanderPipeline end + +""" +Tracks all `Markdown.Header` nodes found in the parsed markdown files and stores an +[`Anchor`](@ref Documenter.Anchor) object for each one. +""" +abstract type TrackHeaders <: ExpanderPipeline end + +""" +Parses each code block where the language is `@meta` and evaluates the key/value pairs found +within the block, i.e. + +````markdown +```@meta +CurrentModule = Documenter +DocTestSetup = quote + using Documenter +end +``` +```` +""" +abstract type MetaBlocks <: ExpanderPipeline end + +""" +Parses each code block where the language is `@docs` and evaluates the expressions found +within the block. Replaces the block with the docstrings associated with each expression. + +````markdown +```@docs +Documenter +makedocs +deploydocs +``` +```` +""" +abstract type DocsBlocks <: ExpanderPipeline end + +""" +Parses each code block where the language is `@autodocs` and replaces it with all the +docstrings that match the provided key/value pairs `Modules = ...` and `Order = ...`. 
+ +````markdown +```@autodocs +Modules = [Foo, Bar] +Order = [:function, :type] +``` +```` +""" +abstract type AutoDocsBlocks <: ExpanderPipeline end + +""" +Parses each code block where the language is `@eval` and evaluates it's content. Replaces +the block with the value resulting from the evaluation. This can be useful for inserting +generated content into a document such as plots. + +````markdown +```@eval +using PyPlot +x = linspace(-π, π) +y = sin(x) +plot(x, y, color = "red") +savefig("plot.svg") +Markdown.parse("![Plot](plot.svg)") +``` +```` +""" +abstract type EvalBlocks <: NestedExpanderPipeline end + +abstract type RawBlocks <: NestedExpanderPipeline end + +""" +Parses each code block where the language is `@index` and replaces it with an index of all +docstrings spliced into the document. The pages that are included can be set using a +key/value pair `Pages = [...]` such as + +````markdown +```@index +Pages = ["foo.md", "bar.md"] +``` +```` +""" +abstract type IndexBlocks <: ExpanderPipeline end + +""" +Parses each code block where the language is `@contents` and replaces it with a nested list +of all `Header` nodes in the generated document. The pages and depth of the list can be set +using `Pages = [...]` and `Depth = N` where `N` is and integer. + +````markdown +```@contents +Pages = ["foo.md", "bar.md"] +Depth = 1 +``` +```` +The default `Depth` value is `2`. +""" +abstract type ContentsBlocks <: ExpanderPipeline end + +""" +Parses each code block where the language is `@example` and evaluates the parsed Julia code +found within. The resulting value is then inserted into the final document after the source +code. + +````markdown +```@example +a = 1 +b = 2 +a + b +``` +```` +""" +abstract type ExampleBlocks <: NestedExpanderPipeline end + +""" +Similar to the [`ExampleBlocks`](@ref) expander, but inserts a Julia REPL prompt before each +toplevel expression in the final document. 
+""" +abstract type REPLBlocks <: NestedExpanderPipeline end + +""" +Similar to the [`ExampleBlocks`](@ref) expander, but hides all output in the final document. +""" +abstract type SetupBlocks <: NestedExpanderPipeline end end Selectors.order(::Type{Expanders.TrackHeaders}) = 1.0 @@ -229,17 +233,17 @@ Selectors.order(::Type{Expanders.REPLBlocks}) = 9.0 Selectors.order(::Type{Expanders.SetupBlocks}) = 10.0 Selectors.order(::Type{Expanders.RawBlocks}) = 11.0 -Selectors.matcher(::Type{Expanders.TrackHeaders}, node, page, doc) = isa(node.element, MarkdownAST.Heading) -Selectors.matcher(::Type{Expanders.MetaBlocks}, node, page, doc) = iscode(node, "@meta") -Selectors.matcher(::Type{Expanders.DocsBlocks}, node, page, doc) = iscode(node, r"^@docs") +Selectors.matcher(::Type{Expanders.TrackHeaders}, node, page, doc) = isa(node.element, MarkdownAST.Heading) +Selectors.matcher(::Type{Expanders.MetaBlocks}, node, page, doc) = iscode(node, "@meta") +Selectors.matcher(::Type{Expanders.DocsBlocks}, node, page, doc) = iscode(node, r"^@docs") Selectors.matcher(::Type{Expanders.AutoDocsBlocks}, node, page, doc) = iscode(node, r"^@autodocs") -Selectors.matcher(::Type{Expanders.EvalBlocks}, node, page, doc) = iscode(node, "@eval") -Selectors.matcher(::Type{Expanders.IndexBlocks}, node, page, doc) = iscode(node, "@index") +Selectors.matcher(::Type{Expanders.EvalBlocks}, node, page, doc) = iscode(node, "@eval") +Selectors.matcher(::Type{Expanders.IndexBlocks}, node, page, doc) = iscode(node, "@index") Selectors.matcher(::Type{Expanders.ContentsBlocks}, node, page, doc) = iscode(node, "@contents") -Selectors.matcher(::Type{Expanders.ExampleBlocks}, node, page, doc) = iscode(node, r"^@example") -Selectors.matcher(::Type{Expanders.REPLBlocks}, node, page, doc) = iscode(node, r"^@repl") -Selectors.matcher(::Type{Expanders.SetupBlocks}, node, page, doc) = iscode(node, r"^@setup") -Selectors.matcher(::Type{Expanders.RawBlocks}, node, page, doc) = iscode(node, r"^@raw") 
+Selectors.matcher(::Type{Expanders.ExampleBlocks}, node, page, doc) = iscode(node, r"^@example") +Selectors.matcher(::Type{Expanders.REPLBlocks}, node, page, doc) = iscode(node, r"^@repl") +Selectors.matcher(::Type{Expanders.SetupBlocks}, node, page, doc) = iscode(node, r"^@setup") +Selectors.matcher(::Type{Expanders.RawBlocks}, node, page, doc) = iscode(node, r"^@raw") # Default Expander. @@ -252,22 +256,21 @@ Selectors.runner(::Type{Expanders.NestedExpanderPipeline}, node, page, doc) = no function Selectors.runner(::Type{Expanders.TrackHeaders}, node, page, doc) header = node.element # Get the header slug. - text = - if namedheader(node) - # If the Header is wrappend in an [](@id) link, we remove the Link element from - # the tree. - link_node = first(node.children) - MarkdownAST.unlink!(link_node) - append!(node.children, link_node.children) - match(NAMEDHEADER_REGEX, link_node.element.destination)[1] - else - # TODO: remove this hack (replace with mdflatten?) - ast = MarkdownAST.@ast MarkdownAST.Document() do - MarkdownAST.copy_tree(node) - end - md = convert(Markdown.MD, ast) - sprint(Markdown.plain, Markdown.Paragraph(md.content[1].text)) + text = if namedheader(node) + # If the Header is wrappend in an [](@id) link, we remove the Link element from + # the tree. + link_node = first(node.children) + MarkdownAST.unlink!(link_node) + append!(node.children, link_node.children) + match(NAMEDHEADER_REGEX, link_node.element.destination)[1] + else + # TODO: remove this hack (replace with mdflatten?) + ast = MarkdownAST.@ast MarkdownAST.Document() do + MarkdownAST.copy_tree(node) end + md = convert(Markdown.MD, ast) + sprint(Markdown.plain, Markdown.Paragraph(md.content[1].text)) + end slug = Documenter.slugify(text) # Add the header to the document's header map. anchor = Documenter.anchor_add!(doc.internal.headers, header, slug, page.build) @@ -293,7 +296,16 @@ function Selectors.runner(::Type{Expanders.MetaBlocks}, node, page, doc) # wants to hide. 
We should probably warn, but it is common enough that # we will silently skip for now. if Documenter.isassign(ex) - if !(ex.args[1] in (:CurrentModule, :DocTestSetup, :DocTestFilters, :EditURL, :Description, :Draft)) + if !( + ex.args[1] in ( + :CurrentModule, + :DocTestSetup, + :DocTestFilters, + :EditURL, + :Description, + :Draft + ) + ) source = Documenter.locrepr(page.source, lines) @warn( "In $source: `@meta` block has an unsupported " * @@ -303,13 +315,17 @@ function Selectors.runner(::Type{Expanders.MetaBlocks}, node, page, doc) try meta[ex.args[1]] = Core.eval(Main, ex.args[2]) catch err - @docerror(doc, :meta_block, + @docerror( + doc, + :meta_block, """ failed to evaluate `$(strip(str))` in `@meta` block in $(Documenter.locrepr(page.source, lines)) ```$(x.info) $(x.code) ``` - """, exception = err) + """, + exception = err + ) end end end @@ -369,34 +385,46 @@ function Selectors.runner(::Type{Expanders.DocsBlocks}, node, page, doc) lines = Documenter.find_block_in_file(x.code, page.source) @debug "Evaluating @docs block:\n$(x.code)" for (ex, str) in Documenter.parseblock(x.code, doc, page) - admonition = first(Documenter.mdparse(""" - !!! warning "Missing docstring." + admonition = first( + Documenter.mdparse( + """ +!!! warning "Missing docstring." - Missing docstring for `$(strip(str))`. Check Documenter's build log for details. - """, mode=:blocks)) + Missing docstring for `$(strip(str))`. Check Documenter's build log for details. +""", + mode=:blocks + ) + ) binding = try Documenter.DocSystem.binding(curmod, ex) catch err - @docerror(doc, :docs_block, + @docerror( + doc, + :docs_block, """ unable to get the binding for '$(strip(str))' in `@docs` block in $(Documenter.locrepr(page.source, lines)) from expression '$(repr(ex))' in module $(curmod) ```$(x.info) $(x.code) ``` """, - exception = err) + exception = err + ) push!(docsnodes, admonition) continue end # Undefined `Bindings` get discarded. 
- if !Documenter.DocSystem.iskeyword(binding) && !Documenter.DocSystem.defined(binding) - @docerror(doc, :docs_block, + if !Documenter.DocSystem.iskeyword(binding) && + !Documenter.DocSystem.defined(binding) + @docerror( + doc, + :docs_block, """ undefined binding '$(binding)' in `@docs` block in $(Documenter.locrepr(page.source, lines)) ```$(x.info) $(x.code) ``` - """) + """ + ) push!(docsnodes, admonition) continue end @@ -404,19 +432,22 @@ function Selectors.runner(::Type{Expanders.DocsBlocks}, node, page, doc) object = make_object(binding, typesig, is_canonical, doc, page) # We can't include the same object more than once in a document. if haskey(doc.internal.objects, object) - @docerror(doc, :docs_block, + @docerror( + doc, + :docs_block, """ duplicate docs found for '$(strip(str))' in `@docs` block in $(Documenter.locrepr(page.source, lines)) ```$(x.info) $(x.code) ``` - """) + """ + ) push!(docsnodes, admonition) continue end # Find the docs matching `binding` and `typesig`. Only search within the provided modules. - docs = Documenter.DocSystem.getdocs(binding, typesig; modules = doc.blueprint.modules) + docs = Documenter.DocSystem.getdocs(binding, typesig; modules=doc.blueprint.modules) # Include only docstrings from user-provided modules if provided. if !isempty(doc.blueprint.modules) @@ -425,13 +456,16 @@ function Selectors.runner(::Type{Expanders.DocsBlocks}, node, page, doc) # Check that we aren't printing an empty docs list. Skip block when empty. 
if isempty(docs) - @docerror(doc, :docs_block, + @docerror( + doc, + :docs_block, """ no docs found for '$(strip(str))' in `@docs` block in $(Documenter.locrepr(page.source, lines)) ```$(x.info) $(x.code) ``` - """) + """ + ) push!(docsnodes, admonition) continue end @@ -462,7 +496,7 @@ function Selectors.runner(::Type{Expanders.AutoDocsBlocks}, node, page, doc) x = node.element is_canonical = parse_docs_args("autodocs", x.info) curmod = get(page.globals.meta, :CurrentModule, Main) - fields = Dict{Symbol, Any}() + fields = Dict{Symbol,Any}() lines = Documenter.find_block_in_file(x.code, page.source) @debug "Evaluating @autodocs block:\n$(x.code)" for (ex, str) in Documenter.parseblock(x.code, doc, page) @@ -480,13 +514,17 @@ function Selectors.runner(::Type{Expanders.AutoDocsBlocks}, node, page, doc) ) end catch err - @docerror(doc, :autodocs_block, + @docerror( + doc, + :autodocs_block, """ failed to evaluate `$(strip(str))` in `@autodocs` block in $(Documenter.locrepr(page.source, lines)) ```$(x.info) $(x.code) ``` - """, exception = err) + """, + exception = err + ) end end end @@ -509,42 +547,53 @@ function Selectors.runner(::Type{Expanders.AutoDocsBlocks}, node, page, doc) Documenter.DocSystem.category(binding) catch err isa(err, UndefVarError) || rethrow(err) - @docerror(doc, :autodocs_block, - """ - @autodocs ($(Documenter.locrepr(page.source, lines))) encountered a bad docstring binding '$(binding)' - ```$(x.info) - $(x.code) - ``` - This is likely due to a bug in the Julia docsystem relating to the handling of - docstrings attached to methods of callable objects. See: - - https://github.com/JuliaLang/julia/issues/45174 - - As a workaround, the docstrings for the functor methods could be included in the docstring - of the type definition. This error can also be ignored by disabling strict checking for - :autodocs_block in the makedocs call with e.g. 
- - warnonly = [:autodocs_block] - - However, the relevant docstrings will then not be included by the @autodocs block. - """, exception = err) + @docerror( + doc, + :autodocs_block, + """ + @autodocs ($(Documenter.locrepr(page.source, lines))) encountered a bad docstring binding '$(binding)' + ```$(x.info) + $(x.code) + ``` + This is likely due to a bug in the Julia docsystem relating to the handling of + docstrings attached to methods of callable objects. See: + + https://github.com/JuliaLang/julia/issues/45174 + + As a workaround, the docstrings for the functor methods could be included in the docstring + of the type definition. This error can also be ignored by disabling strict checking for + :autodocs_block in the makedocs call with e.g. + + warnonly = [:autodocs_block] + + However, the relevant docstrings will then not be included by the @autodocs block. + """, + exception = err + ) continue # skip this docstring end if category in order && included # filter the elements after category/order has been evaluated # to ensure that e.g. 
when `Order = [:type]` is given, the filter # function really receives only types - filtered = Base.invokelatest(filterfunc, Core.eval(binding.mod, binding.var)) + filtered = + Base.invokelatest(filterfunc, Core.eval(binding.mod, binding.var)) if filtered for (typesig, docstr) in multidoc.docs path = normpath(docstr.data[:path]) object = make_object(binding, typesig, is_canonical, doc, page) if isempty(pages) - push!(results, (mod, path, category, object, isexported, docstr)) + push!( + results, + (mod, path, category, object, isexported, docstr) + ) else for p in pages if endswith(path, p) - push!(results, (mod, p, category, object, isexported, docstr)) + push!( + results, + (mod, p, category, object, isexported, docstr) + ) break end end @@ -562,23 +611,26 @@ function Selectors.runner(::Type{Expanders.AutoDocsBlocks}, node, page, doc) comparison = function (a, b) local t (t = Documenter._compare(modulemap, 1, a, b)) == 0 || return t < 0 # module - (t = Documenter._compare(pagesmap, 2, a, b)) == 0 || return t < 0 # page - (t = Documenter._compare(ordermap, 3, a, b)) == 0 || return t < 0 # category + (t = Documenter._compare(pagesmap, 2, a, b)) == 0 || return t < 0 # page + (t = Documenter._compare(ordermap, 3, a, b)) == 0 || return t < 0 # category string(a[4]) < string(b[4]) # name end - sort!(results; lt = comparison) + sort!(results; lt=comparison) # Finalise docstrings. 
docsnodes = Node[] for (mod, path, category, object, isexported, docstr) in results if haskey(doc.internal.objects, object) - @docerror(doc, :autodocs_block, + @docerror( + doc, + :autodocs_block, """ duplicate docs found for '$(object.binding)' in $(Documenter.locrepr(page.source, lines)) ```$(x.info) $(x.code) ``` - """) + """ + ) continue end markdown::Markdown.MD = Documenter.DocSystem.parsedoc(docstr) @@ -595,13 +647,16 @@ function Selectors.runner(::Type{Expanders.AutoDocsBlocks}, node, page, doc) push!(node.children, docsnode) end else - @docerror(doc, :autodocs_block, + @docerror( + doc, + :autodocs_block, """ '@autodocs' missing 'Modules = ...' in $(Documenter.locrepr(page.source, lines)) ```$(x.info) $(x.code) ``` - """) + """ + ) end end @@ -620,24 +675,33 @@ function Selectors.runner(::Type{Expanders.EvalBlocks}, node, page, doc) end sandbox = Module(:EvalBlockSandbox) lines = Documenter.find_block_in_file(x.code, page.source) - linenumbernode = LineNumberNode(lines === nothing ? 0 : lines.first, - basename(page.source)) + linenumbernode = + LineNumberNode(lines === nothing ? 
0 : lines.first, basename(page.source)) @debug "Evaluating @eval block:\n$(x.code)" cd(page.workdir) do result = nothing - for (ex, str) in Documenter.parseblock(x.code, doc, page; keywords = false, - linenumbernode = linenumbernode) + for (ex, str) in Documenter.parseblock( + x.code, + doc, + page; + keywords=false, + linenumbernode=linenumbernode + ) try result = Core.eval(sandbox, ex) catch err bt = Documenter.remove_common_backtrace(catch_backtrace()) - @docerror(doc, :eval_block, + @docerror( + doc, + :eval_block, """ failed to evaluate `@eval` block in $(Documenter.locrepr(page.source)) ```$(x.info) $(x.code) ``` - """, exception = (err, bt)) + """, + exception = (err, bt) + ) end end result = if isnothing(result) @@ -647,19 +711,23 @@ function Selectors.runner(::Type{Expanders.EvalBlocks}, node, page, doc) else # TODO: we could handle the cases where the user provides some of the Markdown library # objects, like Paragraph. - @docerror(doc, :eval_block, """ - Invalid type of object in @eval in $(Documenter.locrepr(page.source)) - ```$(x.info) - $(x.code) - ``` - Evaluated to `$(typeof(result))`, but should be one of - - Nothing - - Markdown.MD - Falling back to textual code block representation. - - If you are seeing this warning/error after upgrading Documenter and this used to work, - please open an issue on the Documenter issue tracker. - """) + @docerror( + doc, + :eval_block, + """ +Invalid type of object in @eval in $(Documenter.locrepr(page.source)) +```$(x.info) +$(x.code) +``` +Evaluated to `$(typeof(result))`, but should be one of + - Nothing + - Markdown.MD +Falling back to textual code block representation. + +If you are seeing this warning/error after upgrading Documenter and this used to work, +please open an issue on the Documenter issue tracker. 
+""" + ) MarkdownAST.@ast MarkdownAST.Document() do MarkdownAST.CodeBlock("", sprint(show, MIME"text/plain"(), result)) end @@ -739,17 +807,23 @@ function Selectors.runner(::Type{Expanders.ExampleBlocks}, node, page, doc) result, buffer = nothing, IOBuffer() if !continued # run the code # check if there is any code waiting - if haskey(page.globals.meta, :ContinuedCode) && haskey(page.globals.meta[:ContinuedCode], sym) + if haskey(page.globals.meta, :ContinuedCode) && + haskey(page.globals.meta[:ContinuedCode], sym) code = page.globals.meta[:ContinuedCode][sym] * '\n' * x.code delete!(page.globals.meta[:ContinuedCode], sym) else code = x.code end - linenumbernode = LineNumberNode(lines === nothing ? 0 : lines.first, - basename(page.source)) - for (ex, str) in Documenter.parseblock(code, doc, page; keywords = false, - linenumbernode = linenumbernode) - c = IOCapture.capture(rethrow = InterruptException, color = ansicolor) do + linenumbernode = + LineNumberNode(lines === nothing ? 0 : lines.first, basename(page.source)) + for (ex, str) in Documenter.parseblock( + code, + doc, + page; + keywords=false, + linenumbernode=linenumbernode + ) + c = IOCapture.capture(rethrow=InterruptException, color=ansicolor) do cd(page.workdir) do Core.eval(mod, ex) end @@ -759,13 +833,17 @@ function Selectors.runner(::Type{Expanders.ExampleBlocks}, node, page, doc) print(buffer, c.output) if c.error bt = Documenter.remove_common_backtrace(c.backtrace) - @docerror(doc, :example_block, + @docerror( + doc, + :example_block, """ failed to run `@example` block in $(Documenter.locrepr(page.source, lines)) ```$(x.info) $(x.code) ``` - """, exception = (c.value, bt)) + """, + exception = (c.value, bt) + ) return end end @@ -778,7 +856,7 @@ function Selectors.runner(::Type{Expanders.ExampleBlocks}, node, page, doc) input = droplines(x.code) # Generate different in different formats and let each writer select - output = Base.invokelatest(Documenter.display_dict, result, context = :color => 
ansicolor) + output = Base.invokelatest(Documenter.display_dict, result, context=:color => ansicolor) # Remove references to gensym'd module from text/plain m = MIME"text/plain"() if haskey(output, m) @@ -790,7 +868,14 @@ function Selectors.runner(::Type{Expanders.ExampleBlocks}, node, page, doc) if result === nothing stdouterr = Documenter.sanitise(buffer) stdouterr = remove_sandbox_from_output(stdouterr, mod) - isempty(stdouterr) || push!(content, Node(Documenter.MultiOutputElement(Dict{MIME,Any}(MIME"text/plain"() => stdouterr)))) + isempty(stdouterr) || push!( + content, + Node( + Documenter.MultiOutputElement( + Dict{MIME,Any}(MIME"text/plain"() => stdouterr) + ) + ) + ) elseif !isempty(output) push!(content, Node(Documenter.MultiOutputElement(output))) end @@ -837,20 +922,25 @@ function Selectors.runner(::Type{Expanders.REPLBlocks}, node, page, doc) multicodeblock = MarkdownAST.CodeBlock[] linenumbernode = LineNumberNode(0, "REPL") # line unused, set to 0 @debug "Evaluating @repl block:\n$(x.code)" - for (ex, str) in Documenter.parseblock(x.code, doc, page; keywords = false, - linenumbernode = linenumbernode) - input = droplines(str) + for (ex, str) in Documenter.parseblock( + x.code, + doc, + page; + keywords=false, + linenumbernode=linenumbernode + ) + input = droplines(str) # Use the REPL softscope for REPLBlocks, # see https://github.com/JuliaLang/julia/pull/33864 ex = REPL.softscope(ex) - c = IOCapture.capture(rethrow = InterruptException, color = ansicolor) do + c = IOCapture.capture(rethrow=InterruptException, color=ansicolor) do cd(page.workdir) do Core.eval(mod, ex) end end Core.eval(mod, Expr(:global, Expr(:(=), :ans, QuoteNode(c.value)))) result = c.value - buf = IOContext(IOBuffer(), :color=>ansicolor) + buf = IOContext(IOBuffer(), :color => ansicolor) output = if !c.error hide = REPL.ends_with_semicolon(input) result_to_string(buf, hide ? 
nothing : c.value) @@ -858,7 +948,10 @@ function Selectors.runner(::Type{Expanders.REPLBlocks}, node, page, doc) error_to_string(buf, c.value, []) end if !isempty(input) - push!(multicodeblock, MarkdownAST.CodeBlock("julia-repl", prepend_prompt(input))) + push!( + multicodeblock, + MarkdownAST.CodeBlock("julia-repl", prepend_prompt(input)) + ) end out = IOBuffer() print(out, c.output) # c.output is std(out|err) @@ -908,13 +1001,17 @@ function Selectors.runner(::Type{Expanders.SetupBlocks}, node, page, doc) end catch err bt = Documenter.remove_common_backtrace(catch_backtrace()) - @docerror(doc, :setup_block, + @docerror( + doc, + :setup_block, """ failed to run `@setup` block in $(Documenter.locrepr(page.source)) ```$(x.info) $(x.code) ``` - """, exception=(err, bt)) + """, + exception = (err, bt) + ) end node.element = Documenter.SetupNode(x.info, x.code) end @@ -952,9 +1049,9 @@ function namedheader(node::Node) end # Remove any `# hide` lines, leading/trailing blank lines, and trailing whitespace. -function droplines(code; skip = 0) +function droplines(code; skip=0) buffer = IOBuffer() - for line in split(code, r"\r?\n")[(skip + 1):end] + for line in split(code, r"\r?\n")[(skip+1):end] occursin(r"^(.*)#\s*hide$", line) && continue println(buffer, rstrip(line)) end @@ -964,7 +1061,7 @@ end function prepend_prompt(input) prompt = "julia> " padding = " "^length(prompt) - out = IOBuffer() + out = IOBuffer() for (n, line) in enumerate(split(input, '\n')) line = rstrip(line) println(out, n == 1 ? 
prompt : padding, line) diff --git a/src/html/HTMLWriter.jl b/src/html/HTMLWriter.jl index f5226a5d58..8dd154a4d3 100644 --- a/src/html/HTMLWriter.jl +++ b/src/html/HTMLWriter.jl @@ -71,12 +71,17 @@ const ASSETS_SASS = joinpath(ASSETS, "scss") const ASSETS_THEMES = joinpath(ASSETS, "themes") struct HTMLAsset - class :: Symbol - uri :: String - islocal :: Bool - attributes::Dict{Symbol, String} - - function HTMLAsset(class::Symbol, uri::String, islocal::Bool, attributes::Dict{Symbol, String}=Dict{Symbol,String}()) + class::Symbol + uri::String + islocal::Bool + attributes::Dict{Symbol,String} + + function HTMLAsset( + class::Symbol, + uri::String, + islocal::Bool, + attributes::Dict{Symbol,String}=Dict{Symbol,String}() + ) if !islocal && match(r"^https?://", uri) === nothing error("Remote asset URL must start with http:// or https://") end @@ -123,7 +128,7 @@ Documenter.HTML(assets = [ ]) ``` """ -function asset(uri; class = nothing, islocal=false, attributes=Dict{Symbol,String}()) +function asset(uri; class=nothing, islocal=false, attributes=Dict{Symbol,String}()) if class === nothing class = assetclass(uri) (class === nothing) && error(""" @@ -137,9 +142,7 @@ end function assetclass(uri) # TODO: support actual proper URIs ext = splitext(uri)[end] - ext == ".ico" ? :ico : - ext == ".css" ? :css : - ext == ".js" ? :js : :unknown + ext == ".ico" ? :ico : ext == ".css" ? :css : ext == ".js" ? :js : :unknown end abstract type MathEngine end @@ -162,13 +165,13 @@ setting `override` to `true`, in which case the default values are ignored and o user-provided dictionary is used. 
""" struct KaTeX <: MathEngine - config :: Dict{Symbol,Any} - function KaTeX(config::Union{Dict,Nothing} = nothing, override=false) + config::Dict{Symbol,Any} + function KaTeX(config::Union{Dict,Nothing}=nothing, override=false) default = Dict( :delimiters => [ - Dict(:left => raw"$", :right => raw"$", display => false), - Dict(:left => raw"$$", :right => raw"$$", display => true), - Dict(:left => raw"\[", :right => raw"\]", display => true), + Dict(:left => raw"$", :right => raw"$", display => false), + Dict(:left => raw"$$", :right => raw"$$", display => true), + Dict(:left => raw"\[", :right => raw"\]", display => true), ] ) new((config === nothing) ? default : override ? config : merge(default, config)) @@ -197,20 +200,16 @@ The URL of the MathJax JS file can be overridden using the `url` keyword argumen use a particular minor version). """ struct MathJax2 <: MathEngine - config :: Dict{Symbol,Any} - url :: String - function MathJax2(config::Union{Dict,Nothing} = nothing, override=false; url = "") + config::Dict{Symbol,Any} + url::String + function MathJax2(config::Union{Dict,Nothing}=nothing, override=false; url="") default = Dict( :tex2jax => Dict( - "inlineMath" => [["\$","\$"], ["\\(","\\)"]], + "inlineMath" => [["\$", "\$"], ["\\(", "\\)"]], "processEscapes" => true ), :config => ["MMLorHTML.js"], - :jax => [ - "input/TeX", - "output/HTML-CSS", - "output/NativeMML" - ], + :jax => ["input/TeX", "output/HTML-CSS", "output/NativeMML"], :extensions => [ "MathMenu.js", "MathZoom.js", @@ -221,11 +220,17 @@ struct MathJax2 <: MathEngine ], :TeX => Dict(:equationNumbers => Dict(:autoNumber => "AMS")) ) - new((config === nothing) ? default : override ? config : merge(default, config), url) + new( + (config === nothing) ? default : override ? 
config : merge(default, config), + url + ) end end -@deprecate MathJax(config::Union{Dict,Nothing} = nothing, override=false) MathJax2(config, override) false +@deprecate MathJax(config::Union{Dict,Nothing}=nothing, override=false) MathJax2( + config, + override +) false @doc "deprecated – Use [`MathJax2`](@ref) instead" MathJax """ @@ -252,12 +257,12 @@ The URL of the MathJax JS file can be overridden using the `url` keyword argumen use a particular minor version). """ struct MathJax3 <: MathEngine - config :: Dict{Symbol,Any} - url :: String - function MathJax3(config::Union{Dict,Nothing} = nothing, override=false; url = "") + config::Dict{Symbol,Any} + url::String + function MathJax3(config::Union{Dict,Nothing}=nothing, override=false; url="") default = Dict( :tex => Dict( - "inlineMath" => [["\$","\$"], ["\\(","\\)"]], + "inlineMath" => [["\$", "\$"], ["\\(", "\\)"]], "tags" => "ams", "packages" => ["base", "ams", "autoload"], ), @@ -266,7 +271,10 @@ struct MathJax3 <: MathEngine "processHtmlClass" => "tex2jax_process", ) ) - new((config === nothing) ? default : override ? config : merge(default, config), url) + new( + (config === nothing) ? default : override ? config : merge(default, config), + url + ) end end @@ -436,73 +444,81 @@ their absolute URLs, can be included with the [`asset`](@ref) function. [^1]: Adding an ICO asset is primarily useful for setting a custom `favicon`. 
""" struct HTML <: Documenter.Writer - prettyurls :: Bool - disable_git :: Bool - edit_link :: Union{String, Symbol, Nothing} - repolink :: Union{String, Nothing, Default{Nothing}} - canonical :: Union{String, Nothing} - assets :: Vector{HTMLAsset} - analytics :: String - collapselevel :: Int - sidebar_sitename :: Bool - highlights :: Vector{String} - mathengine :: Union{MathEngine,Nothing} - description :: Union{String,Nothing} - footer :: Union{MarkdownAST.Node, Nothing} - ansicolor :: Bool - lang :: String - warn_outdated :: Bool - prerender :: Bool - node :: Union{Cmd,String,Nothing} - highlightjs :: Union{String,Nothing} - size_threshold :: Int - size_threshold_warn :: Int - size_threshold_ignore :: Vector{String} - example_size_threshold :: Int + prettyurls::Bool + disable_git::Bool + edit_link::Union{String,Symbol,Nothing} + repolink::Union{String,Nothing,Default{Nothing}} + canonical::Union{String,Nothing} + assets::Vector{HTMLAsset} + analytics::String + collapselevel::Int + sidebar_sitename::Bool + highlights::Vector{String} + mathengine::Union{MathEngine,Nothing} + description::Union{String,Nothing} + footer::Union{MarkdownAST.Node,Nothing} + ansicolor::Bool + lang::String + warn_outdated::Bool + prerender::Bool + node::Union{Cmd,String,Nothing} + highlightjs::Union{String,Nothing} + size_threshold::Int + size_threshold_warn::Int + size_threshold_ignore::Vector{String} + example_size_threshold::Int function HTML(; - prettyurls :: Bool = true, - disable_git :: Bool = false, - repolink :: Union{String, Nothing, Default} = Default(nothing), - edit_link :: Union{String, Symbol, Nothing, Default} = Default(Documenter.git_remote_head_branch("HTML(edit_link = ...)", Documenter.currentdir())), - canonical :: Union{String, Nothing} = nothing, - assets :: Vector = String[], - analytics :: String = "", - collapselevel :: Integer = 2, - sidebar_sitename :: Bool = true, - highlights :: Vector{String} = String[], - mathengine :: Union{MathEngine,Nothing} = KaTeX(), - 
description :: Union{String, Nothing} = nothing, - footer :: Union{String, Nothing} = "Powered by [Documenter.jl](https://github.com/JuliaDocs/Documenter.jl) and the [Julia Programming Language](https://julialang.org/).", - ansicolor :: Bool = true, - lang :: String = "en", - warn_outdated :: Bool = true, - prerender :: Bool = false, - node :: Union{Cmd,String,Nothing} = nothing, - highlightjs :: Union{String,Nothing} = nothing, - size_threshold :: Union{Integer, Nothing} = 200 * 2^10, # 200 KiB - size_threshold_warn :: Union{Integer, Nothing} = 100 * 2^10, # 100 KiB - size_threshold_ignore :: Vector = String[], - # The choice of the default here is that having ~10 figures on a page - # seems reasonable, and that would lead to ~80 KiB, which is still fine - # and leaves a buffer before hitting `size_threshold_warn`. - example_size_threshold :: Union{Integer, Nothing} = 8 * 2^10, # 8 KiB - - # deprecated keywords - edit_branch :: Union{String, Nothing, Default} = Default(nothing), - ) + prettyurls::Bool=true, + disable_git::Bool=false, + repolink::Union{String,Nothing,Default}=Default(nothing), + edit_link::Union{String,Symbol,Nothing,Default}=Default( + Documenter.git_remote_head_branch( + "HTML(edit_link = ...)", + Documenter.currentdir() + ) + ), + canonical::Union{String,Nothing}=nothing, + assets::Vector=String[], + analytics::String="", + collapselevel::Integer=2, + sidebar_sitename::Bool=true, + highlights::Vector{String}=String[], + mathengine::Union{MathEngine,Nothing}=KaTeX(), + description::Union{String,Nothing}=nothing, + footer::Union{String,Nothing}="Powered by [Documenter.jl](https://github.com/JuliaDocs/Documenter.jl) and the [Julia Programming Language](https://julialang.org/).", + ansicolor::Bool=true, + lang::String="en", + warn_outdated::Bool=true, + prerender::Bool=false, + node::Union{Cmd,String,Nothing}=nothing, + highlightjs::Union{String,Nothing}=nothing, + size_threshold::Union{Integer,Nothing}=200 * 2^10, # 200 KiB + 
size_threshold_warn::Union{Integer,Nothing}=100 * 2^10, # 100 KiB + size_threshold_ignore::Vector=String[], + # The choice of the default here is that having ~10 figures on a page + # seems reasonable, and that would lead to ~80 KiB, which is still fine + # and leaves a buffer before hitting `size_threshold_warn`. + example_size_threshold::Union{Integer,Nothing}=8 * 2^10, # 8 KiB + + # deprecated keywords + edit_branch::Union{String,Nothing,Default}=Default(nothing), + ) collapselevel >= 1 || throw(ArgumentError("collapselevel must be >= 1")) if prerender - prerender, node, highlightjs = prepare_prerendering(prerender, node, highlightjs, highlights) + prerender, node, highlightjs = + prepare_prerendering(prerender, node, highlightjs, highlights) end assets = map(assets) do asset isa(asset, HTMLAsset) && return asset - isa(asset, AbstractString) && return HTMLAsset(assetclass(asset), asset, true) + isa(asset, AbstractString) && + return HTMLAsset(assetclass(asset), asset, true) error("Invalid value in assets: $(asset) [$(typeof(asset))]") end # Handle edit_branch deprecation if !isa(edit_branch, Default) - isa(edit_link, Default) || error("Can't specify edit_branch (deprecated) and edit_link simultaneously") + isa(edit_link, Default) || + error("Can't specify edit_branch (deprecated) and edit_link simultaneously") @warn """ The edit_branch keyword is deprecated -- use edit_link instead. Note: `edit_branch = nothing` must be changed to `edit_link = :commit`. @@ -515,7 +531,11 @@ struct HTML <: Documenter.Writer if footer !== nothing footer = Markdown.parse(footer) if !(length(footer.content) == 1 && footer.content[1] isa Markdown.Paragraph) - throw(ArgumentError("footer must be a single-line markdown compatible string.")) + throw( + ArgumentError( + "footer must be a single-line markdown compatible string." + ) + ) end footer = isnothing(footer) ? 
nothing : convert(Node, footer) end @@ -523,25 +543,59 @@ struct HTML <: Documenter.Writer if isnothing(size_threshold) size_threshold = typemax(Int) elseif size_threshold <= 0 - throw(ArgumentError("size_threshold must be non-negative, got $(size_threshold)")) + throw( + ArgumentError("size_threshold must be non-negative, got $(size_threshold)") + ) end if isnothing(size_threshold_warn) size_threshold_warn = min(typemax(Int), size_threshold) elseif size_threshold_warn <= 0 - throw(ArgumentError("size_threshold_warn must be non-negative, got $(size_threshold_warn)")) + throw( + ArgumentError( + "size_threshold_warn must be non-negative, got $(size_threshold_warn)" + ) + ) elseif size_threshold_warn > size_threshold - throw(ArgumentError("size_threshold_warn ($size_threshold_warn) must be smaller than size_threshold ($size_threshold)")) + throw( + ArgumentError( + "size_threshold_warn ($size_threshold_warn) must be smaller than size_threshold ($size_threshold)" + ) + ) end if isnothing(example_size_threshold) example_size_threshold = typemax(Int) elseif example_size_threshold < 0 - throw(ArgumentError("example_size_threshold must be non-negative, got $(example_size_threshold)")) + throw( + ArgumentError( + "example_size_threshold must be non-negative, got $(example_size_threshold)" + ) + ) end isa(edit_link, Default) && (edit_link = edit_link[]) - new(prettyurls, disable_git, edit_link, repolink, canonical, assets, analytics, - collapselevel, sidebar_sitename, highlights, mathengine, description, footer, - ansicolor, lang, warn_outdated, prerender, node, highlightjs, - size_threshold, size_threshold_warn, size_threshold_ignore, example_size_threshold, + new( + prettyurls, + disable_git, + edit_link, + repolink, + canonical, + assets, + analytics, + collapselevel, + sidebar_sitename, + highlights, + mathengine, + description, + footer, + ansicolor, + lang, + warn_outdated, + prerender, + node, + highlightjs, + size_threshold, + size_threshold_warn, + 
size_threshold_ignore, + example_size_threshold, ) end end @@ -570,7 +624,7 @@ function prepare_prerendering(prerender, node, highlightjs, highlights) @debug "HTMLWriter: downloading highlightjs" r = Documenter.JSDependencies.RequireJS([]) RD.highlightjs!(r, highlights) - libs = sort!(collect(r.libraries); by = first) # puts highlight first + libs = sort!(collect(r.libraries); by=first) # puts highlight first key = join((x.first for x in libs), ',') highlightjs = get!(HLJSFILES, key) do path, io = mktemp() @@ -589,13 +643,13 @@ end include("RD.jl") struct SearchRecord - src :: String - page :: Documenter.Page - fragment :: String - category :: String - title :: String - page_title :: String - text :: String + src::String + page::Documenter.Page + fragment::String + category::String + title::String + page_title::String + text::String end Base.@kwdef struct AtExampleFallbackWarning @@ -609,19 +663,26 @@ end other recursive functions. """ mutable struct HTMLContext - doc :: Documenter.Document - settings :: Union{HTML, Nothing} - scripts :: Vector{String} - documenter_js :: String - themeswap_js :: String - warner_js :: String - search_index :: Vector{SearchRecord} - search_index_js :: String - search_navnode :: Documenter.NavNode + doc::Documenter.Document + settings::Union{HTML,Nothing} + scripts::Vector{String} + documenter_js::String + themeswap_js::String + warner_js::String + search_index::Vector{SearchRecord} + search_index_js::String + search_navnode::Documenter.NavNode atexample_warnings::Vector{AtExampleFallbackWarning} HTMLContext(doc, settings=nothing) = new( - doc, settings, [], "", "", "", [], "", + doc, + settings, + [], + "", + "", + "", + [], + "", Documenter.NavNode("search", "Search", nothing), AtExampleFallbackWarning[], ) @@ -629,24 +690,31 @@ end struct DCtx # ctx and navnode were recursively passed to all domify() methods - ctx :: HTMLContext - navnode :: Documenter.NavNode + ctx::HTMLContext + navnode::Documenter.NavNode # The following fields 
were keyword arguments to mdconvert() - droplinks :: Bool - settings :: Union{HTML, Nothing} - footnotes :: Union{Vector{Node{Nothing}},Nothing} + droplinks::Bool + settings::Union{HTML,Nothing} + footnotes::Union{Vector{Node{Nothing}},Nothing} DCtx(ctx, navnode, droplinks=false) = new(ctx, navnode, droplinks, ctx.settings, []) DCtx( dctx::DCtx; - navnode = dctx.navnode, - droplinks = dctx.droplinks, - settings = dctx.settings, - footnotes = dctx.footnotes, + navnode=dctx.navnode, + droplinks=dctx.droplinks, + settings=dctx.settings, + footnotes=dctx.footnotes, ) = new(dctx.ctx, navnode, droplinks, settings, footnotes) end -function SearchRecord(ctx::HTMLContext, navnode; fragment="", title=nothing, category="page", text="") +function SearchRecord( + ctx::HTMLContext, + navnode; + fragment="", + title=nothing, + category="page", + text="" +) page_title = mdflatten_pagetitle(DCtx(ctx, navnode)) if title === nothing title = page_title @@ -662,12 +730,20 @@ function SearchRecord(ctx::HTMLContext, navnode; fragment="", title=nothing, cat ) end -function SearchRecord(ctx::HTMLContext, navnode, node::Node, element::Documenter.AnchoredHeader) +function SearchRecord( + ctx::HTMLContext, + navnode, + node::Node, + element::Documenter.AnchoredHeader +) a = element.anchor - SearchRecord(ctx, navnode; + SearchRecord( + ctx, + navnode; fragment=Documenter.anchor_fragment(a), title=mdflatten(node), # AnchoredHeader has Heading as single child - category="section") + category="section" + ) end function SearchRecord(ctx, navnode, node::Node, ::MarkdownAST.AbstractElement) @@ -678,7 +754,7 @@ function JSON.lower(rec::SearchRecord) # Replace any backslashes in links, if building the docs on Windows src = replace(rec.src, '\\' => '/') ref = string(src, rec.fragment) - Dict{String, String}( + Dict{String,String}( "location" => ref, "page" => rec.page_title, "title" => rec.title, @@ -700,10 +776,13 @@ function render(doc::Documenter.Document, settings::HTML=HTML()) if 
isempty(doc.blueprint.pages) error("Aborting HTML build: no pages under src/") elseif !haskey(doc.blueprint.pages, "index.md") - @warn "Can't generate landing page (index.html): src/index.md missing" keys(doc.blueprint.pages) + @warn "Can't generate landing page (index.html): src/index.md missing" keys( + doc.blueprint.pages + ) end - if isa(settings.repolink, Default) && (isnothing(doc.user.remote) || Remotes.repourl(doc.user.remote) === nothing) + if isa(settings.repolink, Default) && + (isnothing(doc.user.remote) || Remotes.repourl(doc.user.remote) === nothing) @warn """ Unable to determine the repository root URL for the navbar link. This can happen when a string is passed to the `repo` keyword of `makedocs`. @@ -725,7 +804,11 @@ function render(doc::Documenter.Document, settings::HTML=HTML()) @warn "not creating 'documenter.js', provided by the user." else r = JSDependencies.RequireJS([ - RD.jquery, RD.jqueryui, RD.headroom, RD.headroom_jquery, RD.minisearch, + RD.jquery, + RD.jqueryui, + RD.headroom, + RD.headroom_jquery, + RD.minisearch, ]) RD.mathengine!(r, settings.mathengine) if !settings.prerender @@ -746,7 +829,11 @@ function render(doc::Documenter.Document, settings::HTML=HTML()) size_limit_successes = map(collect(keys(doc.blueprint.pages))) do page idx = findfirst(nn -> nn.page == page, doc.internal.navlist) - nn = (idx === nothing) ? Documenter.NavNode(page, nothing, nothing) : doc.internal.navlist[idx] + nn = if (idx === nothing) + Documenter.NavNode(page, nothing, nothing) + else + doc.internal.navlist[idx] + end @debug "Rendering $(page) [$(repr(idx))]" render_page(ctx, nn) end @@ -759,7 +846,7 @@ function render(doc::Documenter.Document, settings::HTML=HTML()) """ fallbacks = unique(w.fallback for w in ctx.atexample_warnings) # We'll impose some regular order, but importantly we want 'nothing'-s on the top - for fallback in sort(fallbacks, by = s -> isnothing(s) ? "" : s) + for fallback in sort(fallbacks, by=s -> isnothing(s) ? 
"" : s) warnings = filter(w -> w.fallback == fallback, ctx.atexample_warnings) n_warnings = length(warnings) largest_size = maximum(w -> w.size_bytes, warnings) @@ -792,9 +879,12 @@ end struct HTMLSizeThresholdError <: Exception end function Base.showerror(io::IO, ::HTMLSizeThresholdError) - print(io, """ - HTMLSizeThresholdError: Some generated HTML files are above size_threshold. - See logged errors for details.""") + print( + io, + """ +HTMLSizeThresholdError: Some generated HTML files are above size_threshold. +See logged errors for details.""" + ) end """ @@ -851,7 +941,16 @@ end """ Renders the main `` tag. """ -function render_html(ctx, navnode, head, sidebar, navbar, article, footer, scripts::Vector{DOM.Node}=DOM.Node[]) +function render_html( + ctx, + navnode, + head, + sidebar, + navbar, + article, + footer, + scripts::Vector{DOM.Node}=DOM.Node[] +) @tags html body div DOM.HTMLDocument( html[:lang=>ctx.settings.lang]( @@ -884,13 +983,14 @@ function render_settings(ctx) ) ) - now_full, now_short = Dates.format(now(), dateformat"E d U Y HH:MM"), Dates.format(now(), dateformat"E d U Y") + now_full, now_short = Dates.format(now(), dateformat"E d U Y HH:MM"), + Dates.format(now(), dateformat"E d U Y") buildinfo = p( "This document was generated with ", - a[:href => "https://github.com/JuliaDocs/Documenter.jl"]("Documenter.jl"), + a[:href=>"https://github.com/JuliaDocs/Documenter.jl"]("Documenter.jl"), " version $(Documenter.DOCUMENTER_VERSION)", " on ", - span[".colophon-date", :title => now_full](now_short), + span[".colophon-date", :title=>now_full](now_short), ". ", "Using Julia version $(Base.VERSION)." 
) @@ -902,9 +1002,7 @@ function render_settings(ctx) p[".modal-card-title"]("Settings"), button[".delete"]() ), - section[".modal-card-body"]( - theme_selector, hr(), buildinfo - ), + section[".modal-card-body"](theme_selector, hr(), buildinfo), footer[".modal-card-foot"]() ) ) @@ -924,27 +1022,22 @@ function render_head(ctx, navnode) default_site_description(ctx) end - css_links = [ - RD.lato, - RD.juliamono, - RD.fontawesome_css..., - RD.katex_css, - ] + css_links = [RD.lato, RD.juliamono, RD.fontawesome_css..., RD.katex_css,] head( meta[:charset=>"UTF-8"], - meta[:name => "viewport", :content => "width=device-width, initial-scale=1.0"], + meta[:name=>"viewport", :content=>"width=device-width, initial-scale=1.0"], # Title tag and meta tags title(page_title), - meta[:name => "title", :content => page_title], - meta[:property => "og:title", :content => page_title], - meta[:property => "twitter:title", :content => page_title], + meta[:name=>"title", :content=>page_title], + meta[:property=>"og:title", :content=>page_title], + meta[:property=>"twitter:title", :content=>page_title], # Description meta tags - meta[:name => "description", :content => description], - meta[:property => "og:description", :content => description], - meta[:property => "twitter:description", :content => description], + meta[:name=>"description", :content=>description], + meta[:property=>"og:description", :content=>description], + meta[:property=>"twitter:description", :content=>description], # Canonical URL tags canonical_url_tags(ctx, navnode), @@ -958,31 +1051,31 @@ function render_head(ctx, navnode) # Stylesheets. 
map(css_links) do each - link[:href => each, :rel => "stylesheet", :type => "text/css"] + link[:href=>each, :rel=>"stylesheet", :type=>"text/css"] end, - script("documenterBaseURL=\"$(relhref(src, "."))\""), script[ - :src => RD.requirejs_cdn, - Symbol("data-main") => relhref(src, ctx.documenter_js) + :src=>RD.requirejs_cdn, + Symbol("data-main")=>relhref(src, ctx.documenter_js) ], - script[:src => relhref(src, ctx.search_index_js)], - - script[:src => relhref(src, "siteinfo.js")], - script[:src => relhref(src, "../versions.js")], + script[:src=>relhref(src, ctx.search_index_js)], + script[:src=>relhref(src, "siteinfo.js")], + script[:src=>relhref(src, "../versions.js")], # Themes. Note: we reverse the list to make sure that the default theme (first in # the array) comes as the last tag. map(Iterators.reverse(enumerate(THEMES))) do (i, theme) - e = link[".docs-theme-link", - :rel => "stylesheet", :type => "text/css", - :href => relhref(src, "assets/themes/$(theme).css"), - Symbol("data-theme-name") => theme, + e = link[ + ".docs-theme-link", + :rel=>"stylesheet", + :type=>"text/css", + :href=>relhref(src, "assets/themes/$(theme).css"), + Symbol("data-theme-name")=>theme, ] (i == 1) && push!(e.attributes, Symbol("data-theme-primary") => "") (i == 2) && push!(e.attributes, Symbol("data-theme-primary-dark") => "") return e end, - script[:src => relhref(src, ctx.themeswap_js)], + script[:src=>relhref(src, ctx.themeswap_js)], # Custom user-provided assets. 
asset_links(src, ctx.settings.assets), ) end @@ -995,9 +1088,9 @@ function canonical_url_tags(ctx, navnode) return DOM.VOID else tags = DOM.Node[ - meta[:property => "og:url", :content => canonical], - meta[:property => "twitter:url", :content => canonical], - link[:rel => "canonical", :href => canonical] + meta[:property=>"og:url", :content=>canonical], + meta[:property=>"twitter:url", :content=>canonical], + link[:rel=>"canonical", :href=>canonical] ] return tags end @@ -1015,9 +1108,9 @@ function preview_image_meta_tags(ctx) preview = replace(preview, r"[/\\]+" => "/") preview_url = rstrip(canonical_link, '/') * "/" * preview tags = DOM.Node[ - meta[:property => "og:image", :content => preview_url], - meta[:property => "twitter:image", :content => preview_url], - meta[:property => "twitter:card", :content => "summary_large_image"] + meta[:property=>"og:image", :content=>preview_url], + meta[:property=>"twitter:image", :content=>preview_url], + meta[:property=>"twitter:card", :content=>"summary_large_image"] ] return tags end @@ -1046,10 +1139,15 @@ function asset_links(src::AbstractString, assets::Vector{HTMLAsset}) for asset in assets class = asset.class url = asset.islocal ? relhref(src, asset.uri) : asset.uri - node = - class == :ico ? link[:href => url, :rel => "icon", :type => "image/x-icon", pairs(asset.attributes)...] : - class == :css ? link[:href => url, :rel => "stylesheet", :type => "text/css", pairs(asset.attributes)...] : - class == :js ? script[:src => url, pairs(asset.attributes)...] : continue # Skip non-js/css files. + node = if class == :ico + link[:href=>url, :rel=>"icon", :type=>"image/x-icon", pairs(asset.attributes)...] + elseif class == :css + link[:href=>url, :rel=>"stylesheet", :type=>"text/css", pairs(asset.attributes)...] + elseif class == :js + script[:src=>url, pairs(asset.attributes)...] + else + continue # Skip non-js/css files. + end 
push!(links, node) end return links @@ -1057,20 +1155,29 @@ end function analytics_script(tracking_id::AbstractString) @tags script - isempty(tracking_id) ? DOM.VOID : [ - script[:async, :src => "https://www.googletagmanager.com/gtag/js?id=$(tracking_id)"](), - script(""" - window.dataLayer = window.dataLayer || []; - function gtag(){dataLayer.push(arguments);} - gtag('js', new Date()); - gtag('config', '$(tracking_id)', {'page_path': location.pathname + location.search + location.hash}); - """) + if isempty(tracking_id) + DOM.VOID + else + [ + script[:async, :src=>"https://www.googletagmanager.com/gtag/js?id=$(tracking_id)"](), + script( + """ + window.dataLayer = window.dataLayer || []; + function gtag(){dataLayer.push(arguments);} + gtag('js', new Date()); + gtag('config', '$(tracking_id)', {'page_path': location.pathname + location.search + location.hash}); + """ + ) ] + end end function warning_script(src, ctx) if ctx.settings.warn_outdated - return Tag(:script)[Symbol(OUTDATED_VERSION_ATTR), :src => relhref(src, ctx.warner_js)]() + return Tag(:script)[ + Symbol(OUTDATED_VERSION_ATTR), + :src=>relhref(src, ctx.warner_js) + ]() end return DOM.VOID end @@ -1079,11 +1186,12 @@ end # ------------------------------------------------------------------------------ struct NavMenuContext - htmlctx :: HTMLContext - current :: Documenter.NavNode - idstack :: Vector{Int} + htmlctx::HTMLContext + current::Documenter.NavNode + idstack::Vector{Int} end -NavMenuContext(ctx::HTMLContext, current::Documenter.NavNode) = NavMenuContext(ctx, current, []) +NavMenuContext(ctx::HTMLContext, current::Documenter.NavNode) = + NavMenuContext(ctx, current, []) function render_sidebar(ctx, navnode) @tags a form img input nav div button select option span @@ -1098,25 +1206,37 @@ function render_sidebar(ctx, navnode) logo_dark = find_image_asset(ctx, "logo-dark") if logo !== nothing alt = isempty(ctx.doc.user.sitename) ? 
"Logo" : "$(ctx.doc.user.sitename) logo" - logo_element = a[".docs-logo", :href => href] + logo_element = a[".docs-logo", :href=>href] if logo_dark === nothing - push!(logo_element.nodes, img[:src => relhref(src, logo), :alt => alt]) + push!(logo_element.nodes, img[:src=>relhref(src, logo), :alt=>alt]) else - push!(logo_element.nodes, img[".docs-light-only", :src => relhref(src, logo), :alt => alt]) - push!(logo_element.nodes, img[".docs-dark-only", :src => relhref(src, logo_dark), :alt => alt]) + push!( + logo_element.nodes, + img[".docs-light-only", :src=>relhref(src, logo), :alt=>alt] + ) + push!( + logo_element.nodes, + img[".docs-dark-only", :src=>relhref(src, logo_dark), :alt=>alt] + ) end push!(navmenu.nodes, logo_element) end # Sitename if ctx.settings.sidebar_sitename - push!(navmenu.nodes, div[".docs-package-name"]( - span[".docs-autofit"](a[:href => href](ctx.doc.user.sitename)) - )) + push!( + navmenu.nodes, + div[".docs-package-name"]( + span[".docs-autofit"](a[:href=>href](ctx.doc.user.sitename)) + ) + ) end # Search box - push!(navmenu.nodes, - button["#documenter-search-query.docs-search-query.input.is-rounded.is-small.is-clickable.my-2.mx-auto.py-1.px-2"]("Search docs (Ctrl + /)") + push!( + navmenu.nodes, + button["#documenter-search-query.docs-search-query.input.is-rounded.is-small.is-clickable.my-2.mx-auto.py-1.px-2"]( + "Search docs (Ctrl + /)" + ) ) # The menu itself @@ -1132,7 +1252,7 @@ function render_sidebar(ctx, navnode) vs_select = select["#documenter-version-selector"] if !isempty(ctx.doc.user.version) vs_class = "$(vs_class).visible" - opt = option[:value => "#", :selected => "selected", ](ctx.doc.user.version) + opt = option[:value=>"#", :selected=>"selected",](ctx.doc.user.version) vs_select = vs_select(opt) end vs_select = div[".select.is-fullwidth.is-size-7"](vs_select) @@ -1166,7 +1286,8 @@ function navitem(nctx, nns::Vector) end pop!(nctx.idstack) filter!(node -> node.name !== DOM.TEXT, nodes) # FIXME: why? 
- ulclass = (length(nctx.idstack) >= nctx.htmlctx.settings.collapselevel) ? ".collapsed" : "" + ulclass = + (length(nctx.idstack) >= nctx.htmlctx.settings.collapselevel) ? ".collapsed" : "" isempty(nodes) ? DOM.Node("") : DOM.Tag(:ul)[ulclass](nodes) end function navitem(nctx, nn::Documenter.NavNode) @@ -1184,19 +1305,23 @@ function navitem(nctx, nn::Documenter.NavNode) # construct this item title = domify(dctx, pagetitle(dctx)) currentclass = (nn === current) ? ".is-active" : "" - item = if length(nctx.idstack) >= ctx.settings.collapselevel && children.name !== DOM.TEXT - menuid = "menuitem-$(join(nctx.idstack, '-'))" - input_attr = ["#$(menuid).collapse-toggle", :type => "checkbox"] - nn in Documenter.navpath(nctx.current) && push!(input_attr, :checked) - li[currentclass]( - input[input_attr...], - label[".tocitem", :for => menuid](span[".docs-label"](title), i[".docs-chevron"]), - ) - elseif nn.page === nothing - li[currentclass](span[".tocitem"](title)) - else - li[currentclass](a[".tocitem", :href => navhref(ctx, nn, current)](title)) - end + item = + if length(nctx.idstack) >= ctx.settings.collapselevel && children.name !== DOM.TEXT + menuid = "menuitem-$(join(nctx.idstack, '-'))" + input_attr = ["#$(menuid).collapse-toggle", :type => "checkbox"] + nn in Documenter.navpath(nctx.current) && push!(input_attr, :checked) + li[currentclass]( + input[input_attr...], + label[".tocitem", :for=>menuid]( + span[".docs-label"](title), + i[".docs-chevron"] + ), + ) + elseif nn.page === nothing + li[currentclass](span[".tocitem"](title)) + else + li[currentclass](a[".tocitem", :href=>navhref(ctx, nn, current)](title)) + end # add the subsections (2nd level headings) from the page if (nn === current) && current.page !== nothing @@ -1204,7 +1329,7 @@ function navitem(nctx, nn::Documenter.NavNode) internal_links = map(subs) do s istoplevel, anchor, text = s _li = istoplevel ? 
li[".toplevel"] : li[] - _li(a[".tocitem", :href => anchor](span(domify(dctx, text.children)))) + _li(a[".tocitem", :href=>anchor](span(domify(dctx, text.children)))) end # Only create the ul.internal tag if there actually are in-page headers length(internal_links) > 0 && push!(item.nodes, ul[".internal"](internal_links)) @@ -1222,7 +1347,7 @@ function render_navbar(ctx, navnode, edit_page_link::Bool) # Hamburger on mobile navbar_left = a[ "#documenter-sidebar-button.docs-sidebar-button.docs-navbar-link.fa-solid.fa-bars.is-hidden-desktop", - :href => "#", + :href=>"#", ] # The breadcrumb (navigation links on top) @@ -1230,7 +1355,11 @@ function render_navbar(ctx, navnode, edit_page_link::Bool) header_links = map(navpath) do nn dctx = DCtx(ctx, nn, true) title = domify(dctx, pagetitle(dctx)) - nn.page === nothing ? li(a[".is-disabled"](title)) : li(a[:href => navhref(ctx, nn, navnode)](title)) + if nn.page === nothing + li(a[".is-disabled"](title)) + else + li(a[:href=>navhref(ctx, nn, navnode)](title)) + end end header_links[end] = header_links[end][".is-active"] breadcrumb = nav[".breadcrumb"]( @@ -1247,7 +1376,8 @@ function render_navbar(ctx, navnode, edit_page_link::Bool) # is not displayed. The user can also pass `repolink` to HTML to either disable it # (repolink = nothing) or override the link URL (if set to a string). In the latter case, # we try to figure out what icon and string we should use based on the URL. 
- if !isnothing(ctx.settings.repolink) && (ctx.settings.repolink isa String || ctx.doc.user.remote isa Remotes.Remote) + if !isnothing(ctx.settings.repolink) && + (ctx.settings.repolink isa String || ctx.doc.user.remote isa Remotes.Remote) url, (host, logo) = if ctx.settings.repolink isa String ctx.settings.repolink, host_logo(ctx.settings.repolink) else # ctx.doc.user.remote isa Remotes.Remote @@ -1256,10 +1386,13 @@ function render_navbar(ctx, navnode, edit_page_link::Bool) # repourl() can sometimes return a nothing (Remotes.URL) if !isnothing(url) repo_title = "View the repository" * (isempty(host) ? "" : " on $host") - push!(navbar_right.nodes, - a[".docs-navbar-link", :href => url, :title => repo_title]( + push!( + navbar_right.nodes, + a[".docs-navbar-link", :href=>url, :title=>repo_title]( span[".docs-icon.fa-brands"](logo), - span[".docs-label.is-hidden-touch"](isempty(host) ? "Repository" : host) + span[".docs-label.is-hidden-touch"]( + isempty(host) ? "Repository" : host + ) ) ) end @@ -1267,24 +1400,33 @@ function render_navbar(ctx, navnode, edit_page_link::Bool) # Add an edit link, with just an icon, but only on pages where edit_page_link is true. # Some pages, like search, are special and do not have a source file to link to. 
edit_page_link && edit_link(ctx, navnode) do logo, title, url - push!(navbar_right.nodes, - a[".docs-navbar-link", :href => url, :title => title]( + push!( + navbar_right.nodes, + a[".docs-navbar-link", :href=>url, :title=>title]( span[".docs-icon.fa-solid"](logo) ) ) end # Settings cog - push!(navbar_right.nodes, a[ - "#documenter-settings-button.docs-settings-button.docs-navbar-link.fa-solid.fa-gear", - :href => "#", :title => "Settings", - ]) + push!( + navbar_right.nodes, + a[ + "#documenter-settings-button.docs-settings-button.docs-navbar-link.fa-solid.fa-gear", + :href=>"#", + :title=>"Settings", + ] + ) # Collapse/Expand All articles toggle - push!(navbar_right.nodes, a[ - "#documenter-article-toggle-button.docs-article-toggle-button.fa-solid.fa-chevron-up", - :href=>"javascript:;", :title=>"Collapse all docstrings", - ]) + push!( + navbar_right.nodes, + a[ + "#documenter-article-toggle-button.docs-article-toggle-button.fa-solid.fa-chevron-up", + :href=>"javascript:;", + :title=>"Collapse all docstrings", + ] + ) # Construct the main
node that should be the first element in div.docs-main header[".docs-navbar"](navbar_left, breadcrumb, navbar_right) @@ -1298,7 +1440,8 @@ function edit_link(f, ctx, navnode) # Let's fetch the edit path. Usually this is the source file of the page, but the user # can override it specifying the EditURL option in an @meta block. Usually, it is a # relative path pointing to a file, but can also be set to an absolute URL. - editpath = get(getpage(ctx, navnode).globals.meta, :EditURL, getpage(ctx, navnode).source) + editpath = + get(getpage(ctx, navnode).globals.meta, :EditURL, getpage(ctx, navnode).source) # If the user has set :EditURL to nothing, then the link will be disabled. Note: the # .source field of a Page is always a String. isnothing(editpath) && return @@ -1344,20 +1487,26 @@ function edit_link(f, ctx, navnode) end # All these logos are from the .fa-brands (brands) class -const host_logo_github = (host = "GitHub", logo = "\uf09b") # fa-github -const host_logo_bitbucket = (host = "BitBucket", logo = "\uf171") # fa-bitbucket -const host_logo_gitlab = (host = "GitLab", logo = "\uf296") # fa-gitlab -const host_logo_azure = (host = "Azure DevOps", logo = "\uf3ca") # fa-microsoft; TODO: change to ADO logo when added to FontAwesome -const host_logo_fallback = (host = "", logo = "\uf841") # fa-git-alt +const host_logo_github = (host="GitHub", logo="\uf09b") # fa-github +const host_logo_bitbucket = (host="BitBucket", logo="\uf171") # fa-bitbucket +const host_logo_gitlab = (host="GitLab", logo="\uf296") # fa-gitlab +const host_logo_azure = (host="Azure DevOps", logo="\uf3ca") # fa-microsoft; TODO: change to ADO logo when added to FontAwesome +const host_logo_fallback = (host="", logo="\uf841") # fa-git-alt host_logo(remote::Remotes.GitHub) = host_logo_github host_logo(remote::Remotes.URL) = host_logo(remote.urltemplate) host_logo(remote::Union{Remotes.Remote,Nothing}) = host_logo_fallback function host_logo(remoteurl::String) - occursin("github", remoteurl) ? 
host_logo_github : - occursin("gitlab", remoteurl) ? host_logo_gitlab : - occursin("bitbucket", remoteurl) ? host_logo_bitbucket : - occursin("azure", remoteurl) ? host_logo_azure : - host_logo_fallback + if occursin("github", remoteurl) + host_logo_github + elseif occursin("gitlab", remoteurl) + host_logo_gitlab + elseif occursin("bitbucket", remoteurl) + host_logo_bitbucket + elseif occursin("azure", remoteurl) + host_logo_azure + else + host_logo_fallback + end end function render_footer(ctx, navnode) @@ -1367,13 +1516,19 @@ function render_footer(ctx, navnode) if navnode.prev !== nothing dctx = DCtx(ctx, navnode.prev, true) title = domify(dctx, pagetitle(dctx)) - link = a[".docs-footer-prevpage", :href => navhref(ctx, navnode.prev, navnode)]("« ", title) + link = a[".docs-footer-prevpage", :href=>navhref(ctx, navnode.prev, navnode)]( + "« ", + title + ) push!(navlinks, link) end if navnode.next !== nothing dctx = DCtx(ctx, navnode.next, true) title = domify(dctx, pagetitle(dctx)) - link = a[".docs-footer-nextpage", :href => navhref(ctx, navnode.next, navnode)](title, " »") + link = a[".docs-footer-nextpage", :href=>navhref(ctx, navnode.next, navnode)]( + title, + " »" + ) push!(navlinks, link) end @@ -1409,17 +1564,18 @@ function render_article(ctx, navnode) if !isempty(dctx.footnotes) fnotes = map(dctx.footnotes) do f # If there are any nested footnotes, they'll get ignored. 
- dctx_footnote = DCtx(dctx, footnotes = nothing) + dctx_footnote = DCtx(dctx, footnotes=nothing) fid = "footnote-$(f.element.id)" citerefid = "citeref-$(f.element.id)" - if length(f.children) == 1 && first(f.children).element isa MarkdownAST.Paragraph + if length(f.children) == 1 && + first(f.children).element isa MarkdownAST.Paragraph li["#$(fid).footnote"]( - a[".tag.is-link", :href => "#$(citerefid)"](f.element.id), + a[".tag.is-link", :href=>"#$(citerefid)"](f.element.id), domify(dctx_footnote, first(f.children).children), ) else li["#$(fid).footnote"]( - a[".tag.is-link", :href => "#$(citerefid)"](f.element.id), + a[".tag.is-link", :href=>"#$(citerefid)"](f.element.id), # passing an empty MD() as `parent` to give it block context domify(dctx_footnote, f.children), ) @@ -1444,13 +1600,18 @@ function expand_versions(dir, versions) # filter and sort release folders vnum(x) = VersionNumber(x) version_folders = [x for x in available_folders if occursin(Base.VERSION_REGEX, x)] - sort!(version_folders, lt = (x, y) -> vnum(x) < vnum(y), rev = true) - release_folders = filter(x -> (v = vnum(x); v.prerelease == () && v.build == ()), version_folders) + sort!(version_folders, lt=(x, y) -> vnum(x) < vnum(y), rev=true) + release_folders = + filter(x -> (v = vnum(x); v.prerelease == () && v.build == ()), version_folders) # pre_release_folders = filter(x -> (v = vnum(x); v.prerelease != () || v.build != ()), version_folders) - major_folders = filter!(x -> (v = vnum(x); v.major != 0), - unique(x -> (v = vnum(x); v.major), release_folders)) - minor_folders = filter!(x -> (v = vnum(x); !(v.major == 0 && v.minor == 0)), - unique(x -> (v = vnum(x); (v.major, v.minor)), release_folders)) + major_folders = filter!( + x -> (v = vnum(x); v.major != 0), + unique(x -> (v = vnum(x); v.major), release_folders) + ) + minor_folders = filter!( + x -> (v = vnum(x); !(v.major == 0 && v.minor == 0)), + unique(x -> (v = vnum(x); (v.major, v.minor)), release_folders) + ) patch_folders = unique(x 
-> (v = vnum(x); (v.major, v.minor, v.patch)), release_folders) filter!(x -> vnum(x) !== 0, major_folders) @@ -1500,7 +1661,10 @@ function expand_versions(dir, versions) # generate remaining symlinks foreach(x -> push!(symlinks, "v$(vnum(x).major)" => x), major_folders) foreach(x -> push!(symlinks, "v$(vnum(x).major).$(vnum(x).minor)" => x), minor_folders) - foreach(x -> push!(symlinks, "v$(vnum(x).major).$(vnum(x).minor).$(vnum(x).patch)" => x), patch_folders) + foreach( + x -> push!(symlinks, "v$(vnum(x).major).$(vnum(x).minor).$(vnum(x).patch)" => x), + patch_folders + ) filter!(x -> x.first != x.second, unique!(symlinks)) # assert that none of the links point to another link @@ -1515,7 +1679,7 @@ function expand_versions(dir, versions) end # write version file -function generate_version_file(versionfile::AbstractString, entries, symlinks = []) +function generate_version_file(versionfile::AbstractString, entries, symlinks=[]) open(versionfile, "w") do buf println(buf, "var DOC_VERSIONS = [") for folder in entries @@ -1553,7 +1717,10 @@ function generate_redirect_file(redirectfile::AbstractString, entries) open(redirectfile, "w") do buf println(buf, comment) - println(buf, "") + println( + buf, + "" + ) end end @@ -1616,18 +1783,19 @@ end domify(dctx::DCtx, node::Node, ::MarkdownAST.Document) = domify(dctx, node.children) function domify(dctx::DCtx, node::Node, ah::Documenter.AnchoredHeader) - @assert length(node.children) == 1 && isa(first(node.children).element, MarkdownAST.Heading) + @assert length(node.children) == 1 && + isa(first(node.children).element, MarkdownAST.Heading) ctx, navnode = dctx.ctx, dctx.navnode anchor = ah.anchor # function domify(ctx, navnode, anchor::Anchor) @tags a frag = Documenter.anchor_fragment(anchor) - legacy = anchor.nth == 1 ? (a[:id => lstrip(frag, '#')*"-1"],) : () + legacy = anchor.nth == 1 ? 
(a[:id=>lstrip(frag, '#')*"-1"],) : () h = first(node.children) - Tag(Symbol("h$(h.element.level)"))[:id => lstrip(frag, '#')]( - a[".docs-heading-anchor", :href => frag](domify(dctx, h.children)), + Tag(Symbol("h$(h.element.level)"))[:id=>lstrip(frag, '#')]( + a[".docs-heading-anchor", :href=>frag](domify(dctx, h.children)), legacy..., - a[".docs-heading-anchor-permalink", :href => frag, :title => "Permalink"] + a[".docs-heading-anchor-permalink", :href=>frag, :title=>"Permalink"] ) end @@ -1645,7 +1813,7 @@ function push!(lb::ListBuilder, level, node) if isempty(lb.es) || typeof(last(lb.es)) !== ListBuilder push!(lb.es, ListBuilder()) end - push!(last(lb.es), level-1, node) + push!(last(lb.es), level - 1, node) end end @@ -1700,17 +1868,26 @@ function domify(dctx::DCtx, mdast_node::Node, node::Documenter.DocsNode) @tags a code article header span # push to search index - rec = SearchRecord(ctx, navnode; + rec = SearchRecord( + ctx, + navnode; fragment=Documenter.anchor_fragment(node.anchor), title=string(node.object.binding), category=Documenter.doccat(node.object), - text = mdflatten(mdast_node)) + text=mdflatten(mdast_node) + ) push!(ctx.search_index, rec) article[".docstring"]( header( - a[".docstring-article-toggle-button.fa-solid.fa-chevron-down", :href=>"javascript:;", :title=>"Collapse docstring"], - a[".docstring-binding", :id=>node.anchor.id, :href=>"#$(node.anchor.id)"](code("$(node.object.binding)")), + a[ + ".docstring-article-toggle-button.fa-solid.fa-chevron-down", + :href=>"javascript:;", + :title=>"Collapse docstring" + ], + a[".docstring-binding", :id=>node.anchor.id, :href=>"#$(node.anchor.id)"]( + code("$(node.object.binding)") + ), " — ", # — span[".docstring-category"]("$(Documenter.doccat(node.object))") ), @@ -1732,7 +1909,10 @@ function domify_doc(dctx::DCtx, node::Node) if !ctx.settings.disable_git url = Documenter.source_url(ctx.doc, result) if url !== nothing - push!(ret.nodes, a[".docs-sourcelink", :target=>"_blank", 
:href=>url]("source")) + push!( + ret.nodes, + a[".docs-sourcelink", :target=>"_blank", :href=>url]("source") + ) end end return ret @@ -1761,7 +1941,11 @@ Prints a warning/error if the page goes over the `size_threshold` or `size_thres limits, and in the former case also returns `false`, to report back to the caller that the size threshold check failed. """ -function write_html(ctx::HTMLContext, navnode::Documenter.NavNode, page_html::DOM.HTMLDocument) :: Bool +function write_html( + ctx::HTMLContext, + navnode::Documenter.NavNode, + page_html::DOM.HTMLDocument +)::Bool page_path = get_url(ctx, navnode) buf = IOBuffer() print(buf, page_html) @@ -1813,7 +1997,7 @@ function format_units(size) end end - return string(round(size, digits = 2), " (", unit, ")") + return string(round(size, digits=2), " (", unit, ")") end """ @@ -1857,7 +2041,11 @@ size threshold, and returns the filename (that should be in the same directory a corresponding HTML file). If the data is under the threshold, no file is created, and the function returns `nothing`. """ -function write_data_file(dctx::DCtx, data::Union{Vector{UInt8},AbstractString}; suffix::AbstractString) +function write_data_file( + dctx::DCtx, + data::Union{Vector{UInt8},AbstractString}; + suffix::AbstractString +) ctx, navnode = dctx.ctx, dctx.navnode # If we're under the threshold, we return `nothing`, indicating to the caller that # they should inline the file instead. @@ -1905,17 +2093,19 @@ function data_filename(dctx::DCtx, slug::AbstractString, suffix::AbstractString) string(pagename, "-", slug) end # Now we need to find a valid file name, in case there are existing duplicates. 
- filename = find_valid_data_file(joinpath(ctx.doc.user.build, dir), filename_prefix, suffix) - return (; - filename, - path = joinpath(ctx.doc.user.build, dir, filename), - ) + filename = + find_valid_data_file(joinpath(ctx.doc.user.build, dir), filename_prefix, suffix) + return (; filename, path=joinpath(ctx.doc.user.build, dir, filename),) end -function find_valid_data_file(directory::AbstractString, prefix::AbstractString, suffix::AbstractString) +function find_valid_data_file( + directory::AbstractString, + prefix::AbstractString, + suffix::AbstractString +) # We'll try 10_000 different filename.. if this doesn't work, then something is probably really # badly wrong, and so we just crash. - for i in 0:10_000 + for i = 0:10_000 filename = if i == 0 string(prefix, suffix) else @@ -2016,7 +2206,8 @@ function pagetitle(dctx::DCtx) [MarkdownAST.@ast("-")] end -mdflatten_pagetitle(dctx::DCtx) = sprint((io, ns) -> foreach(n -> mdflatten(io, n), ns), pagetitle(dctx)) +mdflatten_pagetitle(dctx::DCtx) = + sprint((io, ns) -> foreach(n -> mdflatten(io, n), ns), pagetitle(dctx)) """ Returns an ordered list of tuples, `(toplevel, anchor, text)`, corresponding to level 1 and 2 @@ -2048,10 +2239,10 @@ function collect_subsections(page::MarkdownAST.Node) return sections end -function domify_ansicoloredtext(text::AbstractString, class = "") +function domify_ansicoloredtext(text::AbstractString, class="") @tags pre stack = DOM.Node[pre()] # this `pre` is dummy - function cb(io::IO, printer, tag::String, attrs::Dict{Symbol, String}) + function cb(io::IO, printer, tag::String, attrs::Dict{Symbol,String}) text = String(take!(io)) children = stack[end].nodes isempty(text) || push!(children, Tag(Symbol("#RAW#"))(text)) @@ -2065,8 +2256,12 @@ function domify_ansicoloredtext(text::AbstractString, class = "") return true end ansiclass = isempty(class) ? 
"ansi" : class * " ansi" - printer = ANSIColoredPrinters.HTMLPrinter(IOBuffer(text), callback = cb, - root_tag = "code", root_class = ansiclass) + printer = ANSIColoredPrinters.HTMLPrinter( + IOBuffer(text), + callback=cb, + root_tag="code", + root_class=ansiclass + ) show(IOBuffer(), MIME"text/html"(), printer) return stack[1].nodes end @@ -2087,9 +2282,11 @@ function domify(dctx::DCtx, node::Node, e::MarkdownAST.Text) return DOM.Node(text) end -domify(dctx::DCtx, node::Node, ::MarkdownAST.BlockQuote) = Tag(:blockquote)(domify(dctx, node.children)) +domify(dctx::DCtx, node::Node, ::MarkdownAST.BlockQuote) = + Tag(:blockquote)(domify(dctx, node.children)) -domify(dctx::DCtx, node::Node, ::MarkdownAST.Strong) = Tag(:strong)(domify(dctx, node.children)) +domify(dctx::DCtx, node::Node, ::MarkdownAST.Strong) = + Tag(:strong)(domify(dctx, node.children)) function domify(dctx::DCtx, node::Node, c::MarkdownAST.CodeBlock) ctx, navnode, settings = dctx.ctx, dctx.navnode, dctx.settings @@ -2099,7 +2296,8 @@ function domify(dctx::DCtx, node::Node, c::MarkdownAST.CodeBlock) language = Documenter.codelang(language) if language == "documenter-ansi" # From @repl blocks (through MultiCodeBlock) return pre(domify_ansicoloredtext(c.code, "nohighlight hljs")) - elseif settings !== nothing && settings.prerender && + elseif settings !== nothing && + settings.prerender && !(isempty(language) || language == "nohighlight") r = hljs_prerender(c, settings) r !== nothing && return r @@ -2121,7 +2319,8 @@ function domify(dctx::DCtx, node::Node, mcb::Documenter.MultiCodeBlock) push!(p.nodes, code) # insert a
between output and the next input if i != length(node.children) && - findnext(x -> x.element.info == mcb.language, collect(node.children), i + 1) == i + 1 + findnext(x -> x.element.info == mcb.language, collect(node.children), i + 1) == + i + 1 push!(p.nodes, br()) end end @@ -2147,7 +2346,7 @@ function hljs_prerender(c::MarkdownAST.CodeBlock, settings::HTML) # return pre(code[".nohighlight $(lang) .hljs"](Tag(Symbol("#RAW#"))(str))) return pre(code[".language-$(lang) .hljs"](Tag(Symbol("#RAW#"))(str))) catch e - @error "HTMLWriter: prerendering failed" exception=e stderr=String(take!(err)) + @error "HTMLWriter: prerendering failed" exception = e stderr = String(take!(err)) end return nothing end @@ -2159,7 +2358,7 @@ end domify(dctx::DCtx, node::Node, ::MarkdownAST.ThematicBreak) = Tag(:hr)() -const ImageElements = Union{MarkdownAST.Image, Documenter.LocalImage} +const ImageElements = Union{MarkdownAST.Image,Documenter.LocalImage} function domify(dctx::DCtx, node::Node, i::ImageElements) ctx, navnode = dctx.ctx, dctx.navnode alt = mdflatten(node.children) @@ -2169,30 +2368,30 @@ function domify(dctx::DCtx, node::Node, i::ImageElements) @tags video img a if occursin(r"\.(webm|mp4|ogg|ogm|ogv|avi)$", url) - video[:src => url, :controls => "true", :title => alt]( - a[:href => url](alt) - ) + video[:src=>url, :controls=>"true", :title=>alt](a[:href=>url](alt)) else - img[:src => url, :alt => alt] + img[:src=>url, :alt=>alt] end end domify(dctx::DCtx, node::Node, ::MarkdownAST.Emph) = Tag(:em)(domify(dctx, node.children)) -domify(dctx::DCtx, node::Node, m::MarkdownAST.DisplayMath) = Tag(:p)[".math-container"](string("\\[", m.math, "\\]")) +domify(dctx::DCtx, node::Node, m::MarkdownAST.DisplayMath) = + Tag(:p)[".math-container"](string("\\[", m.math, "\\]")) -domify(dctx::DCtx, node::Node, m::MarkdownAST.InlineMath) = Tag(:span)(string('$', m.math, '$')) +domify(dctx::DCtx, node::Node, m::MarkdownAST.InlineMath) = + Tag(:span)(string('$', m.math, '$')) 
domify(dctx::DCtx, node::Node, m::MarkdownAST.LineBreak) = Tag(:br)() # TODO: Implement SoftBreak, Backslash (but they don't appear in standard library Markdown conversions) -const LinkElements = Union{MarkdownAST.Link, Documenter.PageLink, Documenter.LocalLink} +const LinkElements = Union{MarkdownAST.Link,Documenter.PageLink,Documenter.LocalLink} function domify(dctx::DCtx, node::Node, link::LinkElements) droplinks = dctx.droplinks url = filehref(dctx, node, link) # function mdconvert(link::Markdown.Link, parent; droplinks=false, kwargs...) link_text = domify(dctx, node.children) - droplinks ? link_text : Tag(:a)[:href => url](link_text) + droplinks ? link_text : Tag(:a)[:href=>url](link_text) end function domify(dctx::DCtx, node::Node, list::MarkdownAST.List) @@ -2208,8 +2407,11 @@ function domify(dctx::DCtx, node::Node, ::MarkdownAST.Paragraph) # See also: https://github.com/JuliaLang/julia/pull/26598 is_in_tight_list(node) ? content : Tag(:p)(content) end -is_in_tight_list(node::Node) = !isnothing(node.parent) && isa(node.parent.element, MarkdownAST.Item) && - !isnothing(node.parent.parent) && isa(node.parent.parent.element, MarkdownAST.List) && +is_in_tight_list(node::Node) = + !isnothing(node.parent) && + isa(node.parent.element, MarkdownAST.Item) && + !isnothing(node.parent.parent) && + isa(node.parent.parent.element, MarkdownAST.List) && node.parent.parent.element.tight function domify(dctx::DCtx, node::Node, t::MarkdownAST.Table) @@ -2227,11 +2429,11 @@ function domify(dctx::DCtx, node::Node, t::MarkdownAST.Table) end table( tr(map(enumerate(th_row.children)) do (i, x) - th[:style => alignment_style[i]](domify(dctx, x.children)) + th[:style=>alignment_style[i]](domify(dctx, x.children)) end), map(tbody_rows) do x tr(map(enumerate(x.children)) do (i, y) # each cell in a row - td[:style => alignment_style[i]](domify(dctx, y.children)) + td[:style=>alignment_style[i]](domify(dctx, y.children)) end) end ) @@ -2253,7 +2455,9 @@ end function 
domify(dctx::DCtx, node::Node, f::MarkdownAST.FootnoteLink) @tags sup a - sup[".footnote-reference"](a["#citeref-$(f.id)", :href => "#footnote-$(f.id)"]("[$(f.id)]")) + sup[".footnote-reference"]( + a["#citeref-$(f.id)", :href=>"#footnote-$(f.id)"]("[$(f.id)]") + ) end function domify(dctx::DCtx, node::Node, f::MarkdownAST.FootnoteDefinition) # As we run through the document to generate the document, we won't render the footnote @@ -2276,50 +2480,55 @@ end function domify(dctx::DCtx, node::Node, a::MarkdownAST.Admonition) @tags header div details summary - colorclass = - (a.category == "danger") ? ".is-danger" : - (a.category == "warning") ? ".is-warning" : - (a.category == "note") ? ".is-info" : - (a.category == "info") ? ".is-info" : - (a.category == "tip") ? ".is-success" : - (a.category == "compat") ? ".is-compat" : begin - # If the admonition category is not one of the standard ones, we tag the - # admonition div element with a `is-category-$(category)` class. However, we - # first carefully sanitize the category name. Strictly speaking, this is not - # necessary when were using the Markdown parser in the Julia standard library, - # since it restricts the category to [a-z]+. But it is possible for the users to - # construct their own Admonition objects with arbitrary category strings and - # pass them onto Documenter. - # - # (1) remove all characters except A-Z, a-z, 0-9 and - - cat_sanitized = replace(a.category, r"[^A-Za-z0-9-]" => "") - # (2) remove any dashes from the beginning and end of the string - cat_sanitized = replace(cat_sanitized, r"^[-]+" => "") - cat_sanitized = replace(cat_sanitized, r"[-]+$" => "") - # (3) reduce any duplicate dashes in the middle to single dashes - cat_sanitized = replace(cat_sanitized, r"[-]+" => "-") - cat_sanitized = lowercase(cat_sanitized) - # (4) if nothing is left (or the category was empty to begin with), we don't - # apply a class - isempty(cat_sanitized) ? 
"" : ".is-category-$(cat_sanitized)" - end + colorclass = if (a.category == "danger") + ".is-danger" + elseif (a.category == "warning") + ".is-warning" + elseif (a.category == "note") + ".is-info" + elseif (a.category == "info") + ".is-info" + elseif (a.category == "tip") + ".is-success" + elseif (a.category == "compat") + ".is-compat" + else + begin + # If the admonition category is not one of the standard ones, we tag the + # admonition div element with a `is-category-$(category)` class. However, we + # first carefully sanitize the category name. Strictly speaking, this is not + # necessary when were using the Markdown parser in the Julia standard library, + # since it restricts the category to [a-z]+. But it is possible for the users to + # construct their own Admonition objects with arbitrary category strings and + # pass them onto Documenter. + # + # (1) remove all characters except A-Z, a-z, 0-9 and - + cat_sanitized = replace(a.category, r"[^A-Za-z0-9-]" => "") + # (2) remove any dashes from the beginning and end of the string + cat_sanitized = replace(cat_sanitized, r"^[-]+" => "") + cat_sanitized = replace(cat_sanitized, r"[-]+$" => "") + # (3) reduce any duplicate dashes in the middle to single dashes + cat_sanitized = replace(cat_sanitized, r"[-]+" => "-") + cat_sanitized = lowercase(cat_sanitized) + # (4) if nothing is left (or the category was empty to begin with), we don't + # apply a class + isempty(cat_sanitized) ? "" : ".is-category-$(cat_sanitized)" + end + end inner_div = div[".admonition-body"](domify(dctx, node.children)) if a.category == "details" # details admonitions are rendered as
blocks - details[".admonition.is-details"]( - summary[".admonition-header"](a.title), inner_div - ) + details[".admonition.is-details"](summary[".admonition-header"](a.title), inner_div) else - div[".admonition$(colorclass)"]( - header[".admonition-header"](a.title), inner_div - ) + div[".admonition$(colorclass)"](header[".admonition-header"](a.title), inner_div) end end # Select the "best" representation for HTML output. domify(dctx::DCtx, node::Node, ::Documenter.MultiOutput) = domify(dctx, node.children) -domify(dctx::DCtx, node::Node, moe::Documenter.MultiOutputElement) = Base.invokelatest(domify, dctx, node, moe.element) +domify(dctx::DCtx, node::Node, moe::Documenter.MultiOutputElement) = + Base.invokelatest(domify, dctx, node, moe.element) function domify(dctx::DCtx, node::Node, d::Dict{MIME,Any}) rawhtml(code) = Tag(Symbol("#RAW#"))(code) @@ -2344,7 +2553,7 @@ function domify(dctx::DCtx, node::Node, d::Dict{MIME,Any}) dom = if length(svg) >= dctx.ctx.settings.example_size_threshold filename = write_data_file(dctx, svg; suffix=".svg") @assert !isnothing(filename) - img[:src => filename, :alt => "Example block output"] + img[:src=>filename, :alt=>"Example block output"] elseif svg_tag_match === nothing # There is no svg tag so we don't do any more advanced # processing and just return the svg as HTML. @@ -2358,7 +2567,11 @@ function domify(dctx::DCtx, node::Node, d::Dict{MIME,Any}) svg_tag = svg_tag_match.match xmlns_present = occursin("xmlns", svg_tag) if !xmlns_present - svg = replace(svg, " " "")) end - (; dom = dom, mime = "image/svg+xml") + (; dom=dom, mime="image/svg+xml") elseif haskey(d, MIME"image/png"()) domify_show_image_binary(dctx, "png", d) elseif haskey(d, MIME"image/webp"()) @@ -2402,20 +2615,26 @@ function domify(dctx::DCtx, node::Node, d::Dict{MIME,Any}) if has_text_html && isnothing(image) # The 'text/html' representation of an @example block is above the threshold, but no # supported image representation is present as an alternative. 
- push!(dctx.ctx.atexample_warnings, AtExampleFallbackWarning( - page = dctx.navnode.page, - size_bytes = length(d[MIME"text/html"()]), - fallback = nothing, - )) + push!( + dctx.ctx.atexample_warnings, + AtExampleFallbackWarning( + page=dctx.navnode.page, + size_bytes=length(d[MIME"text/html"()]), + fallback=nothing, + ) + ) return rawhtml(d[MIME"text/html"()]) elseif has_text_html && !isnothing(image) # The 'text/html' representation of an @example block is above the threshold, # falling back to '$(image.mime)' representation. - push!(dctx.ctx.atexample_warnings, AtExampleFallbackWarning( - page = dctx.navnode.page, - size_bytes = length(d[MIME"text/html"()]), - fallback = image.mime, - )) + push!( + dctx.ctx.atexample_warnings, + AtExampleFallbackWarning( + page=dctx.navnode.page, + size_bytes=length(d[MIME"text/html"()]), + fallback=image.mime, + ) + ) return image.dom elseif !has_text_html && !isnothing(image) return image.dom @@ -2428,7 +2647,7 @@ function domify(dctx::DCtx, node::Node, d::Dict{MIME,Any}) # Markdown.LaTeX objects we put the correct delimiters around it anyway. has_math, latex = _strip_latex_math_delimiters(latex) out = if !has_math - Documenter.mdparse(latex; mode = :single) + Documenter.mdparse(latex; mode=:single) else [MarkdownAST.@ast MarkdownAST.DisplayMath(latex)] end @@ -2473,11 +2692,11 @@ function domify_show_image_binary(dctx::DCtx, filetype::AbstractString, d::Dict{ alt = (:alt => "Example block output") dom = if isnothing(filename) src = string("data:$(mime_name);base64,", data_base64) - img[:src => src, alt] + img[:src=>src, alt] else - img[:src => filename, alt] + img[:src=>filename, alt] end - (; dom, mime = mime_name) + (; dom, mime=mime_name) end # filehrefs @@ -2486,7 +2705,8 @@ end # If the nodes passed through CrossReferences as native MarkdownAST elements, then that # means they're reasonable absolute URLs. Or, possibly, the URL is problematic, but we # just ignore that here. That should have been caught earlier. 
-filehref(dctx::DCtx, node::Node, e::Union{MarkdownAST.Image, MarkdownAST.Link}) = e.destination +filehref(dctx::DCtx, node::Node, e::Union{MarkdownAST.Image,MarkdownAST.Link}) = + e.destination function filehref(dctx::DCtx, node::Node, e::Documenter.PageLink) ctx, navnode = dctx.ctx, dctx.navnode diff --git a/src/html/RD.jl b/src/html/RD.jl index b05c3985ca..672d58d770 100644 --- a/src/html/RD.jl +++ b/src/html/RD.jl @@ -1,78 +1,114 @@ "Provides a namespace for remote dependencies." module RD - using JSON - using ....Documenter.JSDependencies: RemoteLibrary, Snippet, RequireJS, jsescape, json_jsescape - using ..HTMLWriter: KaTeX, MathJax, MathJax2, MathJax3 +using JSON +using ....Documenter.JSDependencies: + RemoteLibrary, Snippet, RequireJS, jsescape, json_jsescape +using ..HTMLWriter: KaTeX, MathJax, MathJax2, MathJax3 - const requirejs_cdn = "https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.6/require.min.js" - const lato = "https://cdnjs.cloudflare.com/ajax/libs/lato-font/3.0.0/css/lato-font.min.css" - const juliamono = "https://cdnjs.cloudflare.com/ajax/libs/juliamono/0.050/juliamono.min.css" - const fontawesome_version = "6.4.2" - const fontawesome_css = [ - "https://cdnjs.cloudflare.com/ajax/libs/font-awesome/$(fontawesome_version)/css/fontawesome.min.css", - "https://cdnjs.cloudflare.com/ajax/libs/font-awesome/$(fontawesome_version)/css/solid.min.css", - "https://cdnjs.cloudflare.com/ajax/libs/font-awesome/$(fontawesome_version)/css/brands.min.css", - ] +const requirejs_cdn = "https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.6/require.min.js" +const lato = "https://cdnjs.cloudflare.com/ajax/libs/lato-font/3.0.0/css/lato-font.min.css" +const juliamono = "https://cdnjs.cloudflare.com/ajax/libs/juliamono/0.050/juliamono.min.css" +const fontawesome_version = "6.4.2" +const fontawesome_css = [ + "https://cdnjs.cloudflare.com/ajax/libs/font-awesome/$(fontawesome_version)/css/fontawesome.min.css", + 
"https://cdnjs.cloudflare.com/ajax/libs/font-awesome/$(fontawesome_version)/css/solid.min.css", + "https://cdnjs.cloudflare.com/ajax/libs/font-awesome/$(fontawesome_version)/css/brands.min.css", +] - const jquery = RemoteLibrary("jquery", "https://cdnjs.cloudflare.com/ajax/libs/jquery/3.7.0/jquery.min.js") - const jqueryui = RemoteLibrary("jqueryui", "https://cdnjs.cloudflare.com/ajax/libs/jqueryui/1.13.2/jquery-ui.min.js") - const lodash = RemoteLibrary("lodash", "https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.21/lodash.min.js") - const minisearch = RemoteLibrary("minisearch", "https://cdn.jsdelivr.net/npm/minisearch@6.1.0/dist/umd/index.min.js") +const jquery = RemoteLibrary( + "jquery", + "https://cdnjs.cloudflare.com/ajax/libs/jquery/3.7.0/jquery.min.js" +) +const jqueryui = RemoteLibrary( + "jqueryui", + "https://cdnjs.cloudflare.com/ajax/libs/jqueryui/1.13.2/jquery-ui.min.js" +) +const lodash = RemoteLibrary( + "lodash", + "https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.21/lodash.min.js" +) +const minisearch = RemoteLibrary( + "minisearch", + "https://cdn.jsdelivr.net/npm/minisearch@6.1.0/dist/umd/index.min.js" +) - # headroom - const headroom_version = "0.12.0" - const headroom = RemoteLibrary("headroom", "https://cdnjs.cloudflare.com/ajax/libs/headroom/$(headroom_version)/headroom.min.js") - const headroom_jquery = RemoteLibrary( - "headroom-jquery", - "https://cdnjs.cloudflare.com/ajax/libs/headroom/$(headroom_version)/jQuery.headroom.min.js", - deps = ["jquery", "headroom"], - ) +# headroom +const headroom_version = "0.12.0" +const headroom = RemoteLibrary( + "headroom", + "https://cdnjs.cloudflare.com/ajax/libs/headroom/$(headroom_version)/headroom.min.js" +) +const headroom_jquery = RemoteLibrary( + "headroom-jquery", + "https://cdnjs.cloudflare.com/ajax/libs/headroom/$(headroom_version)/jQuery.headroom.min.js", + deps=["jquery", "headroom"], +) - # highlight.js - "Add the highlight.js dependencies and snippet to a [`RequireJS`](@ref) 
declaration." - function highlightjs!(r::RequireJS, languages = String[]) - # NOTE: the CSS themes for hightlightjs are compiled into the Documenter CSS - # When updating this dependency, it is also necessary to update the the CSS - # files the CSS files in assets/html/scss/highlightjs - hljs_version = "11.8.0" - push!(r, RemoteLibrary( +# highlight.js +"Add the highlight.js dependencies and snippet to a [`RequireJS`](@ref) declaration." +function highlightjs!(r::RequireJS, languages=String[]) + # NOTE: the CSS themes for hightlightjs are compiled into the Documenter CSS + # When updating this dependency, it is also necessary to update the the CSS + # files the CSS files in assets/html/scss/highlightjs + hljs_version = "11.8.0" + push!( + r, + RemoteLibrary( "highlight", "https://cdnjs.cloudflare.com/ajax/libs/highlight.js/$(hljs_version)/highlight.min.js" - )) - languages = ["julia", "julia-repl", languages...] - for language in languages - language = jsescape(language) - push!(r, RemoteLibrary( + ) + ) + languages = ["julia", "julia-repl", languages...] 
+ for language in languages + language = jsescape(language) + push!( + r, + RemoteLibrary( "highlight-$(language)", "https://cdnjs.cloudflare.com/ajax/libs/highlight.js/$(hljs_version)/languages/$(language).min.js", - deps = ["highlight"] - )) - end - push!(r, Snippet( - vcat(["jquery", "highlight"], ["highlight-$(jsescape(language))" for language in languages]), + deps=["highlight"] + ) + ) + end + push!( + r, + Snippet( + vcat( + ["jquery", "highlight"], + ["highlight-$(jsescape(language))" for language in languages] + ), ["\$"], raw""" $(document).ready(function() { hljs.highlightAll(); }) """ - )) - end + ) + ) +end - # MathJax & KaTeX - const katex_version = "0.16.8" - const katex_css = "https://cdnjs.cloudflare.com/ajax/libs/KaTeX/$(katex_version)/katex.min.css" - function mathengine!(r::RequireJS, engine::KaTeX) - push!(r, RemoteLibrary( +# MathJax & KaTeX +const katex_version = "0.16.8" +const katex_css = "https://cdnjs.cloudflare.com/ajax/libs/KaTeX/$(katex_version)/katex.min.css" +function mathengine!(r::RequireJS, engine::KaTeX) + push!( + r, + RemoteLibrary( "katex", "https://cdnjs.cloudflare.com/ajax/libs/KaTeX/$(katex_version)/katex.min.js" - )) - push!(r, RemoteLibrary( + ) + ) + push!( + r, + RemoteLibrary( "katex-auto-render", "https://cdnjs.cloudflare.com/ajax/libs/KaTeX/$(katex_version)/contrib/auto-render.min.js", - deps = ["katex"], - )) - push!(r, Snippet( + deps=["katex"], + ) + ) + push!( + r, + Snippet( ["jquery", "katex", "katex-auto-render"], ["\$", "katex", "renderMathInElement"], """ @@ -83,35 +119,47 @@ module RD ); }) """ - )) + ) + ) +end +function mathengine!(r::RequireJS, engine::MathJax2) + url = if isempty(engine.url) + "https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.9/MathJax.js?config=TeX-AMS_HTML" + else + engine.url end - function mathengine!(r::RequireJS, engine::MathJax2) - url = isempty(engine.url) ? 
"https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.9/MathJax.js?config=TeX-AMS_HTML" : engine.url - push!(r, RemoteLibrary( - "mathjax", - url, - exports = "MathJax" - )) - push!(r, Snippet(["mathjax"], ["MathJax"], + push!(r, RemoteLibrary("mathjax", url, exports="MathJax")) + push!( + r, + Snippet( + ["mathjax"], + ["MathJax"], """ MathJax.Hub.Config($(json_jsescape(engine.config, 2))); """ - )) + ) + ) +end +function mathengine!(r::RequireJS, engine::MathJax3) + url = if isempty(engine.url) + "https://cdnjs.cloudflare.com/ajax/libs/mathjax/3.2.2/es5/tex-svg.js" + else + engine.url end - function mathengine!(r::RequireJS, engine::MathJax3) - url = isempty(engine.url) ? "https://cdnjs.cloudflare.com/ajax/libs/mathjax/3.2.2/es5/tex-svg.js" : engine.url - push!(r, Snippet([], [], - """ - window.MathJax = $(json_jsescape(engine.config, 2)); + push!(r, Snippet( + [], + [], + """ + window.MathJax = $(json_jsescape(engine.config, 2)); - (function () { - var script = document.createElement('script'); - script.src = '$url'; - script.async = true; - document.head.appendChild(script); - })(); - """ - )) - end - mathengine(::RequireJS, ::Nothing) = nothing + (function () { + var script = document.createElement('script'); + script.src = '$url'; + script.async = true; + document.head.appendChild(script); + })(); + """ + )) +end +mathengine(::RequireJS, ::Nothing) = nothing end diff --git a/src/latex/LaTeXWriter.jl b/src/latex/LaTeXWriter.jl index 8a88411796..f10ef2d02b 100644 --- a/src/latex/LaTeXWriter.jl +++ b/src/latex/LaTeXWriter.jl @@ -53,10 +53,12 @@ struct LaTeX <: Documenter.Writer version::String tectonic::Union{Cmd,String,Nothing} function LaTeX(; - platform = "native", - version = get(ENV, "TRAVIS_TAG", ""), - tectonic = nothing) - platform ∈ ("native", "tectonic", "docker", "none") || throw(ArgumentError("unknown platform: $platform")) + platform="native", + version=get(ENV, "TRAVIS_TAG", ""), + tectonic=nothing + ) + platform ∈ ("native", "tectonic", "docker", 
"none") || + throw(ArgumentError("unknown platform: $platform")) return new(platform, string(version), tectonic) end end @@ -65,10 +67,10 @@ import ..Documenter import Markdown import ANSIColoredPrinters -mutable struct Context{I <: IO} <: IO +mutable struct Context{I<:IO} <: IO io::I in_header::Bool - footnotes::Dict{String, Int} + footnotes::Dict{String,Int} depth::Int filename::String # currently active source file doc::Documenter.Document @@ -85,9 +87,16 @@ _hash(x) = string(hash(x)) const STYLE = joinpath(dirname(@__FILE__), "..", "..", "assets", "latex", "documenter.sty") -const DEFAULT_PREAMBLE_PATH = joinpath(dirname(@__FILE__), "..", "..", "assets", "latex", "preamble.tex") +const DEFAULT_PREAMBLE_PATH = + joinpath(dirname(@__FILE__), "..", "..", "assets", "latex", "preamble.tex") -hastex() = (try; success(`latexmk -version`); catch; false; end) +hastex() = ( + try + success(`latexmk -version`) + catch + false + end +) const DOCUMENT_STRUCTURE = ( "part", @@ -138,8 +147,11 @@ function render(doc::Documenter.Document, settings::LaTeX=LaTeX()) # Debug: if DOCUMENTER_LATEX_DEBUG environment variable is set, copy the LaTeX # source files over to a directory under doc.user.root. if haskey(ENV, "DOCUMENTER_LATEX_DEBUG") - dst = isempty(ENV["DOCUMENTER_LATEX_DEBUG"]) ? 
mktempdir(doc.user.root; cleanup=false) : + dst = if isempty(ENV["DOCUMENTER_LATEX_DEBUG"]) + mktempdir(doc.user.root; cleanup=false) + else joinpath(doc.user.root, ENV["DOCUMENTER_LATEX_DEBUG"]) + end sources = cp(pwd(), dst, force=true) @info "LaTeX sources copied for debugging to $(sources)" end @@ -147,9 +159,9 @@ function render(doc::Documenter.Document, settings::LaTeX=LaTeX()) # If the build was successful, copy the PDF or the LaTeX source to the .build directory if status && (settings.platform != "none") pdffile = "$(fileprefix).pdf" - cp(pdffile, joinpath(doc.user.root, doc.user.build, pdffile); force = true) + cp(pdffile, joinpath(doc.user.root, doc.user.build, pdffile); force=true) elseif status && (settings.platform == "none") - cp(pwd(), joinpath(doc.user.root, doc.user.build); force = true) + cp(pwd(), joinpath(doc.user.root, doc.user.build); force=true) else error("Compiling the .tex file failed. See logs for more information.") end @@ -170,32 +182,43 @@ const DOCKER_IMAGE_TAG = "0.1" function compile_tex(doc::Documenter.Document, settings::LaTeX, fileprefix::String) if settings.platform == "native" - Sys.which("latexmk") === nothing && (@error "LaTeXWriter: latexmk command not found."; return false) + Sys.which("latexmk") === nothing && + (@error "LaTeXWriter: latexmk command not found."; return false) @info "LaTeXWriter: using latexmk to compile tex." try - piperun(`latexmk -f -interaction=batchmode -halt-on-error -view=none -lualatex -shell-escape $(fileprefix).tex`, clearlogs = true) + piperun( + `latexmk -f -interaction=batchmode -halt-on-error -view=none -lualatex -shell-escape $(fileprefix).tex`, + clearlogs=true + ) return true catch err logs = cp(pwd(), mktempdir(; cleanup=false); force=true) @error "LaTeXWriter: failed to compile tex with latexmk. 
" * - "Logs and partial output can be found in $(Documenter.locrepr(logs))" exception = err + "Logs and partial output can be found in $(Documenter.locrepr(logs))" exception = + err return false end elseif settings.platform == "tectonic" @info "LaTeXWriter: using tectonic to compile tex." tectonic = isnothing(settings.tectonic) ? Sys.which("tectonic") : settings.tectonic - isnothing(tectonic) && (@error "LaTeXWriter: tectonic command not found."; return false) + isnothing(tectonic) && + (@error "LaTeXWriter: tectonic command not found."; return false) try - piperun(`$(tectonic) -X compile --keep-logs -Z shell-escape $(fileprefix).tex`, clearlogs = true) + piperun( + `$(tectonic) -X compile --keep-logs -Z shell-escape $(fileprefix).tex`, + clearlogs=true + ) return true catch err logs = cp(pwd(), mktempdir(; cleanup=false); force=true) @error "LaTeXWriter: failed to compile tex with tectonic. " * - "Logs and partial output can be found in $(Documenter.locrepr(logs))" exception = err + "Logs and partial output can be found in $(Documenter.locrepr(logs))" exception = + err return false end elseif settings.platform == "docker" - Sys.which("docker") === nothing && (@error "LaTeXWriter: docker command not found."; return false) + Sys.which("docker") === nothing && + (@error "LaTeXWriter: docker command not found."; return false) @info "LaTeXWriter: using docker to compile tex." 
script = """ mkdir /home/zeptodoctor/build @@ -204,17 +227,24 @@ function compile_tex(doc::Documenter.Document, settings::LaTeX, fileprefix::Stri latexmk -f -interaction=batchmode -halt-on-error -view=none -lualatex -shell-escape $(fileprefix).tex """ try - piperun(`docker run -itd -u zeptodoctor --name latex-container -v $(pwd()):/mnt/ --rm juliadocs/documenter-latex:$(DOCKER_IMAGE_TAG)`, clearlogs = true) + piperun( + `docker run -itd -u zeptodoctor --name latex-container -v $(pwd()):/mnt/ --rm juliadocs/documenter-latex:$(DOCKER_IMAGE_TAG)`, + clearlogs=true + ) piperun(`docker exec -u zeptodoctor latex-container bash -c $(script)`) piperun(`docker cp latex-container:/home/zeptodoctor/build/$(fileprefix).pdf .`) return true catch err logs = cp(pwd(), mktempdir(; cleanup=false); force=true) @error "LaTeXWriter: failed to compile tex with docker. " * - "Logs and partial output can be found in $(Documenter.locrepr(logs))" exception = err + "Logs and partial output can be found in $(Documenter.locrepr(logs))" exception = + err return false finally - try; piperun(`docker stop latex-container`); catch; end + try + piperun(`docker stop latex-container`) + catch + end end elseif settings.platform == "none" @info "Skipping compiling tex file." @@ -222,38 +252,44 @@ function compile_tex(doc::Documenter.Document, settings::LaTeX, fileprefix::Stri end end -function piperun(cmd; clearlogs = false) +function piperun(cmd; clearlogs=false) verbose = "--verbose" in ARGS || get(ENV, "DOCUMENTER_VERBOSE", "false") == "true" - run(verbose ? cmd : pipeline( - cmd, - stdout = "LaTeXWriter.stdout", - stderr = "LaTeXWriter.stderr", - append = !clearlogs, - )) + run( + if verbose + cmd + else + pipeline( + cmd, + stdout = "LaTeXWriter.stdout", + stderr = "LaTeXWriter.stderr", + append = !clearlogs, + ) + end + ) end function writeheader(io::IO, doc::Documenter.Document, settings::LaTeX) custom = joinpath(doc.user.root, doc.user.source, "assets", "custom.sty") - isfile(custom) ? 
cp(custom, "custom.sty"; force = true) : touch("custom.sty") + isfile(custom) ? cp(custom, "custom.sty"; force=true) : touch("custom.sty") - custom_preamble_file = joinpath(doc.user.root, doc.user.source, "assets", "preamble.tex") + custom_preamble_file = + joinpath(doc.user.root, doc.user.source, "assets", "preamble.tex") if isfile(custom_preamble_file) # copy custom preamble. - cp(custom_preamble_file, "preamble.tex"; force = true) + cp(custom_preamble_file, "preamble.tex"; force=true) else # no custom preamble.tex, use default. - cp(DEFAULT_PREAMBLE_PATH, "preamble.tex"; force = true) - end - preamble = - """ - % Useful variables - \\newcommand{\\DocMainTitle}{$(doc.user.sitename)} - \\newcommand{\\DocVersion}{$(settings.version)} - \\newcommand{\\DocAuthors}{$(doc.user.authors)} - \\newcommand{\\JuliaVersion}{$(VERSION)} - - % ---- Insert preamble - \\input{preamble.tex} - """ + cp(DEFAULT_PREAMBLE_PATH, "preamble.tex"; force=true) + end + preamble = """ + % Useful variables + \\newcommand{\\DocMainTitle}{$(doc.user.sitename)} + \\newcommand{\\DocVersion}{$(settings.version)} + \\newcommand{\\DocAuthors}{$(doc.user.authors)} + \\newcommand{\\JuliaVersion}{$(VERSION)} + + % ---- Insert preamble + \\input{preamble.tex} + """ # output preamble _println(io, preamble) end @@ -269,7 +305,7 @@ istoplevel(n::Node) = !isnothing(n.parent) && isa(n.parent.element, MarkdownAST. 
latex(io::Context, node::Node) = latex(io, node, node.element) latex(io::Context, node::Node, e) = error("$(typeof(e)) not implemented: $e") -function latex(io::Context, children; toplevel = false) +function latex(io::Context, children; toplevel=false) @assert eltype(children) <: MarkdownAST.Node for node in children otherelement = !isa(node.element, NoExtraTopLevelNewlines) @@ -292,14 +328,14 @@ function latex(io::Context, node::Node, ah::Documenter.AnchoredHeader) anchor = ah.anchor # latex(io::IO, anchor::Anchor, page, doc) id = _hash(Documenter.anchor_label(anchor)) - latex(io, node.children; toplevel = istoplevel(node)) + latex(io, node.children; toplevel=istoplevel(node)) _println(io, "\n\\label{", id, "}{}\n") end ## Documentation Nodes. function latex(io::Context, node::Node, ::Documenter.DocsNodesBlock) - latex(io, node.children; toplevel = istoplevel(node)) + latex(io, node.children; toplevel=istoplevel(node)) end function latex(io::Context, node::Node, docs::Documenter.DocsNode) @@ -371,7 +407,7 @@ function latex(io::Context, node::Node, contents::Documenter.ContentsNode) # If we're changing depth, we need to make sure we always print the # correct number of \begin{itemize} and \end{itemize} statements. 
if level > depth - for k in 1:(level - depth) + for k = 1:(level-depth) # if we jump by more than one level deeper we need to put empty # \items in -- otherwise LaTeX will complain (k >= 2) && _println(io, "\\item ~") @@ -379,7 +415,7 @@ function latex(io::Context, node::Node, contents::Documenter.ContentsNode) depth += 1 end elseif level < depth - for _ in 1:(depth - level) + for _ = 1:(depth-level) _println(io, "\\end{itemize}") depth -= 1 end @@ -391,13 +427,15 @@ function latex(io::Context, node::Node, contents::Documenter.ContentsNode) _println(io, "}") end # print any remaining missing \end{itemize} statements - for _ = 1:depth; _println(io, "\\end{itemize}"); end + for _ = 1:depth + _println(io, "\\end{itemize}") + end _println(io) end function latex(io::Context, node::Node, evalnode::Documenter.EvalNode) if evalnode.result !== nothing - latex(io, evalnode.result.children, toplevel = true) + latex(io, evalnode.result.children, toplevel=true) end end @@ -411,20 +449,26 @@ function latex(io::Context, ::Node, d::Dict{MIME,Any}) filename = String(rand('a':'z', 7)) if haskey(d, MIME"image/png"()) write("$(filename).png", base64decode(d[MIME"image/png"()])) - _println(io, """ - \\begin{figure}[H] - \\centering - \\includegraphics[max width=\\linewidth]{$(filename)} - \\end{figure} - """) + _println( + io, + """ +\\begin{figure}[H] +\\centering +\\includegraphics[max width=\\linewidth]{$(filename)} +\\end{figure} +""" + ) elseif haskey(d, MIME"image/jpeg"()) write("$(filename).jpeg", base64decode(d[MIME"image/jpeg"()])) - _println(io, """ - \\begin{figure}[H] - \\centering - \\includegraphics[max width=\\linewidth]{$(filename)} - \\end{figure} - """) + _println( + io, + """ +\\begin{figure}[H] +\\centering +\\includegraphics[max width=\\linewidth]{$(filename)} +\\end{figure} +""" + ) elseif haskey(d, MIME"text/latex"()) # If it has a latex MIME, just write it out directly. 
content = d[MIME("text/latex")] @@ -481,11 +525,7 @@ function latex(io::Context, node::Node, heading::MarkdownAST.Heading) end # Whitelisted lexers. -const LEXER = Set([ - "julia", - "jlcon", - "text", -]) +const LEXER = Set(["julia", "jlcon", "text",]) function latex(io::Context, node::Node, code::MarkdownAST.CodeBlock) language = Documenter.codelang(code.info) @@ -505,7 +545,10 @@ function latex(io::Context, node::Node, code::MarkdownAST.CodeBlock) if language == "text/plain" _print(io, escape ? "," : "[") # Special-case the formatting of code outputs from Julia. - _println(io, "xleftmargin=-\\fboxsep,xrightmargin=-\\fboxsep,bgcolor=white,frame=single]{text}") + _println( + io, + "xleftmargin=-\\fboxsep,xrightmargin=-\\fboxsep,bgcolor=white,frame=single]{text}" + ) else _println(io, escape ? "]{" : "{", language, "}") end @@ -518,7 +561,8 @@ function latex(io::Context, node::Node, code::MarkdownAST.CodeBlock) return end -latex(io::Context, node::Node, mcb::Documenter.MultiCodeBlock) = latex(io, node, join_multiblock(node)) +latex(io::Context, node::Node, mcb::Documenter.MultiCodeBlock) = + latex(io, node, join_multiblock(node)) function join_multiblock(node::Node) @assert node.element isa Documenter.MultiCodeBlock io = IOBuffer() @@ -537,10 +581,15 @@ end function _print_code_escapes_minted(io, s::AbstractString) for ch in s - ch === '#' ? _print(io, "##%") : - ch === '%' ? _print(io, "#%%") : # Note: "#\\%%" results in pygmentize error... - ch === '⊻' ? _print(io, "#\\unicodeveebar%") : - _print(io, ch) + if ch === '#' + _print(io, "##%") + elseif ch === '%' + _print(io, "#%%") # Note: "#\\%%" results in pygmentize error... + elseif ch === '⊻' + _print(io, "#\\unicodeveebar%") + else + _print(io, ch) + end end end @@ -552,8 +601,7 @@ end function _print_code_escapes_inline(io, s::AbstractString) for ch in s - ch === '⊻' ? _print(io, "\\unicodeveebar{}") : - latexesc(io, ch) + ch === '⊻' ? 
_print(io, "\\unicodeveebar{}") : latexesc(io, ch) end end @@ -697,7 +745,8 @@ function latex(io::Context, node::Node, image::MarkdownAST.Image) # TODO: also print the .title field somehow wrapblock(io, "figure") do _println(io, "\\centering") - @warn "images with absolute URLs not supported in LaTeX output in $(Documenter.locrepr(io.filename))" url = image.destination + @warn "images with absolute URLs not supported in LaTeX output in $(Documenter.locrepr(io.filename))" url = + image.destination # We nevertheless output an \includegraphics with the URL. The LaTeX build will # then give an error, indicating to the user that something wrong. url = replace(image.destination, "\\" => "/") # use / on Windows too. @@ -809,7 +858,7 @@ latex(io::Context, node::Node, ::MarkdownAST.LineBreak) = _println(io, "\\\\") # Documenter. -const _latexescape_chars = Dict{Char, AbstractString}( +const _latexescape_chars = Dict{Char,AbstractString}( '~' => "{\\textasciitilde}", '\u00A0' => "~", # nonbreaking space '^' => "{\\textasciicircum}", @@ -873,6 +922,6 @@ function files!(out, p::Pair{<:AbstractString,<:Any}, depth) return out end -files(v::Vector) = files!(Tuple{String, String, Int}[], v, 0) +files(v::Vector) = files!(Tuple{String,String,Int}[], v, 0) end diff --git a/src/makedocs.jl b/src/makedocs.jl index 68aca25a0c..796ec5d868 100644 --- a/src/makedocs.jl +++ b/src/makedocs.jl @@ -238,15 +238,17 @@ information. A guide detailing how to document a package using Documenter's [`makedocs`](@ref) is provided in the [setup guide in the manual](@ref Package-Guide). """ -function makedocs(; debug = false, format = HTML(), kwargs...) +function makedocs(; debug=false, format=HTML(), kwargs...) document = Documenter.Document(; format=format, kwargs...) # Before starting the build pipeline, we empty out the subtype cache used by # Selectors.dispatch. This is to make sure that we pick up any new selector stages that # may have been added to the selector pipelines between makedocs calls. 
empty!(Selectors.selector_subtypes) - cd(document.user.root) do; withenv(NO_KEY_ENV...) do - Selectors.dispatch(Builder.DocumentPipeline, document) - end end + cd(document.user.root) do + withenv(NO_KEY_ENV...) do + Selectors.dispatch(Builder.DocumentPipeline, document) + end + end debug ? document : nothing end @@ -272,10 +274,12 @@ $(join(Ref("`:") .* string.(ERROR_NAMES) .* Ref("`"), ", ", ", and ")). """ function except(errors::Symbol...) invalid_errors = setdiff(errors, ERROR_NAMES) - isempty(invalid_errors) || throw(DomainError( - tuple(invalid_errors...), - "Invalid error classes passed to Documenter.except. Valid error classes are: $(ERROR_NAMES)" - )) + isempty(invalid_errors) || throw( + DomainError( + tuple(invalid_errors...), + "Invalid error classes passed to Documenter.except. Valid error classes are: $(ERROR_NAMES)" + ) + ) setdiff(ERROR_NAMES, errors) end @@ -340,12 +344,12 @@ See the [Remote repository links](@ref) section in the manualfor more informatio struct MissingRemoteError <: Exception path::String linerange::Any - mod::Union{Module, Nothing} + mod::Union{Module,Nothing} function MissingRemoteError(; path::AbstractString, linerange=nothing, - mod::Union{Module, Nothing}=nothing + mod::Union{Module,Nothing}=nothing ) new(path, linerange, mod) end @@ -356,10 +360,13 @@ function Base.showerror(io::IO, e::MissingRemoteError) isnothing(e.linerange) || print(io, ':', e.linerange) println(io) isnothing(e.mod) || println(io, " module: ", e.mod) - print(io, """ - Documenter was unable to automatically determine the remote repository for this file. - This can happen if you are including docstrings or pages from secondary packages. Those packages - must be cloned as Git repositories (i.e. Pkg.develop instead Pkg.add), or the `remotes` keyword - must be configured appropriately. 
See the 'Remote repository links' section in the manual for - more information.""") + print( + io, + """ +Documenter was unable to automatically determine the remote repository for this file. +This can happen if you are including docstrings or pages from secondary packages. Those packages +must be cloned as Git repositories (i.e. Pkg.develop instead Pkg.add), or the `remotes` keyword +must be configured appropriately. See the 'Remote repository links' section in the manual for +more information.""" + ) end diff --git a/src/utilities/DOM.jl b/src/utilities/DOM.jl index c87757e7d7..b7d4a1c862 100644 --- a/src/utilities/DOM.jl +++ b/src/utilities/DOM.jl @@ -106,19 +106,93 @@ export @tags # - https://developer.mozilla.org/en-US/docs/Glossary/empty_element # const BLOCK_ELEMENTS = Set([ - :address, :article, :aside, :blockquote, :canvas, :dd, :div, :dl, - :fieldset, :figcaption, :figure, :footer, :form, :h1, :h2, :h3, :h4, :h5, - :h6, :header, :hgroup, :hr, :li, :main, :nav, :noscript, :ol, :output, :p, - :pre, :section, :table, :tfoot, :ul, :video, + :address, + :article, + :aside, + :blockquote, + :canvas, + :dd, + :div, + :dl, + :fieldset, + :figcaption, + :figure, + :footer, + :form, + :h1, + :h2, + :h3, + :h4, + :h5, + :h6, + :header, + :hgroup, + :hr, + :li, + :main, + :nav, + :noscript, + :ol, + :output, + :p, + :pre, + :section, + :table, + :tfoot, + :ul, + :video, ]) const INLINE_ELEMENTS = Set([ - :a, :abbr, :acronym, :b, :bdo, :big, :br, :button, :cite, :code, :dfn, :em, - :i, :img, :input, :kbd, :label, :map, :object, :q, :samp, :script, :select, - :small, :span, :strong, :sub, :sup, :textarea, :time, :tt, :var, + :a, + :abbr, + :acronym, + :b, + :bdo, + :big, + :br, + :button, + :cite, + :code, + :dfn, + :em, + :i, + :img, + :input, + :kbd, + :label, + :map, + :object, + :q, + :samp, + :script, + :select, + :small, + :span, + :strong, + :sub, + :sup, + :textarea, + :time, + :tt, + :var, ]) const VOID_ELEMENTS = Set([ - :area, :base, :br, :col, :command, 
:embed, :hr, :img, :input, :keygen, - :link, :meta, :param, :source, :track, :wbr, + :area, + :base, + :br, + :col, + :command, + :embed, + :hr, + :img, + :input, + :keygen, + :link, + :meta, + :param, + :source, + :track, + :wbr, ]) const ALL_ELEMENTS = union(BLOCK_ELEMENTS, INLINE_ELEMENTS, VOID_ELEMENTS) @@ -135,7 +209,7 @@ Use [`@tags`](@ref) to define instances of this type rather than manually creating them via `Tag(:tagname)`. """ struct Tag - name :: Symbol + name::Symbol end Base.show(io::IO, t::Tag) = print(io, "<", t.name, ">") @@ -161,10 +235,12 @@ function template(args...) end ``` """ -macro tags(args...) esc(tags(args)) end +macro tags(args...) + esc(tags(args)) +end tags(s) = :(($(s...),) = $(map(Tag, s))) -const Attributes = Vector{Pair{Symbol, String}} +const Attributes = Vector{Pair{Symbol,String}} """ Represents an element within an HTML document including any textual content, @@ -174,12 +250,13 @@ This type should not be constructed directly, but instead via `(...)` and `[...]` applied to a [`Tag`](@ref) or another [`Node`](@ref) object. """ struct Node - name :: Symbol - text :: String - attributes :: Attributes - nodes :: Vector{Node} + name::Symbol + text::String + attributes::Attributes + nodes::Vector{Node} - Node(name::Symbol, attr::Attributes, data::Vector{Node}) = new(name, EMPTY_STRING, attr, data) + Node(name::Symbol, attr::Attributes, data::Vector{Node}) = + new(name, EMPTY_STRING, attr, data) Node(text::AbstractString) = new(TEXT, text) end @@ -200,7 +277,7 @@ attr(args) = flatten!(attributes!, Attributes(), args) # # Types that must not be flattened when constructing a `Node`'s child vector. # -const Atom = Union{AbstractString, Node, Pair, Symbol} +const Atom = Union{AbstractString,Node,Pair,Symbol} """ # Signatures @@ -214,7 +291,9 @@ Flatten the contents the third argument into the second after applying the function `f!` to the element. 
""" flatten!(f!, out, x::Atom) = f!(out, x) -flatten!(f!, out, xs) = (for x in xs; flatten!(f!, out, x); end; out) +flatten!(f!, out, xs) = (for x in xs + flatten!(f!, out, x) +end; out) # # Helper methods for handling flattening children elements in `Node` construction. @@ -231,7 +310,7 @@ function attributes!(out, s::AbstractString) print(startswith(x.match, '.') ? class : id, x.captures[1], ' ') end position(class) === 0 || push!(out, tostr(:class => rstrip(String(take!(class))))) - position(id) === 0 || push!(out, tostr(:id => rstrip(String(take!(id))))) + position(id) === 0 || push!(out, tostr(:id => rstrip(String(take!(id))))) return out end attributes!(out, s::Symbol) = push!(out, tostr(s => "")) @@ -283,11 +362,19 @@ function escapehtml(text::AbstractString) if occursin(r"[<>&'\"]", text) buffer = IOBuffer() for char in text - char === '<' ? write(buffer, "<") : - char === '>' ? write(buffer, ">") : - char === '&' ? write(buffer, "&") : - char === '\'' ? write(buffer, "'") : - char === '"' ? 
write(buffer, """) : write(buffer, char) + if char === '<' + write(buffer, "<") + elseif char === '>' + write(buffer, ">") + elseif char === '&' + write(buffer, "&") + elseif char === '\'' + write(buffer, "'") + elseif char === '"' + write(buffer, """) + else + write(buffer, char) + end end String(take!(buffer)) else diff --git a/src/utilities/JSDependencies.jl b/src/utilities/JSDependencies.jl index 12a49c5e74..734d388bbb 100644 --- a/src/utilities/JSDependencies.jl +++ b/src/utilities/JSDependencies.jl @@ -25,13 +25,18 @@ RemoteLibrary(name::AbstractString, url::AbstractString; deps=String[], exports= ``` """ struct RemoteLibrary - name :: String - url :: String + name::String + url::String # The following become part of the shim - deps :: Vector{String} - exports :: Union{Nothing, String} - - function RemoteLibrary(name::AbstractString, url::AbstractString; deps=String[], exports=nothing) + deps::Vector{String} + exports::Union{Nothing,String} + + function RemoteLibrary( + name::AbstractString, + url::AbstractString; + deps=String[], + exports=nothing + ) new(name, url, deps, exports) end end @@ -56,9 +61,9 @@ Snippet(deps::AbstractVector, args::AbstractVector, js::AbstractString) ``` """ struct Snippet - deps :: Vector{String} - args :: Vector{String} - js :: String + deps::Vector{String} + args::Vector{String} + js::String function Snippet(deps::AbstractVector, args::AbstractVector, js::AbstractString) new(deps, args, js) @@ -88,12 +93,14 @@ RequireJS(libraries::AbstractVector{RemoteLibrary}, snippets::AbstractVector{Sni * """ struct RequireJS - libraries :: Dict{String, RemoteLibrary} - snippets :: Vector{Snippet} - - function RequireJS(libraries::AbstractVector, snippets::AbstractVector = Snippet[]) - all(x -> isa(x, RemoteLibrary), libraries) || throw(ArgumentError("Bad element types for `libraries`: $(typeof.(libraries))")) - all(x -> isa(x, Snippet), snippets) || throw(ArgumentError("Bad element types for `snippets`: $(typeof.(snippets))")) + 
libraries::Dict{String,RemoteLibrary} + snippets::Vector{Snippet} + + function RequireJS(libraries::AbstractVector, snippets::AbstractVector=Snippet[]) + all(x -> isa(x, RemoteLibrary), libraries) || + throw(ArgumentError("Bad element types for `libraries`: $(typeof.(libraries))")) + all(x -> isa(x, Snippet), snippets) || + throw(ArgumentError("Bad element types for `snippets`: $(typeof.(snippets))")) r = new(Dict(), []) for library in libraries push!(r, library) @@ -157,16 +164,22 @@ function writejs(filename::AbstractString, r::RequireJS) end function writejs(io::IO, r::RequireJS) - write(io, """ - // Generated by Documenter.jl - requirejs.config({ - paths: { - """) + write( + io, + """ +// Generated by Documenter.jl +requirejs.config({ + paths: { +""" + ) for (name, lib) in r.libraries url = endswith(lib.url, ".js") ? replace(lib.url, r"\.js$" => "") : lib.url - write(io, """ - '$(jsescape(lib.name))': '$(jsescape(url))', - """) + write( + io, + """ + '$(jsescape(lib.name))': '$(jsescape(url))', + """ + ) end write(io, " }") @@ -177,12 +190,15 @@ function writejs(io::IO, r::RequireJS) for s in r.snippets args = join(s.args, ", ") # Note: not string literals => no escaping deps = join(("\'$(jsescape(d))\'" for d in s.deps), ", ") - write(io, """ - $("/"^80) - require([$(deps)], function($(args)) { - $(s.js) - }) - """) + write( + io, + """ + $("/"^80) + require([$(deps)], function($(args)) { + $(s.js) + }) + """ + ) end end @@ -243,9 +259,10 @@ shorter than the list of dependencies. """ function parse_snippet end -parse_snippet(filename::AbstractString; kwargs...) = open(filename, "r") do io - parse_snippet(io; kwargs...) -end +parse_snippet(filename::AbstractString; kwargs...) = + open(filename, "r") do io + parse_snippet(io; kwargs...) 
+ end function parse_snippet(io::IO) libraries = String[] diff --git a/src/utilities/MDFlatten.jl b/src/utilities/MDFlatten.jl index a414975f2b..df87a620aa 100644 --- a/src/utilities/MDFlatten.jl +++ b/src/utilities/MDFlatten.jl @@ -21,7 +21,7 @@ then be used as input for search engines. mdflatten(node) = sprint(mdflatten, node) mdflatten(io, node::Node) = mdflatten(io, node, node.element) -mdflatten(io, nodes::Vector{T}) where T <: Node = foreach(n -> mdflatten(io, n), nodes) +mdflatten(io, nodes::Vector{T}) where {T<:Node} = foreach(n -> mdflatten(io, n), nodes) function mdflatten(io, children::MarkdownAST.NodeChildren) # this special case separates top level blocks with newlines newlines = isa(children.parent.element, MarkdownAST.Document) @@ -31,19 +31,24 @@ function mdflatten(io, children::MarkdownAST.NodeChildren) end end -mdflatten(io, node::Node, e::MarkdownAST.AbstractElement) = error("Unimplemented element for mdflatten: $(typeof(e))") +mdflatten(io, node::Node, e::MarkdownAST.AbstractElement) = + error("Unimplemented element for mdflatten: $(typeof(e))") # Most block and inline (container) elements just reduce down to printing out their # child nodes. 
-mdflatten(io, node::Node, ::Union{ - MarkdownAST.Document, - MarkdownAST.Heading, - MarkdownAST.Paragraph, - MarkdownAST.BlockQuote, - MarkdownAST.Link, - MarkdownAST.Strong, - MarkdownAST.Emph, -}) = mdflatten(io, node.children) +mdflatten( + io, + node::Node, + ::Union{ + MarkdownAST.Document, + MarkdownAST.Heading, + MarkdownAST.Paragraph, + MarkdownAST.BlockQuote, + MarkdownAST.Link, + MarkdownAST.Strong, + MarkdownAST.Emph, + } +) = mdflatten(io, node.children) function mdflatten(io, node::Node, list::MarkdownAST.List) for (idx, li) in enumerate(node.children) @@ -56,7 +61,7 @@ function mdflatten(io, node::Node, list::MarkdownAST.List) end function mdflatten(io, node::Node, t::MarkdownAST.Table) rows = collect(Iterators.flatten(thtb.children for thtb in node.children)) - for (idx, row) = enumerate(rows) + for (idx, row) in enumerate(rows) for (jdx, x) in enumerate(row.children) mdflatten(io, x.children) jdx == length(row.children) || print(io, ' ') @@ -72,12 +77,14 @@ function mdflatten(io, node::Node, e::MarkdownAST.Image) mdflatten(io, node.children) print(io, ")") end -mdflatten(io, node::Node, m::Union{MarkdownAST.InlineMath, MarkdownAST.DisplayMath}) = print(io, replace(m.math, r"[^()+\-*^=\w\s]" => "")) +mdflatten(io, node::Node, m::Union{MarkdownAST.InlineMath,MarkdownAST.DisplayMath}) = + print(io, replace(m.math, r"[^()+\-*^=\w\s]" => "")) mdflatten(io, node::Node, e::MarkdownAST.LineBreak) = print(io, '\n') mdflatten(io, node::Node, ::MarkdownAST.ThematicBreak) = nothing # Is both inline and block -mdflatten(io, node::Node, c::Union{MarkdownAST.Code, MarkdownAST.CodeBlock}) = print(io, c.code) +mdflatten(io, node::Node, c::Union{MarkdownAST.Code,MarkdownAST.CodeBlock}) = + print(io, c.code) # Special (inline) "node" -- due to JuliaMark's interpolations mdflatten(io, node::Node, value::MarkdownAST.JuliaValue) = print(io, value.ref) diff --git a/src/utilities/Remotes.jl b/src/utilities/Remotes.jl index 1a3add0f50..36e858925a 100644 --- 
a/src/utilities/Remotes.jl +++ b/src/utilities/Remotes.jl @@ -99,7 +99,12 @@ function repofile(remote::Remote, ref, filename, linerange=nothing) filename = lstrip(filename, '/') # remove leading spaces # Only pass UnitRanges to user code (even though we require the users to support any # collection supporting first/last). - fileurl(remote, ref, filename, isnothing(linerange) ? nothing : Int(first(linerange)):Int(last(linerange))) + fileurl( + remote, + ref, + filename, + isnothing(linerange) ? nothing : Int(first(linerange)):Int(last(linerange)) + ) end """ @@ -119,8 +124,8 @@ The single-argument constructor assumes that the user and repository parts are s a slash (e.g. `JuliaDocs/Documenter.jl`). """ struct GitHub <: Remote - user :: String - repo :: String + user::String + repo::String end function GitHub(remote::AbstractString) user, repo = split(remote, '/') @@ -155,9 +160,9 @@ repository parts are separated by a slash (e.g., `JuliaDocs/Documenter.jl`). """ struct GitLab <: Remote - host :: String - user :: String - repo :: String + host::String + user::String + repo::String end GitLab(user::AbstractString, repo::AbstractString) = GitLab("gitlab.com", user, repo) function GitLab(remote::AbstractString) @@ -208,14 +213,15 @@ However, an explicit [`Remote`](@ref) object is preferred over using a template configuring Documenter. """ struct URL <: Remote - urltemplate :: String - repourl :: Union{String, Nothing} + urltemplate::String + repourl::Union{String,Nothing} URL(urltemplate, repourl=nothing) = new(urltemplate, repourl) end repourl(remote::URL) = remote.repourl function fileurl(remote::URL, ref, filename, linerange) hosttype = repo_host_from_url(remote.urltemplate) - lines = (linerange === nothing) ? "" : format_line(linerange, LineRangeFormatting(hosttype)) + lines = + (linerange === nothing) ? 
"" : format_line(linerange, LineRangeFormatting(hosttype)) ref = format_commit(ref, hosttype) # lines = if linerange !== nothing # end diff --git a/src/utilities/Selectors.jl b/src/utilities/Selectors.jl index 48e2f57ff2..bea113d3fc 100644 --- a/src/utilities/Selectors.jl +++ b/src/utilities/Selectors.jl @@ -142,7 +142,7 @@ Selectors.matcher(::Type{Debug}, x) = true Selectors.runner(::Type{Debug}, x) = @show x ``` """ -strict(::Type{T}) where {T <: AbstractSelector} = true +strict(::Type{T}) where {T<:AbstractSelector} = true """ Disable a particular case in a selector so that it is never used. @@ -151,7 +151,7 @@ Disable a particular case in a selector so that it is never used. Selectors.disable(::Type{Debug}) = true ``` """ -disable(::Type{T}) where {T <: AbstractSelector} = false +disable(::Type{T}) where {T<:AbstractSelector} = false """ Call `Selectors.runner(T, args...)` where `T` is a subtype of @@ -161,9 +161,9 @@ Call `Selectors.runner(T, args...)` where `T` is a subtype of Selectors.dispatch(MySelector, args...) ``` """ -function dispatch(::Type{T}, x...) where T <: AbstractSelector +function dispatch(::Type{T}, x...) where {T<:AbstractSelector} types = get!(selector_subtypes, T) do - sort(leaf_subtypes(T); by = order) + sort(leaf_subtypes(T); by=order) end for t in types if !disable(t) && matcher(t, x...) @@ -181,7 +181,7 @@ The returned list includes subtypes of subtypes, and it does not distinguish between concrete types (i.e. types which are guaranteed not to have subtypes) and abstract types (which may or may not have subtypes). 
""" -function leaf_subtypes(::Type{T}) where T +function leaf_subtypes(::Type{T}) where {T} stack = Type[T] leaves = Type[] while !isempty(stack) diff --git a/src/utilities/TextDiff.jl b/src/utilities/TextDiff.jl index d77e0ce26b..1020ee0567 100644 --- a/src/utilities/TextDiff.jl +++ b/src/utilities/TextDiff.jl @@ -8,9 +8,12 @@ function lcs(old_tokens::Vector, new_tokens::Vector) m = length(old_tokens) n = length(new_tokens) weights = zeros(Int, m + 1, n + 1) - for i = 2:(m + 1), j = 2:(n + 1) - weights[i, j] = old_tokens[i - 1] == new_tokens[j - 1] ? - weights[i - 1, j - 1] + 1 : max(weights[i, j - 1], weights[i - 1, j]) + for i = 2:(m+1), j = 2:(n+1) + weights[i, j] = if old_tokens[i-1] == new_tokens[j-1] + weights[i-1, j-1] + 1 + else + max(weights[i, j-1], weights[i-1, j]) + end end return weights end @@ -18,22 +21,22 @@ end function makediff(weights::Matrix, old_tokens::Vector, new_tokens::Vector) m = length(old_tokens) n = length(new_tokens) - diff = Vector{Pair{Symbol, SubString{String}}}() + diff = Vector{Pair{Symbol,SubString{String}}}() makediff!(diff, weights, old_tokens, new_tokens, m + 1, n + 1) return diff end function makediff!(out, weights, X, Y, i, j) - if i > 1 && j > 1 && X[i - 1] == Y[j - 1] + if i > 1 && j > 1 && X[i-1] == Y[j-1] makediff!(out, weights, X, Y, i - 1, j - 1) - push!(out, :normal => X[i - 1]) + push!(out, :normal => X[i-1]) else - if j > 1 && (i == 1 || weights[i, j - 1] >= weights[i - 1, j]) + if j > 1 && (i == 1 || weights[i, j-1] >= weights[i-1, j]) makediff!(out, weights, X, Y, i, j - 1) - push!(out, :green => Y[j - 1]) - elseif i > 1 && (j == 1 || weights[i, j - 1] < weights[i - 1, j]) + push!(out, :green => Y[j-1]) + elseif i > 1 && (j == 1 || weights[i, j-1] < weights[i-1, j]) makediff!(out, weights, X, Y, i - 1, j) - push!(out, :red => X[i - 1]) + push!(out, :red => X[i-1]) end end return out @@ -70,9 +73,9 @@ struct Diff{T} old_tokens::Vector{SubString{String}} new_tokens::Vector{SubString{String}} weights::Matrix{Int} - 
diff::Vector{Pair{Symbol, SubString{String}}} + diff::Vector{Pair{Symbol,SubString{String}}} - function Diff{T}(old_text::AbstractString, new_text::AbstractString) where T + function Diff{T}(old_text::AbstractString, new_text::AbstractString) where {T} reg = splitter(T) old_tokens = splitby(reg, old_text) new_tokens = splitby(reg, new_text) @@ -84,7 +87,7 @@ end # Display. -prefix(::Diff{Lines}, s::Symbol) = s === :green ? "+ " : s === :red ? "- " : " " +prefix(::Diff{Lines}, s::Symbol) = s === :green ? "+ " : s === :red ? "- " : " " prefix(::Diff{Words}, ::Symbol) = "" function Base.show(io::IO, diff::Diff) diff --git a/src/utilities/utilities.jl b/src/utilities/utilities.jl index 1dc397e2ee..e8e4e6d4ac 100644 --- a/src/utilities/utilities.jl +++ b/src/utilities/utilities.jl @@ -8,8 +8,17 @@ import Base64: stringmime using .Remotes: Remote, repourl, repofile # These imports are here to support code that still assumes that these names are defined # in the Utilities module. -using .Remotes: RepoHost, RepoGithub, RepoBitbucket, RepoGitlab, RepoAzureDevOps, - RepoUnknown, format_commit, format_line, repo_host_from_url, LineRangeFormatting +using .Remotes: + RepoHost, + RepoGithub, + RepoBitbucket, + RepoGitlab, + RepoAzureDevOps, + RepoUnknown, + format_commit, + format_line, + repo_host_from_url, + LineRangeFormatting """ @docerror(doc, tag, msg, exs...) @@ -24,8 +33,10 @@ error (if `tag` matches the `doc.user.strict` setting) or warning. see `@error` and `@warn` """ macro docerror(doc, tag, msg, exs...) 
- isa(tag, QuoteNode) && isa(tag.value, Symbol) || error("invalid call of @docerror: tag=$tag") - tag.value ∈ ERROR_NAMES || throw(ArgumentError("tag $(tag) is not a valid Documenter error")) + isa(tag, QuoteNode) && isa(tag.value, Symbol) || + error("invalid call of @docerror: tag=$tag") + tag.value ∈ ERROR_NAMES || + throw(ArgumentError("tag $(tag) is not a valid Documenter error")) doc, msg = esc(doc), esc(msg) # The `exs` portion can contain variable name / label overrides, i.e. `foo = bar()` # We don't want to apply esc() on new labels, since they get printed as expressions then. @@ -90,7 +101,7 @@ Returns the path to the Documenter `assets` directory. """ assetsdir() = normpath(joinpath(dirname(@__FILE__), "..", "..", "assets")) -cleandir(d::AbstractString) = (isdir(d) && rm(d, recursive = true); mkdir(d)) +cleandir(d::AbstractString) = (isdir(d) && rm(d, recursive=true); mkdir(d)) """ Find the path of a file relative to the `source` directory. `root` is the path @@ -132,11 +143,18 @@ returns this expression normally and it must be handled appropriately by the cal The `linenumbernode` can be passed as a `LineNumberNode` to give information about filename and starting line number of the block (requires Julia 1.6 or higher). """ -function parseblock(code::AbstractString, doc, file; skip = 0, keywords = true, raise=true, - linenumbernode=nothing) +function parseblock( + code::AbstractString, + doc, + file; + skip=0, + keywords=true, + raise=true, + linenumbernode=nothing +) # Drop `skip` leading lines from the code block. Needed for deprecated `{docs}` syntax. 
code = string(code, '\n') - code = last(split(code, '\n', limit = skip + 1)) + code = last(split(code, '\n', limit=skip + 1)) endofstr = lastindex(code) results = [] cursor = 1 @@ -145,14 +163,19 @@ function parseblock(code::AbstractString, doc, file; skip = 0, keywords = true, line = match(r"^(.*)\r?\n"m, SubString(code, cursor)).match keyword = Symbol(strip(line)) (ex, ncursor) = - # TODO: On 0.7 Symbol("") is in Docs.keywords, remove that check when dropping 0.6 + # TODO: On 0.7 Symbol("") is in Docs.keywords, remove that check when dropping 0.6 if keywords && (haskey(Docs.keywords, keyword) || keyword == Symbol("")) (QuoteNode(keyword), cursor + lastindex(line)) else try Meta.parse(code, cursor; raise=raise) catch err - @docerror(doc, :parse_error, "failed to parse exception in $(locrepr(file))", exception = err) + @docerror( + doc, + :parse_error, + "failed to parse exception in $(locrepr(file))", + exception = err + ) break end end @@ -179,7 +202,7 @@ function parseblock(code::AbstractString, doc, file; skip = 0, keywords = true, else update_linenumbernodes!(expr, linenumbernode.file, linenumbernode.line) end - results[i] = (expr , results[i][2]) + results[i] = (expr, results[i][2]) end end results @@ -187,7 +210,7 @@ end isassign(x) = isexpr(x, :(=), 2) && isa(x.args[1], Symbol) function update_linenumbernodes!(x::Expr, newfile, lineshift) - for i in 1:length(x.args) + for i = 1:length(x.args) x.args[i] = update_linenumbernodes!(x.args[i], newfile, lineshift) end return x @@ -200,7 +223,7 @@ end # Finding submodules. -const ModVec = Union{Module, Vector{Module}} +const ModVec = Union{Module,Vector{Module}} """ Returns the set of submodules of a given root module/s. 
@@ -212,12 +235,14 @@ function submodules(modules::Vector{Module}) end out end -function submodules(root::Module, seen = Set{Module}()) +function submodules(root::Module, seen=Set{Module}()) push!(seen, root) for name in names(root, all=true) if Base.isidentifier(name) && isdefined(root, name) && !isdeprecated(root, name) object = getfield(root, name) - if isa(object, Module) && !(object in seen) && parentmodule(object::Module) == root + if isa(object, Module) && + !(object in seen) && + parentmodule(object::Module) == root submodules(object, seen) end end @@ -236,9 +261,9 @@ end Represents an object stored in the docsystem by its binding and signature. """ struct Object - binding :: Binding - signature :: Type - noncanonical_extra :: Union{String, Nothing} + binding::Binding + signature::Type + noncanonical_extra::Union{String,Nothing} function Object(b::Binding, signature::Type, noncanonical_extra=nothing) m = nameof(b.mod) === b.var ? parentmodule(b.mod) : b.mod @@ -249,9 +274,13 @@ end is_canonical(o::Object) = o.noncanonical_extra === nothing function splitexpr(x::Expr) - isexpr(x, :macrocall) ? splitexpr(x.args[1]) : - isexpr(x, :.) ? (x.args[1], x.args[2]) : - error("Invalid @var syntax `$x`.") + if isexpr(x, :macrocall) + splitexpr(x.args[1]) + elseif isexpr(x, :.) + (x.args[1], x.args[2]) + else + error("Invalid @var syntax `$x`.") + end end splitexpr(s::Symbol) = :(Main), quot(s) splitexpr(other) = error("Invalid @var syntax `$other`.") @@ -261,7 +290,7 @@ splitexpr(other) = error("Invalid @var syntax `$other`.") Returns a expression that, when evaluated, returns an [`Object`](@ref) representing `ex`. """ -function object(ex::Union{Symbol, Expr}, str::AbstractString) +function object(ex::Union{Symbol,Expr}, str::AbstractString) binding = Expr(:call, Binding, splitexpr(Docs.namify(ex))...) 
signature = Base.Docs.signature(ex) isexpr(ex, :macrocall, 2) && !endswith(str, "()") && (signature = :(Union{})) @@ -282,10 +311,10 @@ function Base.print(io::IO, obj::Object) print_signature(io, obj.signature) print_extra(io, obj.noncanonical_extra) end -print_extra(io::IO, noncanonical_extra::Nothing ) = nothing -print_extra(io::IO, noncanonical_extra::String ) = print(io, "-", noncanonical_extra) -print_signature(io::IO, signature::Union{Union, Type{Union{}}}) = nothing -print_signature(io::IO, signature) = print(io, '-', signature) +print_extra(io::IO, noncanonical_extra::Nothing) = nothing +print_extra(io::IO, noncanonical_extra::String) = print(io, "-", noncanonical_extra) +print_signature(io::IO, signature::Union{Union,Type{Union{}}}) = nothing +print_signature(io::IO, signature) = print(io, '-', signature) ## docs ## ==== @@ -298,7 +327,7 @@ Returns an expression that, when evaluated, returns the docstrings associated wi function docs end # Macro representation changed between 0.4 and 0.5. -function docs(ex::Union{Symbol, Expr}, str::AbstractString) +function docs(ex::Union{Symbol,Expr}, str::AbstractString) isexpr(ex, :macrocall, 2) && !endswith(rstrip(str), "()") && (ex = quot(ex)) :(Base.Docs.@doc $ex) end @@ -307,10 +336,10 @@ docs(qn::QuoteNode, str::AbstractString) = :(Base.Docs.@doc $(qn.value)) """ Returns the category name of the provided [`Object`](@ref). """ -doccat(obj::Object) = startswith(string(obj.binding.var), '@') ? - "Macro" : doccat(obj.binding, obj.signature) +doccat(obj::Object) = + startswith(string(obj.binding.var), '@') ? 
"Macro" : doccat(obj.binding, obj.signature) -function doccat(b::Binding, ::Union{Union, Type{Union{}}}) +function doccat(b::Binding, ::Union{Union,Type{Union{}}}) if b.mod === Main && haskey(Base.Docs.keywords, b.var) "Keyword" elseif startswith(string(b.var), '@') @@ -320,13 +349,13 @@ function doccat(b::Binding, ::Union{Union, Type{Union{}}}) end end -doccat(b::Binding, ::Type) = "Method" +doccat(b::Binding, ::Type) = "Method" doccat(::Function) = "Function" -doccat(::Type) = "Type" +doccat(::Type) = "Type" doccat(x::UnionAll) = doccat(Base.unwrap_unionall(x)) -doccat(::Module) = "Module" -doccat(::Any) = "Constant" +doccat(::Module) = "Module" +doccat(::Any) = "Constant" """ filterdocs(doc, modules) @@ -396,7 +425,8 @@ determine if this is a repository or not. If there is a file called `dbdir`, the contents is checked under the assumption that it is a Git worktree or a submodule. """ function find_root_parent(f, path) - ispath(path) || throw(ArgumentError("find_root_parent called with non-existent path\n path: $path")) + ispath(path) || + throw(ArgumentError("find_root_parent called with non-existent path\n path: $path")) path = realpath(path) parent_dir = isdir(path) ? 
path : dirname(path) parent_dir_last = "" @@ -433,38 +463,62 @@ end struct RepoCommitError <: Exception directory::String - msg :: String - err_bt :: Union{Tuple{Any,Any},Nothing} - RepoCommitError(directory::AbstractString, msg::AbstractString) = new(directory, msg, nothing) - RepoCommitError(directory::AbstractString, msg::AbstractString, e, bt) = new(directory, msg, (e, bt)) + msg::String + err_bt::Union{Tuple{Any,Any},Nothing} + RepoCommitError(directory::AbstractString, msg::AbstractString) = + new(directory, msg, nothing) + RepoCommitError(directory::AbstractString, msg::AbstractString, e, bt) = + new(directory, msg, (e, bt)) end function repo_commit(repository_root::AbstractString) - isdir(repository_root) || throw(RepoCommitError(repository_root, "repository_root not a directory")) + isdir(repository_root) || + throw(RepoCommitError(repository_root, "repository_root not a directory")) cd(repository_root) do try toplevel = readchomp(`$(git()) rev-parse --show-toplevel`) if !ispath(toplevel) - throw(RepoCommitError(repository_root, "`git rev-parse --show-toplevel` returned invalid path: $toplevel")) + throw( + RepoCommitError( + repository_root, + "`git rev-parse --show-toplevel` returned invalid path: $toplevel" + ) + ) end if realpath(toplevel) != realpath(repository_root) - throw(RepoCommitError( - repository_root, - """ - repository_root is not the top-level of the repository - `git rev-parse --show-toplevel`: $toplevel - repository_root: $repository_root - """ - )) + throw( + RepoCommitError( + repository_root, + """ + repository_root is not the top-level of the repository + `git rev-parse --show-toplevel`: $toplevel + repository_root: $repository_root + """ + ) + ) end catch e isa(e, RepoCommitError) && rethrow(e) - throw(RepoCommitError(repository_root, "`git rev-parse --show-toplevel` failed", e, catch_backtrace())) + throw( + RepoCommitError( + repository_root, + "`git rev-parse --show-toplevel` failed", + e, + catch_backtrace() + ) + ) end try 
readchomp(`$(git()) rev-parse HEAD`) catch e - throw(RepoCommitError(repository_root, "`git rev-parse HEAD` failed", e, catch_backtrace())) + throw( + RepoCommitError( + repository_root, + "`git rev-parse HEAD` failed", + e, + catch_backtrace() + ) + ) end end end @@ -505,7 +559,8 @@ function getremote(dir::AbstractString) remote = try readchomp(setenv(`$(git()) config --get remote.origin.url`; dir=dir)) catch e - @debug "git config --get remote.origin.url failed" exception=(e, catch_backtrace()) + @debug "git config --get remote.origin.url failed" exception = + (e, catch_backtrace()) "" end return parse_remote_url(remote) @@ -534,7 +589,7 @@ function linerange(text, from) # the .text field seems to become longer than just 1 element and every even element is the interpolated object, # and only the odd ones actually contain the docstring text as a string. lines = sum(Int[isodd(n) ? newlines(s) : 0 for (n, s) in enumerate(text)]) - return lines > 0 ? (from:(from + lines + 1)) : (from:from) + return lines > 0 ? (from:(from+lines+1)) : (from:from) end newlines(s::AbstractString) = count(c -> c === '\n', s) @@ -581,15 +636,18 @@ The `mode` keyword argument can be one of the following: This requires the string to parse into a single `Markdown.Paragraph`, the contents of which gets returned. """ -function mdparse(s::AbstractString; mode=:single) :: Vector{MarkdownAST.Node{Nothing}} - mode in [:single, :blocks, :span] || throw(ArgumentError("Invalid mode keyword $(mode)")) +function mdparse(s::AbstractString; mode=:single)::Vector{MarkdownAST.Node{Nothing}} + mode in [:single, :blocks, :span] || + throw(ArgumentError("Invalid mode keyword $(mode)")) mdast = convert(MarkdownAST.Node, Markdown.parse(s)) if mode == :blocks MarkdownAST.unlink!.(mdast.children) elseif length(mdast.children) == 0 # case where s == "". We'll just return an empty string / paragraph. 
if mode == :single - [MarkdownAST.@ast(MarkdownAST.Paragraph() do; ""; end)] + [MarkdownAST.@ast(MarkdownAST.Paragraph() do + "" + end)] else # If we're in span mode we return a single Text node [MarkdownAST.@ast("")] @@ -605,26 +663,37 @@ function mdparse(s::AbstractString; mode=:single) :: Vector{MarkdownAST.Node{Not @error "mode == :$(mode) requires the Markdown string to parse into a MarkdownAST.Paragraph" s mdast throw(ArgumentError("Unsuitable string for mode=:$(mode)")) end - (mode == :single) ? [MarkdownAST.unlink!(childnode)] : MarkdownAST.unlink!.(childnode.children) + if (mode == :single) + [MarkdownAST.unlink!(childnode)] + else + MarkdownAST.unlink!.(childnode.children) + end end end # Capturing output in different representations similar to IJulia.jl -function limitstringmime(m::MIME"text/plain", x; context = nothing) +function limitstringmime(m::MIME"text/plain", x; context=nothing) io = IOBuffer() ioc = IOContext(context === nothing ? io : IOContext(io, context), :limit => true) show(ioc, m, x) return String(take!(io)) end -function display_dict(x; context = nothing) +function display_dict(x; context=nothing) out = Dict{MIME,Any}() x === nothing && return out # Always generate text/plain - out[MIME"text/plain"()] = limitstringmime(MIME"text/plain"(), x, context = context) - for m in [MIME"text/html"(), MIME"image/svg+xml"(), MIME"image/png"(), - MIME"image/webp"(), MIME"image/gif"(), MIME"image/jpeg"(), - MIME"text/latex"(), MIME"text/markdown"()] - showable(m, x) && (out[m] = stringmime(m, x, context = context)) + out[MIME"text/plain"()] = limitstringmime(MIME"text/plain"(), x, context=context) + for m in [ + MIME"text/html"(), + MIME"image/svg+xml"(), + MIME"image/png"(), + MIME"image/webp"(), + MIME"image/gif"(), + MIME"image/jpeg"(), + MIME"text/latex"(), + MIME"text/markdown"() + ] + showable(m, x) && (out[m] = stringmime(m, x, context=context)) end return out end @@ -647,7 +716,7 @@ end ``` """ struct Default{T} - value :: T + value::T end 
Base.getindex(default::Default) = default.value @@ -661,7 +730,7 @@ function codelang(infostring::AbstractString) return m[1] end -function get_sandbox_module!(meta, prefix, name = nothing) +function get_sandbox_module!(meta, prefix, name=nothing) sym = if name === nothing || isempty(name) Symbol("__", prefix, "__", lstrip(string(gensym()), '#')) else @@ -688,8 +757,8 @@ it out automatically. `root` is the the directory where `git` gets run. `varname` is just informational and used to construct the warning messages. """ -function git_remote_head_branch(varname, root; remotename = "origin", fallback = "master") - gitcmd = git(nothrow = true) +function git_remote_head_branch(varname, root; remotename="origin", fallback="master") + gitcmd = git(nothrow=true) if gitcmd === nothing @warn """ Unable to determine $(varname) from remote HEAD branch, defaulting to "$(fallback)". @@ -707,14 +776,15 @@ function git_remote_head_branch(varname, root; remotename = "origin", fallback = ) stderr_output = IOBuffer() git_remote_output = try - read(pipeline(cmd; stderr = stderr_output), String) + read(pipeline(cmd; stderr=stderr_output), String) catch e @warn """ Unable to determine $(varname) from remote HEAD branch, defaulting to "$(fallback)". Calling `git remote` failed with an exception. Set JULIA_DEBUG=Documenter to see the error. Unless this is due to a configuration error, the relevant variable should be set explicitly. """ - @debug "Command: $cmd" exception = (e, catch_backtrace()) stderr = String(take!(stderr_output)) + @debug "Command: $cmd" exception = (e, catch_backtrace()) stderr = + String(take!(stderr_output)) return fallback end m = match(r"^\s*HEAD branch:\s*(.*)$"m, git_remote_output) @@ -757,11 +827,12 @@ dropheaders(h::Markdown.Header) = Markdown.Paragraph([Markdown.Bold(h.text)]) dropheaders(v::Vector) = map(dropheaders, v) dropheaders(other) = other -function git(; nothrow = false, kwargs...) +function git(; nothrow=false, kwargs...) 
# DOCUMENTER_KEY etc are never needed for git operations cmd = addenv(Git.git(), NO_KEY_ENV) if Sys.iswindows() - cmd = addenv(cmd, + cmd = addenv( + cmd, # For deploydocs() in particular, we need to use symlinks, but it looks like those # need to be explicitly force-enabled on Windows. So we make sure that we configure # core.symlinks=true via environment variables on that platform. @@ -779,13 +850,14 @@ function git(; nothrow = false, kwargs...) return cmd end -function remove_common_backtrace(bt, reference_bt = backtrace()) +function remove_common_backtrace(bt, reference_bt=backtrace()) cutoff = nothing # We'll start from the top of the backtrace (end of the array) and go down, checking # if the backtraces agree - for ridx in 1:length(bt) + for ridx = 1:length(bt) # Cancel search if we run out the reference BT or find a non-matching one frames: - if ridx > length(reference_bt) || bt[length(bt) - ridx + 1] != reference_bt[length(reference_bt) - ridx + 1] + if ridx > length(reference_bt) || + bt[length(bt)-ridx+1] != reference_bt[length(reference_bt)-ridx+1] cutoff = length(bt) - ridx + 1 break end diff --git a/src/writers.jl b/src/writers.jl index 3b72628d11..6332c1503d 100644 --- a/src/writers.jl +++ b/src/writers.jl @@ -10,17 +10,17 @@ abstract type FormatSelector <: Selectors.AbstractSelector end -abstract type LaTeXFormat <: FormatSelector end -abstract type HTMLFormat <: FormatSelector end +abstract type LaTeXFormat <: FormatSelector end +abstract type HTMLFormat <: FormatSelector end -Selectors.order(::Type{LaTeXFormat}) = 2.0 -Selectors.order(::Type{HTMLFormat}) = 3.0 +Selectors.order(::Type{LaTeXFormat}) = 2.0 +Selectors.order(::Type{HTMLFormat}) = 3.0 -Selectors.matcher(::Type{LaTeXFormat}, fmt, _) = isa(fmt, LaTeXWriter.LaTeX) -Selectors.matcher(::Type{HTMLFormat}, fmt, _) = isa(fmt, HTMLWriter.HTML) +Selectors.matcher(::Type{LaTeXFormat}, fmt, _) = isa(fmt, LaTeXWriter.LaTeX) +Selectors.matcher(::Type{HTMLFormat}, fmt, _) = isa(fmt, HTMLWriter.HTML) 
-Selectors.runner(::Type{LaTeXFormat}, fmt, doc) = LaTeXWriter.render(doc, fmt) -Selectors.runner(::Type{HTMLFormat}, fmt, doc) = HTMLWriter.render(doc, fmt) +Selectors.runner(::Type{LaTeXFormat}, fmt, doc) = LaTeXWriter.render(doc, fmt) +Selectors.runner(::Type{HTMLFormat}, fmt, doc) = HTMLWriter.render(doc, fmt) """ Writes a [`Documenter.Document`](@ref) object to `.user.build` directory in diff --git a/test/TestUtilities.jl b/test/TestUtilities.jl index 48b5470a4f..0022365721 100644 --- a/test/TestUtilities.jl +++ b/test/TestUtilities.jl @@ -32,7 +32,7 @@ function __init__() end struct QuietlyException <: Exception - logid :: Union{String,Nothing} + logid::Union{String,Nothing} exception backtrace end @@ -44,10 +44,13 @@ function Base.showerror(io::IO, e::QuietlyException) end function _quietly(f, expr, source) - c = IOCapture.capture(f; rethrow = InterruptException) + c = IOCapture.capture(f; rethrow=InterruptException) logid, logfile = quietly_next_log() isnothing(logid) || open(logfile; write=true, append=true) do io - println(io, "@quietly: c.error = $(c.error) / $(sizeof(c.output)) bytes of output captured") + println( + io, + "@quietly: c.error = $(c.error) / $(sizeof(c.output)) bytes of output captured" + ) println(io, "@quietly: $(source.file):$(source.line)") println(io, "@quietly: typeof(result) = ", typeof(c.value)) println(io, "@quietly: STDOUT") @@ -69,7 +72,10 @@ function _quietly(f, expr, source) $(expr) """ exception = (c.error, c.backtrace) if !isempty(c.output) - printstyled("$("="^21) $(prefix): output from the expression $("="^21)\n"; color=:magenta) + printstyled( + "$("="^21) $(prefix): output from the expression $("="^21)\n"; + color=:magenta + ) print(c.output) last(c.output) != "\n" && println() printstyled("$("="^27) $(prefix): end of output $("="^28)\n"; color=:magenta) @@ -82,14 +88,20 @@ function _quietly(f, expr, source) $(expr) """ TestSet = c.value if !isempty(c.output) - printstyled("$("="^21) $(prefix): output from the 
expression $("="^21)\n"; color=:magenta) + printstyled( + "$("="^21) $(prefix): output from the expression $("="^21)\n"; + color=:magenta + ) print(c.output) last(c.output) != "\n" && println() printstyled("$("="^27) $(prefix): end of output $("="^28)\n"; color=:magenta) end return c.value else - printstyled("$(prefix): success, $(sizeof(c.output)) bytes of output hidden\n"; color=:magenta) + printstyled( + "$(prefix): success, $(sizeof(c.output)) bytes of output hidden\n"; + color=:magenta + ) return c.value end end @@ -103,7 +115,8 @@ macro quietly(expr) end end -is_success(testset::Test.DefaultTestSet) = !(testset.anynonpass || !is_success(testset.results)) +is_success(testset::Test.DefaultTestSet) = + !(testset.anynonpass || !is_success(testset.results)) is_success(ts::AbstractArray) = all(is_success.(ts)) is_success(::Test.Fail) = false is_success(::Test.Pass) = true @@ -114,7 +127,7 @@ end function trun(cmd::Base.AbstractCmd) buffer = IOBuffer() - cmd_redirected = pipeline(cmd; stdin = devnull, stdout = buffer, stderr = buffer) + cmd_redirected = pipeline(cmd; stdin=devnull, stdout=buffer, stderr=buffer) try run(cmd_redirected) return true diff --git a/test/deployconfig.jl b/test/deployconfig.jl index 0b90ffca63..77479f57b9 100644 --- a/test/deployconfig.jl +++ b/test/deployconfig.jl @@ -1,8 +1,10 @@ using Logging -@testset "Travis CI deploy configuration" begin; with_logger(NullLogger()) do - # Regular tag build - withenv("TRAVIS_CI" => "true", +@testset "Travis CI deploy configuration" begin + with_logger(NullLogger()) do + # Regular tag build + withenv( + "TRAVIS_CI" => "true", "TRAVIS_PULL_REQUEST" => "false", "TRAVIS_REPO_SLUG" => "JuliaDocs/Documenter.jl", "TRAVIS_BRANCH" => "master", @@ -10,18 +12,24 @@ using Logging "TRAVIS_EVENT_TYPE" => nothing, "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Travis() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="dev", 
push_preview=true) - @test d.all_ok - @test d.subfolder == "v1.2.3" - @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" - @test d.branch == "gh-pages" - @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" - @test Documenter.authentication_method(cfg) === Documenter.SSH - end - # Regular tag build with tag prefix - withenv("TRAVIS_CI" => "true", + cfg = Documenter.Travis() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="dev", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "v1.2.3" + @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" + @test d.branch == "gh-pages" + @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" + @test Documenter.authentication_method(cfg) === Documenter.SSH + end + # Regular tag build with tag prefix + withenv( + "TRAVIS_CI" => "true", "TRAVIS_PULL_REQUEST" => "false", "TRAVIS_REPO_SLUG" => "JuliaDocs/Documenter.jl", "TRAVIS_BRANCH" => "master", @@ -29,19 +37,25 @@ using Logging "TRAVIS_EVENT_TYPE" => nothing, "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Travis() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", tag_prefix="MySubPackage-", - devurl="dev", push_preview=true) - @test d.all_ok - @test d.subfolder == "v1.2.3" - @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" - @test d.branch == "gh-pages" - @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" - @test Documenter.authentication_method(cfg) === Documenter.SSH - end - # Broken tag build - withenv("TRAVIS_CI" => "true", + cfg = Documenter.Travis() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + tag_prefix="MySubPackage-", + devurl="dev", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "v1.2.3" + @test d.repo == 
"github.com/JuliaDocs/Documenter.jl.git" + @test d.branch == "gh-pages" + @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" + @test Documenter.authentication_method(cfg) === Documenter.SSH + end + # Broken tag build + withenv( + "TRAVIS_CI" => "true", "TRAVIS_PULL_REQUEST" => "false", "TRAVIS_REPO_SLUG" => "JuliaDocs/Documenter.jl", "TRAVIS_BRANCH" => "master", @@ -49,13 +63,19 @@ using Logging "TRAVIS_EVENT_TYPE" => nothing, "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Travis() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="dev", push_preview=true) - @test !d.all_ok - end - # Regular/broken devbranch build - withenv("TRAVIS_CI" => "true", + cfg = Documenter.Travis() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="dev", + push_preview=true + ) + @test !d.all_ok + end + # Regular/broken devbranch build + withenv( + "TRAVIS_CI" => "true", "TRAVIS_PULL_REQUEST" => "false", "TRAVIS_REPO_SLUG" => "JuliaDocs/Documenter.jl", "TRAVIS_BRANCH" => "master", @@ -63,20 +83,31 @@ using Logging "TRAVIS_EVENT_TYPE" => nothing, "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Travis() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=true) - @test d.all_ok - @test d.subfolder == "hello-world" - @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" - @test d.branch == "gh-pages" - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="not-master", devurl="hello-world", push_preview=true) - @test !d.all_ok - @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" - end - # Regular pull request build - withenv("TRAVIS_CI" => "true", + cfg = Documenter.Travis() + d = Documenter.deploy_folder( + cfg; + 
repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "hello-world" + @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" + @test d.branch == "gh-pages" + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="not-master", + devurl="hello-world", + push_preview=true + ) + @test !d.all_ok + @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" + end + # Regular pull request build + withenv( + "TRAVIS_CI" => "true", "TRAVIS_PULL_REQUEST" => "42", "TRAVIS_REPO_SLUG" => "JuliaDocs/Documenter.jl", "TRAVIS_BRANCH" => "something", @@ -84,20 +115,31 @@ using Logging "TRAVIS_EVENT_TYPE" => nothing, "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Travis() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=true) - @test d.all_ok - @test d.subfolder == "previews/PR42" - @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" - @test d.branch == "gh-pages" - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=false) - @test !d.all_ok - @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" - end - # Missing/broken environment variables - withenv("TRAVIS_CI" => "true", + cfg = Documenter.Travis() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "previews/PR42" + @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" + @test d.branch == "gh-pages" + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=false + 
) + @test !d.all_ok + @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" + end + # Missing/broken environment variables + withenv( + "TRAVIS_CI" => "true", "TRAVIS_PULL_REQUEST" => "false", "TRAVIS_REPO_SLUG" => "JuliaDocs/Documenter.jl", "TRAVIS_BRANCH" => "master", @@ -105,245 +147,361 @@ using Logging "TRAVIS_EVENT_TYPE" => nothing, "DOCUMENTER_KEY" => nothing, ) do - cfg = Documenter.Travis() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=false) - @test !d.all_ok + cfg = Documenter.Travis() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=false + ) + @test !d.all_ok + end end -end end +end -@testset "GitHub Actions deploy configuration" begin; with_logger(NullLogger()) do - # Regular tag build with GITHUB_TOKEN - withenv("GITHUB_EVENT_NAME" => "push", +@testset "GitHub Actions deploy configuration" begin + with_logger(NullLogger()) do + # Regular tag build with GITHUB_TOKEN + withenv( + "GITHUB_EVENT_NAME" => "push", "GITHUB_REPOSITORY" => "JuliaDocs/Documenter.jl", "GITHUB_REF" => "refs/tags/v1.2.3", "GITHUB_ACTOR" => "github-actions", "GITHUB_TOKEN" => "SGVsbG8sIHdvcmxkLg==", "DOCUMENTER_KEY" => nothing, ) do - cfg = Documenter.GitHubActions() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="dev", push_preview=true) - @test d.all_ok - @test d.subfolder == "v1.2.3" - @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" - @test d.branch == "gh-pages" - @test Documenter.authentication_method(cfg) === Documenter.HTTPS - @test Documenter.authenticated_repo_url(cfg) === "https://github-actions:SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" - end - # Regular tag build with SSH deploy key (SSH key prioritized) - withenv("GITHUB_EVENT_NAME" => 
"push", + cfg = Documenter.GitHubActions() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="dev", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "v1.2.3" + @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" + @test d.branch == "gh-pages" + @test Documenter.authentication_method(cfg) === Documenter.HTTPS + @test Documenter.authenticated_repo_url(cfg) === + "https://github-actions:SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" + end + # Regular tag build with SSH deploy key (SSH key prioritized) + withenv( + "GITHUB_EVENT_NAME" => "push", "GITHUB_REPOSITORY" => "JuliaDocs/Documenter.jl", "GITHUB_REF" => "refs/tags/v1.2.3", "GITHUB_ACTOR" => "github-actions", "GITHUB_TOKEN" => "SGVsbG8sIHdvcmxkLg==", "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.GitHubActions() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="dev", push_preview=true) - @test d.all_ok - @test d.subfolder == "v1.2.3" - @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" - @test d.branch == "gh-pages" - @test Documenter.authentication_method(cfg) === Documenter.SSH - @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" - end - # Regular tag build with GITHUB_TOKEN and with tag prefix - withenv("GITHUB_EVENT_NAME" => "push", + cfg = Documenter.GitHubActions() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="dev", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "v1.2.3" + @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" + @test d.branch == "gh-pages" + @test Documenter.authentication_method(cfg) === Documenter.SSH + @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" + end + # Regular tag build with GITHUB_TOKEN and with tag prefix + 
withenv( + "GITHUB_EVENT_NAME" => "push", "GITHUB_REPOSITORY" => "JuliaDocs/Documenter.jl", "GITHUB_REF" => "refs/tags/MySubPackage-v1.2.3", "GITHUB_ACTOR" => "github-actions", "GITHUB_TOKEN" => "SGVsbG8sIHdvcmxkLg==", "DOCUMENTER_KEY" => nothing, ) do - cfg = Documenter.GitHubActions() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="dev", push_preview=true, - tag_prefix="MySubPackage-") - @test d.all_ok - @test d.subfolder == "v1.2.3" - @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" - @test d.branch == "gh-pages" - @test Documenter.authentication_method(cfg) === Documenter.HTTPS - @test Documenter.authenticated_repo_url(cfg) === "https://github-actions:SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" - end - # Broken tag build - withenv("GITHUB_EVENT_NAME" => "push", + cfg = Documenter.GitHubActions() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="dev", + push_preview=true, + tag_prefix="MySubPackage-" + ) + @test d.all_ok + @test d.subfolder == "v1.2.3" + @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" + @test d.branch == "gh-pages" + @test Documenter.authentication_method(cfg) === Documenter.HTTPS + @test Documenter.authenticated_repo_url(cfg) === + "https://github-actions:SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" + end + # Broken tag build + withenv( + "GITHUB_EVENT_NAME" => "push", "GITHUB_REPOSITORY" => "JuliaDocs/Documenter.jl", "GITHUB_REF" => "refs/tags/not-a-version", "GITHUB_ACTOR" => "github-actions", "GITHUB_TOKEN" => "SGVsbG8sIHdvcmxkLg==", "DOCUMENTER_KEY" => nothing, ) do - cfg = Documenter.GitHubActions() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="dev", push_preview=true) - @test !d.all_ok - end - # Regular devbranch build with GITHUB_TOKEN 
- withenv("GITHUB_EVENT_NAME" => "push", + cfg = Documenter.GitHubActions() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="dev", + push_preview=true + ) + @test !d.all_ok + end + # Regular devbranch build with GITHUB_TOKEN + withenv( + "GITHUB_EVENT_NAME" => "push", "GITHUB_REPOSITORY" => "JuliaDocs/Documenter.jl", "GITHUB_REF" => "refs/heads/master", "GITHUB_ACTOR" => "github-actions", "GITHUB_TOKEN" => "SGVsbG8sIHdvcmxkLg==", "DOCUMENTER_KEY" => nothing, ) do - cfg = Documenter.GitHubActions() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=true) - @test d.all_ok - @test d.subfolder == "hello-world" - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="not-master", devurl="hello-world", push_preview=true) - @test !d.all_ok - @test Documenter.authentication_method(cfg) === Documenter.HTTPS - @test Documenter.authenticated_repo_url(cfg) === "https://github-actions:SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" - end - # Regular devbranch build with SSH deploy key (SSH key prioritized) - withenv("GITHUB_EVENT_NAME" => "push", + cfg = Documenter.GitHubActions() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "hello-world" + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="not-master", + devurl="hello-world", + push_preview=true + ) + @test !d.all_ok + @test Documenter.authentication_method(cfg) === Documenter.HTTPS + @test Documenter.authenticated_repo_url(cfg) === + "https://github-actions:SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" + end + # Regular devbranch build with SSH deploy 
key (SSH key prioritized) + withenv( + "GITHUB_EVENT_NAME" => "push", "GITHUB_REPOSITORY" => "JuliaDocs/Documenter.jl", "GITHUB_REF" => "refs/heads/master", "GITHUB_ACTOR" => "github-actions", "GITHUB_TOKEN" => "SGVsbG8sIHdvcmxkLg==", "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.GitHubActions() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=true) - @test d.all_ok - @test d.subfolder == "hello-world" - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="not-master", devurl="hello-world", push_preview=true) - @test !d.all_ok - @test Documenter.authentication_method(cfg) === Documenter.SSH - @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" - end + cfg = Documenter.GitHubActions() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "hello-world" + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="not-master", + devurl="hello-world", + push_preview=true + ) + @test !d.all_ok + @test Documenter.authentication_method(cfg) === Documenter.SSH + @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" + end - # These tests requires GITHUB_TOKEN to be set (and valid) in order to verify the origin - # of the PR. Only runs on CI. - if get(ENV, "GITHUB_ACTIONS", nothing) == "true" && haskey(ENV, "GITHUB_TOKEN") - # Regular pull request build with GITHUB_TOKEN - withenv("GITHUB_EVENT_NAME" => "pull_request", + # These tests requires GITHUB_TOKEN to be set (and valid) in order to verify the origin + # of the PR. Only runs on CI. 
+ if get(ENV, "GITHUB_ACTIONS", nothing) == "true" && haskey(ENV, "GITHUB_TOKEN") + # Regular pull request build with GITHUB_TOKEN + withenv( + "GITHUB_EVENT_NAME" => "pull_request", "GITHUB_REPOSITORY" => "JuliaDocs/Documenter.jl", "GITHUB_REF" => "refs/pull/1962/merge", "GITHUB_ACTOR" => "github-actions", "DOCUMENTER_KEY" => nothing, ) do - cfg = Documenter.GitHubActions() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=true) - @test d.all_ok - @test d.subfolder == "previews/PR1962" - @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" - @test d.branch == "gh-pages" - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="not-master", devurl="hello-world", push_preview=false) - @test !d.all_ok - @test Documenter.authentication_method(cfg) === Documenter.HTTPS - @test Documenter.authenticated_repo_url(cfg) == "https://github-actions:$(ENV["GITHUB_TOKEN"])@github.com/JuliaDocs/Documenter.jl.git" - end - # Regular pull request build with GITHUB_TOKEN, PR from a fork - withenv("GITHUB_EVENT_NAME" => "pull_request", + cfg = Documenter.GitHubActions() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "previews/PR1962" + @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" + @test d.branch == "gh-pages" + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="not-master", + devurl="hello-world", + push_preview=false + ) + @test !d.all_ok + @test Documenter.authentication_method(cfg) === Documenter.HTTPS + @test Documenter.authenticated_repo_url(cfg) == + "https://github-actions:$(ENV["GITHUB_TOKEN"])@github.com/JuliaDocs/Documenter.jl.git" + end + # Regular pull request build with 
GITHUB_TOKEN, PR from a fork + withenv( + "GITHUB_EVENT_NAME" => "pull_request", "GITHUB_REPOSITORY" => "JuliaDocs/Documenter.jl", "GITHUB_REF" => "refs/pull/1967/merge", "GITHUB_ACTOR" => "github-actions", "DOCUMENTER_KEY" => nothing, ) do - cfg = Documenter.GitHubActions() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=true) - @test !d.all_ok - end - # Regular pull request build with SSH deploy key (SSH key prioritized) - withenv("GITHUB_EVENT_NAME" => "pull_request", + cfg = Documenter.GitHubActions() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test !d.all_ok + end + # Regular pull request build with SSH deploy key (SSH key prioritized) + withenv( + "GITHUB_EVENT_NAME" => "pull_request", "GITHUB_REPOSITORY" => "JuliaDocs/Documenter.jl", "GITHUB_REF" => "refs/pull/1962/merge", "GITHUB_ACTOR" => "github-actions", "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.GitHubActions() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=true) - @test d.all_ok - @test d.subfolder == "previews/PR1962" - @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" - @test d.branch == "gh-pages" - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="not-master", devurl="hello-world", push_preview=false) - @test !d.all_ok - @test Documenter.authentication_method(cfg) === Documenter.SSH - @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" - end - # Regular pull request build with SSH deploy key (SSH key prioritized), but push previews to a different repo and different branch - withenv("GITHUB_EVENT_NAME" => "pull_request", + cfg = Documenter.GitHubActions() + d = 
Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "previews/PR1962" + @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" + @test d.branch == "gh-pages" + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="not-master", + devurl="hello-world", + push_preview=false + ) + @test !d.all_ok + @test Documenter.authentication_method(cfg) === Documenter.SSH + @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" + end + # Regular pull request build with SSH deploy key (SSH key prioritized), but push previews to a different repo and different branch + withenv( + "GITHUB_EVENT_NAME" => "pull_request", "GITHUB_REPOSITORY" => "JuliaDocs/Documenter.jl", "GITHUB_REF" => "refs/pull/1962/merge", "GITHUB_ACTOR" => "github-actions", "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.GitHubActions() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=true, - repo_previews="github.com/JuliaDocs/Documenter-previews.jl.git", - branch_previews="gh-pages-previews") - @test d.all_ok - @test d.subfolder == "previews/PR1962" - @test d.repo == "github.com/JuliaDocs/Documenter-previews.jl.git" - @test d.branch == "gh-pages-previews" - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="not-master", devurl="hello-world", push_preview=false, - repo_previews="", - branch_previews="") - @test !d.all_ok - @test Documenter.authentication_method(cfg) === Documenter.SSH - @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" - end - # Regular pull request build with SSH deploy key (SSH key prioritized), but push previews to a different repo and different branch; use a different deploy key for 
previews - withenv("GITHUB_EVENT_NAME" => "pull_request", + cfg = Documenter.GitHubActions() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=true, + repo_previews="github.com/JuliaDocs/Documenter-previews.jl.git", + branch_previews="gh-pages-previews" + ) + @test d.all_ok + @test d.subfolder == "previews/PR1962" + @test d.repo == "github.com/JuliaDocs/Documenter-previews.jl.git" + @test d.branch == "gh-pages-previews" + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="not-master", + devurl="hello-world", + push_preview=false, + repo_previews="", + branch_previews="" + ) + @test !d.all_ok + @test Documenter.authentication_method(cfg) === Documenter.SSH + @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" + end + # Regular pull request build with SSH deploy key (SSH key prioritized), but push previews to a different repo and different branch; use a different deploy key for previews + withenv( + "GITHUB_EVENT_NAME" => "pull_request", "GITHUB_REPOSITORY" => "JuliaDocs/Documenter.jl", "GITHUB_REF" => "refs/pull/1962/merge", "GITHUB_ACTOR" => "github-actions", "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", "DOCUMENTER_KEY_PREVIEWS" => "SGVsbG8sIHdvcmxkLw==", ) do - cfg = Documenter.GitHubActions() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=true, - repo_previews="github.com/JuliaDocs/Documenter-previews.jl.git", - branch_previews="gh-pages-previews") - @test d.all_ok - @test d.subfolder == "previews/PR1962" - @test d.repo == "github.com/JuliaDocs/Documenter-previews.jl.git" - @test d.branch == "gh-pages-previews" - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="not-master", devurl="hello-world", 
push_preview=false, - repo_previews="", - branch_previews="") - @test !d.all_ok - @test Documenter.authentication_method(cfg) === Documenter.SSH - @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" - @test Documenter.documenter_key_previews(cfg) === "SGVsbG8sIHdvcmxkLw==" + cfg = Documenter.GitHubActions() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=true, + repo_previews="github.com/JuliaDocs/Documenter-previews.jl.git", + branch_previews="gh-pages-previews" + ) + @test d.all_ok + @test d.subfolder == "previews/PR1962" + @test d.repo == "github.com/JuliaDocs/Documenter-previews.jl.git" + @test d.branch == "gh-pages-previews" + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="not-master", + devurl="hello-world", + push_preview=false, + repo_previews="", + branch_previews="" + ) + @test !d.all_ok + @test Documenter.authentication_method(cfg) === Documenter.SSH + @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" + @test Documenter.documenter_key_previews(cfg) === "SGVsbG8sIHdvcmxkLw==" + end end - end - # Missing environment variables - withenv("GITHUB_EVENT_NAME" => "push", + # Missing environment variables + withenv( + "GITHUB_EVENT_NAME" => "push", "GITHUB_REPOSITORY" => "JuliaDocs/Documenter.jl", "GITHUB_REF" => "refs/tags/v1.2.3", "GITHUB_ACTOR" => "github-actions", "GITHUB_TOKEN" => nothing, "DOCUMENTER_KEY" => nothing, ) do - cfg = Documenter.GitHubActions() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=true) - @test !d.all_ok + cfg = Documenter.GitHubActions() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test !d.all_ok + end end 
-end end +end -@testset "GitLab CI deploy configuration" begin; with_logger(NullLogger()) do - # Regular tag build - withenv("GITLAB_CI" => "true", +@testset "GitLab CI deploy configuration" begin + with_logger(NullLogger()) do + # Regular tag build + withenv( + "GITLAB_CI" => "true", "CI_COMMIT_BRANCH" => "master", "CI_EXTERNAL_PULL_REQUEST_IID" => "", "CI_PROJECT_PATH_SLUG" => "juliadocs-documenter-jl", @@ -351,18 +509,24 @@ end end "CI_PIPELINE_SOURCE" => "push", "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.GitLab() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="dev", push_preview=true) - @test d.all_ok - @test d.subfolder == "v1.2.3" - @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" - @test d.branch == "gh-pages" - @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" - @test Documenter.authentication_method(cfg) === Documenter.SSH - end - # Regular tag build with tag_prefix - withenv("GITLAB_CI" => "true", + cfg = Documenter.GitLab() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="dev", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "v1.2.3" + @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" + @test d.branch == "gh-pages" + @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" + @test Documenter.authentication_method(cfg) === Documenter.SSH + end + # Regular tag build with tag_prefix + withenv( + "GITLAB_CI" => "true", "CI_COMMIT_BRANCH" => "master", "CI_EXTERNAL_PULL_REQUEST_IID" => "", "CI_PROJECT_PATH_SLUG" => "juliadocs-documenter-jl", @@ -370,19 +534,25 @@ end end "CI_PIPELINE_SOURCE" => "push", "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.GitLab() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="dev", 
push_preview=true, - tag_prefix="MySubPackage-") - @test d.all_ok - @test d.subfolder == "v1.2.3" - @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" - @test d.branch == "gh-pages" - @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" - @test Documenter.authentication_method(cfg) === Documenter.SSH - end - # Broken tag build - withenv("GITLAB_CI" => "true", + cfg = Documenter.GitLab() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="dev", + push_preview=true, + tag_prefix="MySubPackage-" + ) + @test d.all_ok + @test d.subfolder == "v1.2.3" + @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" + @test d.branch == "gh-pages" + @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" + @test Documenter.authentication_method(cfg) === Documenter.SSH + end + # Broken tag build + withenv( + "GITLAB_CI" => "true", "CI_COMMIT_BRANCH" => "master", "CI_EXTERNAL_PULL_REQUEST_IID" => "", "CI_PROJECT_PATH_SLUG" => "juliadocs-documenter-jl", @@ -390,13 +560,18 @@ end end "CI_PIPELINE_SOURCE" => "push", "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.GitLab() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="dev", push_preview=true) - @test !d.all_ok - end - # Regular/broken devbranch build - withenv( + cfg = Documenter.GitLab() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="dev", + push_preview=true + ) + @test !d.all_ok + end + # Regular/broken devbranch build + withenv( "GITLAB_CI" => "true", "CI_COMMIT_BRANCH" => "master", "CI_EXTERNAL_PULL_REQUEST_IID" => "", @@ -405,20 +580,31 @@ end end "CI_PIPELINE_SOURCE" => "push", "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.GitLab() - d = Documenter.deploy_folder(cfg; 
repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=true) - @test d.all_ok - @test d.subfolder == "hello-world" - @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" - @test d.branch == "gh-pages" - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="not-master", devurl="hello-world", push_preview=true) - @test !d.all_ok - @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" - end - # Regular pull request build - withenv("GITLAB_CI" => "true", + cfg = Documenter.GitLab() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "hello-world" + @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" + @test d.branch == "gh-pages" + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="not-master", + devurl="hello-world", + push_preview=true + ) + @test !d.all_ok + @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" + end + # Regular pull request build + withenv( + "GITLAB_CI" => "true", "CI_COMMIT_BRANCH" => "something", "CI_EXTERNAL_PULL_REQUEST_IID" => "42", "CI_PROJECT_PATH_SLUG" => "juliadocs-documenter-jl", @@ -426,20 +612,30 @@ end end "CI_PIPELINE_SOURCE" => "push", "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.GitLab() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=true) - @test d.all_ok - @test d.subfolder == "previews/PR42" - @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" - @test d.branch == "gh-pages" - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=false) - @test 
!d.all_ok - @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" - end - # Missing/broken environment variables - withenv( + cfg = Documenter.GitLab() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "previews/PR42" + @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" + @test d.branch == "gh-pages" + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=false + ) + @test !d.all_ok + @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" + end + # Missing/broken environment variables + withenv( "GITLAB_CI" => "true", "CI_COMMIT_BRANCH" => "master", "CI_EXTERNAL_PULL_REQUEST_IID" => "", @@ -448,137 +644,198 @@ end end "CI_PIPELINE_SOURCE" => "push", "DOCUMENTER_KEY" => nothing, ) do - cfg = Documenter.GitLab() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=false) - @test !d.all_ok - end - # Build on `schedule` jobs - withenv("GITHUB_EVENT_NAME" => "schedule", + cfg = Documenter.GitLab() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=false + ) + @test !d.all_ok + end + # Build on `schedule` jobs + withenv( + "GITHUB_EVENT_NAME" => "schedule", "GITHUB_REPOSITORY" => "JuliaDocs/Documenter.jl", "GITHUB_REF" => "refs/tags/v1.2.3", "GITHUB_ACTOR" => "github-actions", "GITHUB_TOKEN" => "SGVsbG8sIHdvcmxkLg==", "DOCUMENTER_KEY" => nothing, ) do - cfg = Documenter.GitHubActions() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="dev", push_preview=true) - @test d.all_ok - @test d.subfolder == "v1.2.3" - @test 
d.repo == "github.com/JuliaDocs/Documenter.jl.git" - @test d.branch == "gh-pages" - @test Documenter.authentication_method(cfg) === Documenter.HTTPS - @test Documenter.authenticated_repo_url(cfg) === "https://github-actions:SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" + cfg = Documenter.GitHubActions() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="dev", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "v1.2.3" + @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" + @test d.branch == "gh-pages" + @test Documenter.authentication_method(cfg) === Documenter.HTTPS + @test Documenter.authenticated_repo_url(cfg) === + "https://github-actions:SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" + end end -end end +end -@testset "Buildkite CI deploy configuration" begin; with_logger(NullLogger()) do - # Regular tag build - withenv("BUILDKITE" => "true", +@testset "Buildkite CI deploy configuration" begin + with_logger(NullLogger()) do + # Regular tag build + withenv( + "BUILDKITE" => "true", "BUILDKITE_BRANCH" => "master", "BUILDKITE_PULL_REQUEST" => "false", "BUILDKITE_TAG" => "v1.2.3", "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Buildkite() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="dev", push_preview=true) - @test d.all_ok - @test d.subfolder == "v1.2.3" - @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" - @test d.branch == "gh-pages" - @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" - @test Documenter.authentication_method(cfg) === Documenter.SSH - end - # Regular tag build with tag_prefix - withenv("BUILDKITE" => "true", + cfg = Documenter.Buildkite() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="dev", 
+ push_preview=true + ) + @test d.all_ok + @test d.subfolder == "v1.2.3" + @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" + @test d.branch == "gh-pages" + @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" + @test Documenter.authentication_method(cfg) === Documenter.SSH + end + # Regular tag build with tag_prefix + withenv( + "BUILDKITE" => "true", "BUILDKITE_BRANCH" => "master", "BUILDKITE_PULL_REQUEST" => "false", "BUILDKITE_TAG" => "MySubPackage-v1.2.3", "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Buildkite() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="dev", push_preview=true, - tag_prefix="MySubPackage-") - @test d.all_ok - @test d.subfolder == "v1.2.3" - @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" - @test d.branch == "gh-pages" - @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" - @test Documenter.authentication_method(cfg) === Documenter.SSH - end - # Broken tag build - withenv("BUILDKITE" => "true", + cfg = Documenter.Buildkite() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="dev", + push_preview=true, + tag_prefix="MySubPackage-" + ) + @test d.all_ok + @test d.subfolder == "v1.2.3" + @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" + @test d.branch == "gh-pages" + @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" + @test Documenter.authentication_method(cfg) === Documenter.SSH + end + # Broken tag build + withenv( + "BUILDKITE" => "true", "BUILDKITE_BRANCH" => "master", "BUILDKITE_PULL_REQUEST" => "false", "BUILDKITE_TAG" => "not-a-version", "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Buildkite() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="dev", push_preview=true) - @test !d.all_ok - 
end - # Regular/broken devbranch build - withenv( + cfg = Documenter.Buildkite() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="dev", + push_preview=true + ) + @test !d.all_ok + end + # Regular/broken devbranch build + withenv( "BUILDKITE" => "true", "BUILDKITE_BRANCH" => "master", "BUILDKITE_PULL_REQUEST" => "false", "BUILDKITE_TAG" => nothing, "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Buildkite() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=true) - @test d.all_ok - @test d.subfolder == "hello-world" - @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" - @test d.branch == "gh-pages" - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="not-master", devurl="hello-world", push_preview=true) - @test !d.all_ok - @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" - end - # Regular pull request build - withenv("BUILDKITE" => "true", + cfg = Documenter.Buildkite() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "hello-world" + @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" + @test d.branch == "gh-pages" + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="not-master", + devurl="hello-world", + push_preview=true + ) + @test !d.all_ok + @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" + end + # Regular pull request build + withenv( + "BUILDKITE" => "true", "BUILDKITE_BRANCH" => "something", "BUILDKITE_PULL_REQUEST" => "42", "BUILDKITE_TAG" => nothing, "DOCUMENTER_KEY" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Buildkite() - d = 
Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=true) - @test d.all_ok - @test d.subfolder == "previews/PR42" - @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" - @test d.branch == "gh-pages" - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=false) - @test !d.all_ok - @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" - end - # Missing/broken environment variables - withenv( + cfg = Documenter.Buildkite() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "previews/PR42" + @test d.repo == "github.com/JuliaDocs/Documenter.jl.git" + @test d.branch == "gh-pages" + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=false + ) + @test !d.all_ok + @test Documenter.documenter_key(cfg) === "SGVsbG8sIHdvcmxkLg==" + end + # Missing/broken environment variables + withenv( "BUILDKITE" => "true", "BUILDKITE_BRANCH" => "master", "BUILDKITE_PULL_REQUEST" => "false", "BUILDKITE_TAG" => "v1.2.3", "DOCUMENTER_KEY" => nothing, ) do - cfg = Documenter.Buildkite() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="hello-world", push_preview=false) - @test !d.all_ok + cfg = Documenter.Buildkite() + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="hello-world", + push_preview=false + ) + @test !d.all_ok + end end -end end +end -@testset "Woodpecker CI deploy configuration" begin; with_logger(NullLogger()) do - @testset "Woodpecker 0.15.x" begin; 
with_logger(NullLogger()) do - # Regular tag build with PROJECT_ACCESS_TOKEN - withenv( +@testset "Woodpecker CI deploy configuration" begin + with_logger(NullLogger()) do + @testset "Woodpecker 0.15.x" begin + with_logger(NullLogger()) do + # Regular tag build with PROJECT_ACCESS_TOKEN + withenv( "CI_SYSTEM_VERSION" => "0.15.0", "CI_BUILD_EVENT" => "push", "CI" => "woodpecker", @@ -588,19 +845,25 @@ end end "CI_COMMIT_TAG" => "v1.2.3", "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="dev", push_preview=true) + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="dev", + push_preview=true + ) - @test d.all_ok - @test d.subfolder == "v1.2.3" - @test d.repo == "JuliaDocs/Documenter.jl" - @test d.branch == "pages" - @test Documenter.authentication_method(cfg) === Documenter.HTTPS - @test Documenter.authenticated_repo_url(cfg) === "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" - end - # Regular tag build with PROJECT_ACCESS_TOKEN and with tag prefix - withenv( + @test d.all_ok + @test d.subfolder == "v1.2.3" + @test d.repo == "JuliaDocs/Documenter.jl" + @test d.branch == "pages" + @test Documenter.authentication_method(cfg) === Documenter.HTTPS + @test Documenter.authenticated_repo_url(cfg) === + "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" + end + # Regular tag build with PROJECT_ACCESS_TOKEN and with tag prefix + withenv( "CI_SYSTEM_VERSION" => "0.15.0", "CI_BUILD_EVENT" => "push", "CI" => "woodpecker", @@ -610,20 +873,26 @@ end end "CI_COMMIT_TAG" => "MySubPackage-v1.2.3", "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="dev", push_preview=true, - 
tag_prefix="MySubPackage-") + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="dev", + push_preview=true, + tag_prefix="MySubPackage-" + ) - @test d.all_ok - @test d.subfolder == "v1.2.3" - @test d.repo == "JuliaDocs/Documenter.jl" - @test d.branch == "pages" - @test Documenter.authentication_method(cfg) === Documenter.HTTPS - @test Documenter.authenticated_repo_url(cfg) === "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" - end - # Broken tag build - withenv( + @test d.all_ok + @test d.subfolder == "v1.2.3" + @test d.repo == "JuliaDocs/Documenter.jl" + @test d.branch == "pages" + @test Documenter.authentication_method(cfg) === Documenter.HTTPS + @test Documenter.authenticated_repo_url(cfg) === + "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" + end + # Broken tag build + withenv( "CI_SYSTEM_VERSION" => "0.15.0", "CI_BUILD_EVENT" => "push", "CI" => "woodpecker", @@ -633,13 +902,18 @@ end end "CI_COMMIT_TAG" => "not-a-version", "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="dev", push_preview=true) - @test !d.all_ok - end - # Regular devbranch build - withenv( + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="dev", + push_preview=true + ) + @test !d.all_ok + end + # Regular devbranch build + withenv( "CI_SYSTEM_VERSION" => "0.15.0", "CI_BUILD_EVENT" => "push", "CI" => "woodpecker", @@ -649,21 +923,32 @@ end end "CI_COMMIT_TAG" => nothing, "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="hello-world", push_preview=true) - @test d.all_ok - @test d.subfolder == 
"hello-world" - @test d.repo == "JuliaDocs/Documenter.jl" - @test d.branch == "pages" - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="not-master", devurl="hello-world", push_preview=true) - @test !d.all_ok - @test Documenter.authentication_method(cfg) === Documenter.HTTPS - @test Documenter.authenticated_repo_url(cfg) === "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" - end - # Regular pull request build - withenv( + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "hello-world" + @test d.repo == "JuliaDocs/Documenter.jl" + @test d.branch == "pages" + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="not-master", + devurl="hello-world", + push_preview=true + ) + @test !d.all_ok + @test Documenter.authentication_method(cfg) === Documenter.HTTPS + @test Documenter.authenticated_repo_url(cfg) === + "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" + end + # Regular pull request build + withenv( "CI_SYSTEM_VERSION" => "0.15.0", "CI_BUILD_EVENT" => "pull_request", "CI_COMMIT_PULL_REQUEST" => "42", @@ -674,21 +959,32 @@ end end "CI_BUILD_EVENT" => "pull_request", "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="hello-world", push_preview=true) - @test d.all_ok - @test d.subfolder == "previews/PR42" - @test d.repo == "JuliaDocs/Documenter.jl" - @test d.branch == "pages" - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="hello-world", push_preview=false) - @test !d.all_ok - @test Documenter.authentication_method(cfg) === Documenter.HTTPS - @test Documenter.authenticated_repo_url(cfg) === 
"https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" - end - # Not a pull request - withenv( + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "previews/PR42" + @test d.repo == "JuliaDocs/Documenter.jl" + @test d.branch == "pages" + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="hello-world", + push_preview=false + ) + @test !d.all_ok + @test Documenter.authentication_method(cfg) === Documenter.HTTPS + @test Documenter.authenticated_repo_url(cfg) === + "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" + end + # Not a pull request + withenv( "CI_SYSTEM_VERSION" => "0.15.0", "CI_BUILD_EVENT" => "push", "CI_COMMIT_PULL_REQUEST" => "42", @@ -699,13 +995,18 @@ end end "CI_BUILD_EVENT" => "push", "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="hello-world", push_preview=true) - @test !d.all_ok - end - # Missing environment variables - withenv( + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test !d.all_ok + end + # Missing environment variables + withenv( "CI_SYSTEM_VERSION" => "0.15.0", "CI" => "woodpecker", "CI_REPO_LINK" => "https://github.com/JuliaDocs/Documenter.jl", @@ -714,15 +1015,22 @@ end end "CI_COMMIT_REF" => "refs/pull/42/merge", "PROJECT_ACCESS_TOKEN" => nothing, ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="hello-world", push_preview=true) - @test !d.all_ok + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + 
repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test !d.all_ok + end end - end end - @testset "Woodpecker 1.0.0" begin; with_logger(NullLogger()) do - # Regular tag build with PROJECT_ACCESS_TOKEN - withenv( + end + @testset "Woodpecker 1.0.0" begin + with_logger(NullLogger()) do + # Regular tag build with PROJECT_ACCESS_TOKEN + withenv( "CI_SYSTEM_VERSION" => "1.0.0", "CI_PIPELINE_EVENT" => "push", "CI" => "woodpecker", @@ -732,19 +1040,25 @@ end end "CI_COMMIT_TAG" => "v1.2.3", "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="dev", push_preview=true) + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="dev", + push_preview=true + ) - @test d.all_ok - @test d.subfolder == "v1.2.3" - @test d.repo == "JuliaDocs/Documenter.jl" - @test d.branch == "pages" - @test Documenter.authentication_method(cfg) === Documenter.HTTPS - @test Documenter.authenticated_repo_url(cfg) === "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" - end - # Regular tag build with PROJECT_ACCESS_TOKEN and with tag prefix - withenv( + @test d.all_ok + @test d.subfolder == "v1.2.3" + @test d.repo == "JuliaDocs/Documenter.jl" + @test d.branch == "pages" + @test Documenter.authentication_method(cfg) === Documenter.HTTPS + @test Documenter.authenticated_repo_url(cfg) === + "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" + end + # Regular tag build with PROJECT_ACCESS_TOKEN and with tag prefix + withenv( "CI_SYSTEM_VERSION" => "1.0.0", "CI_PIPELINE_EVENT" => "push", "CI" => "woodpecker", @@ -754,20 +1068,26 @@ end end "CI_COMMIT_TAG" => "MySubPackage-v1.2.3", "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Woodpecker() - d = 
Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="dev", push_preview=true, - tag_prefix="MySubPackage-") + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="dev", + push_preview=true, + tag_prefix="MySubPackage-" + ) - @test d.all_ok - @test d.subfolder == "v1.2.3" - @test d.repo == "JuliaDocs/Documenter.jl" - @test d.branch == "pages" - @test Documenter.authentication_method(cfg) === Documenter.HTTPS - @test Documenter.authenticated_repo_url(cfg) === "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" - end - # Broken tag build - withenv( + @test d.all_ok + @test d.subfolder == "v1.2.3" + @test d.repo == "JuliaDocs/Documenter.jl" + @test d.branch == "pages" + @test Documenter.authentication_method(cfg) === Documenter.HTTPS + @test Documenter.authenticated_repo_url(cfg) === + "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" + end + # Broken tag build + withenv( "CI_SYSTEM_VERSION" => "1.0.0", "CI_PIPELINE_EVENT" => "push", "CI" => "woodpecker", @@ -777,13 +1097,18 @@ end end "CI_COMMIT_TAG" => "not-a-version", "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="dev", push_preview=true) - @test !d.all_ok - end - # Regular devbranch build - withenv( + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="dev", + push_preview=true + ) + @test !d.all_ok + end + # Regular devbranch build + withenv( "CI_SYSTEM_VERSION" => "1.0.0", "CI_PIPELINE_EVENT" => "push", "CI" => "woodpecker", @@ -793,21 +1118,32 @@ end end "CI_COMMIT_TAG" => nothing, "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; 
repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="hello-world", push_preview=true) - @test d.all_ok - @test d.subfolder == "hello-world" - @test d.repo == "JuliaDocs/Documenter.jl" - @test d.branch == "pages" - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="not-master", devurl="hello-world", push_preview=true) - @test !d.all_ok - @test Documenter.authentication_method(cfg) === Documenter.HTTPS - @test Documenter.authenticated_repo_url(cfg) === "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" - end - # Regular pull request build - withenv( + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "hello-world" + @test d.repo == "JuliaDocs/Documenter.jl" + @test d.branch == "pages" + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="not-master", + devurl="hello-world", + push_preview=true + ) + @test !d.all_ok + @test Documenter.authentication_method(cfg) === Documenter.HTTPS + @test Documenter.authenticated_repo_url(cfg) === + "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" + end + # Regular pull request build + withenv( "CI_SYSTEM_VERSION" => "1.0.0", "CI_PIPELINE_EVENT" => "pull_request", "CI_COMMIT_PULL_REQUEST" => "42", @@ -818,21 +1154,32 @@ end end "CI_PIPELINE_EVENT" => "pull_request", "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="hello-world", push_preview=true) - @test d.all_ok - @test d.subfolder == "previews/PR42" - @test d.repo == "JuliaDocs/Documenter.jl" - @test d.branch == "pages" - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="hello-world", push_preview=false) - @test 
!d.all_ok - @test Documenter.authentication_method(cfg) === Documenter.HTTPS - @test Documenter.authenticated_repo_url(cfg) === "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" - end - # Not a pull request - withenv( + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "previews/PR42" + @test d.repo == "JuliaDocs/Documenter.jl" + @test d.branch == "pages" + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="hello-world", + push_preview=false + ) + @test !d.all_ok + @test Documenter.authentication_method(cfg) === Documenter.HTTPS + @test Documenter.authenticated_repo_url(cfg) === + "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" + end + # Not a pull request + withenv( "CI_SYSTEM_VERSION" => "1.0.0", "CI_PIPELINE_EVENT" => "push", "CI_COMMIT_PULL_REQUEST" => "42", @@ -843,13 +1190,18 @@ end end "CI_PIPELINE_EVENT" => "push", "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="hello-world", push_preview=true) - @test !d.all_ok - end - # Missing environment variables - withenv( + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test !d.all_ok + end + # Missing environment variables + withenv( "CI_SYSTEM_VERSION" => "1.0.0", "CI" => "woodpecker", "CI_PIPELINE_EVENT" => "pull_request", @@ -858,15 +1210,22 @@ end end "CI_FORGE_URL" => "https://github.com", "PROJECT_ACCESS_TOKEN" => nothing, ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="hello-world", 
push_preview=true) - @test !d.all_ok + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test !d.all_ok + end end - end end - @testset "Woodpecker 2.0.0" begin; with_logger(NullLogger()) do - # Regular tag build with PROJECT_ACCESS_TOKEN - withenv( + end + @testset "Woodpecker 2.0.0" begin + with_logger(NullLogger()) do + # Regular tag build with PROJECT_ACCESS_TOKEN + withenv( "CI_SYSTEM_VERSION" => "2.0.0", "CI_PIPELINE_EVENT" => "push", "CI" => "woodpecker", @@ -876,19 +1235,25 @@ end end "CI_COMMIT_TAG" => "v1.2.3", "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="dev", push_preview=true) + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="dev", + push_preview=true + ) - @test d.all_ok - @test d.subfolder == "v1.2.3" - @test d.repo == "JuliaDocs/Documenter.jl" - @test d.branch == "pages" - @test Documenter.authentication_method(cfg) === Documenter.HTTPS - @test Documenter.authenticated_repo_url(cfg) === "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" - end - # Regular tag build with PROJECT_ACCESS_TOKEN and with tag prefix - withenv( + @test d.all_ok + @test d.subfolder == "v1.2.3" + @test d.repo == "JuliaDocs/Documenter.jl" + @test d.branch == "pages" + @test Documenter.authentication_method(cfg) === Documenter.HTTPS + @test Documenter.authenticated_repo_url(cfg) === + "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" + end + # Regular tag build with PROJECT_ACCESS_TOKEN and with tag prefix + withenv( "CI_SYSTEM_VERSION" => "2.0.0", "CI_PIPELINE_EVENT" => "push", "CI" => "woodpecker", @@ -898,20 +1263,26 @@ end end "CI_COMMIT_TAG" => "MySubPackage-v1.2.3", 
"PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="dev", push_preview=true, - tag_prefix="MySubPackage-") + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="dev", + push_preview=true, + tag_prefix="MySubPackage-" + ) - @test d.all_ok - @test d.subfolder == "v1.2.3" - @test d.repo == "JuliaDocs/Documenter.jl" - @test d.branch == "pages" - @test Documenter.authentication_method(cfg) === Documenter.HTTPS - @test Documenter.authenticated_repo_url(cfg) === "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" - end - # Broken tag build - withenv( + @test d.all_ok + @test d.subfolder == "v1.2.3" + @test d.repo == "JuliaDocs/Documenter.jl" + @test d.branch == "pages" + @test Documenter.authentication_method(cfg) === Documenter.HTTPS + @test Documenter.authenticated_repo_url(cfg) === + "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" + end + # Broken tag build + withenv( "CI_SYSTEM_VERSION" => "2.0.0", "CI_PIPELINE_EVENT" => "push", "CI" => "woodpecker", @@ -921,13 +1292,18 @@ end end "CI_COMMIT_TAG" => "not-a-version", "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="dev", push_preview=true) - @test !d.all_ok - end - # Regular devbranch build - withenv( + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="dev", + push_preview=true + ) + @test !d.all_ok + end + # Regular devbranch build + withenv( "CI_SYSTEM_VERSION" => "2.0.0", "CI_PIPELINE_EVENT" => "push", "CI" => "woodpecker", @@ -937,21 +1313,32 @@ end end "CI_COMMIT_TAG" => nothing, "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) 
do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="hello-world", push_preview=true) - @test d.all_ok - @test d.subfolder == "hello-world" - @test d.repo == "JuliaDocs/Documenter.jl" - @test d.branch == "pages" - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="not-master", devurl="hello-world", push_preview=true) - @test !d.all_ok - @test Documenter.authentication_method(cfg) === Documenter.HTTPS - @test Documenter.authenticated_repo_url(cfg) === "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" - end - # Regular pull request build - withenv( + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "hello-world" + @test d.repo == "JuliaDocs/Documenter.jl" + @test d.branch == "pages" + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="not-master", + devurl="hello-world", + push_preview=true + ) + @test !d.all_ok + @test Documenter.authentication_method(cfg) === Documenter.HTTPS + @test Documenter.authenticated_repo_url(cfg) === + "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" + end + # Regular pull request build + withenv( "CI_SYSTEM_VERSION" => "2.0.0", "CI_PIPELINE_EVENT" => "pull_request", "CI_COMMIT_PULL_REQUEST" => "42", @@ -962,21 +1349,32 @@ end end "CI_PIPELINE_EVENT" => "pull_request", "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="hello-world", push_preview=true) - @test d.all_ok - @test d.subfolder == "previews/PR42" - @test d.repo == "JuliaDocs/Documenter.jl" - @test d.branch == "pages" - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - 
devbranch="master", devurl="hello-world", push_preview=false) - @test !d.all_ok - @test Documenter.authentication_method(cfg) === Documenter.HTTPS - @test Documenter.authenticated_repo_url(cfg) === "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" - end - # Not a pull request - withenv( + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "previews/PR42" + @test d.repo == "JuliaDocs/Documenter.jl" + @test d.branch == "pages" + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="hello-world", + push_preview=false + ) + @test !d.all_ok + @test Documenter.authentication_method(cfg) === Documenter.HTTPS + @test Documenter.authenticated_repo_url(cfg) === + "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" + end + # Not a pull request + withenv( "CI_SYSTEM_VERSION" => "2.0.0", "CI_PIPELINE_EVENT" => "push", "CI_COMMIT_PULL_REQUEST" => "42", @@ -987,13 +1385,18 @@ end end "CI_PIPELINE_EVENT" => "push", "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="hello-world", push_preview=true) - @test !d.all_ok - end - # Missing environment variables - withenv( + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test !d.all_ok + end + # Missing environment variables + withenv( "CI_SYSTEM_VERSION" => "2.0.0", "CI" => "woodpecker", "CI_PIPELINE_EVENT" => "pull_request", @@ -1002,15 +1405,22 @@ end end "CI_FORGE_URL" => "https://github.com", "PROJECT_ACCESS_TOKEN" => nothing, ) do - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; 
repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="hello-world", push_preview=true) - @test !d.all_ok + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="hello-world", + push_preview=true + ) + @test !d.all_ok + end end - end end - @testset "Woodpecker CI No CI_SYSTEM_VERSION environmental variable" begin; with_logger(NullLogger()) do - # Regular tag build with PROJECT_ACCESS_TOKEN but no CI_SYSTEM_VERSION. It should throw. - withenv( + end + @testset "Woodpecker CI No CI_SYSTEM_VERSION environmental variable" begin + with_logger(NullLogger()) do + # Regular tag build with PROJECT_ACCESS_TOKEN but no CI_SYSTEM_VERSION. It should throw. + withenv( "CI_SYSTEM_VERSION" => nothing, "CI_PIPELINE_EVENT" => "push", "CI" => "woodpecker", @@ -1020,13 +1430,15 @@ end end "CI_COMMIT_TAG" => "v1.2.3", "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - @test_throws KeyError cfg = Documenter.Woodpecker() + @test_throws KeyError cfg = Documenter.Woodpecker() + end end - end end - @testset "Woodpecker CI Unreleased Versions" begin; with_logger(NullLogger()) do - # NOTE Unreleased versions starts with `next-` - # Regular tag build with PROJECT_ACCESS_TOKEN with next unreleased version. - withenv( + end + @testset "Woodpecker CI Unreleased Versions" begin + with_logger(NullLogger()) do + # NOTE Unreleased versions starts with `next-` + # Regular tag build with PROJECT_ACCESS_TOKEN with next unreleased version. 
+ withenv( "CI_SYSTEM_VERSION" => "next-woodpeckerversion", "CI_PIPELINE_EVENT" => "push", "CI" => "woodpecker", @@ -1036,19 +1448,25 @@ end end "CI_COMMIT_TAG" => "v1.2.3", "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - @test_warn r"(You are currently using an unreleased version of Woodpecker)*" Documenter.Woodpecker() - cfg = Documenter.Woodpecker() - d = Documenter.deploy_folder(cfg; repo="JuliaDocs/Documenter.jl", - devbranch="master", devurl="dev", push_preview=true) - @test d.all_ok - @test d.subfolder == "v1.2.3" - @test d.repo == "JuliaDocs/Documenter.jl" - @test d.branch == "pages" - @test Documenter.authentication_method(cfg) === Documenter.HTTPS - @test Documenter.authenticated_repo_url(cfg) === "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" - end - # Incorrect `next` version - withenv( + @test_warn r"(You are currently using an unreleased version of Woodpecker)*" Documenter.Woodpecker() + cfg = Documenter.Woodpecker() + d = Documenter.deploy_folder( + cfg; + repo="JuliaDocs/Documenter.jl", + devbranch="master", + devurl="dev", + push_preview=true + ) + @test d.all_ok + @test d.subfolder == "v1.2.3" + @test d.repo == "JuliaDocs/Documenter.jl" + @test d.branch == "pages" + @test Documenter.authentication_method(cfg) === Documenter.HTTPS + @test Documenter.authenticated_repo_url(cfg) === + "https://SGVsbG8sIHdvcmxkLg==@github.com/JuliaDocs/Documenter.jl.git" + end + # Incorrect `next` version + withenv( "CI_SYSTEM_VERSION" => "notnext-indeed", "CI_PIPELINE_EVENT" => "push", "CI" => "woodpecker", @@ -1058,82 +1476,94 @@ end end "CI_COMMIT_TAG" => "v1.2.3", "PROJECT_ACCESS_TOKEN" => "SGVsbG8sIHdvcmxkLg==", ) do - @test_throws ArgumentError cfg = Documenter.Woodpecker() + @test_throws ArgumentError cfg = Documenter.Woodpecker() + end end - end end -end end + end + end +end struct CustomConfig <: Documenter.DeployConfig end -Documenter.deploy_folder(::CustomConfig; kwargs...) 
= Documenter.DeployDecision(; all_ok = true, subfolder = "v1.2.3") +Documenter.deploy_folder(::CustomConfig; kwargs...) = + Documenter.DeployDecision(; all_ok=true, subfolder="v1.2.3") struct BrokenConfig <: Documenter.DeployConfig end -@testset "Custom configuration" begin; with_logger(NullLogger()) do +@testset "Custom configuration" begin + with_logger(NullLogger()) do cfg = CustomConfig() - d = Documenter.deploy_folder(cfg; repo="github.com/JuliaDocs/Documenter.jl.git", - devbranch="master", devurl="dev", push_preview=true) + d = Documenter.deploy_folder( + cfg; + repo="github.com/JuliaDocs/Documenter.jl.git", + devbranch="master", + devurl="dev", + push_preview=true + ) @test d.all_ok @test d.subfolder == "v1.2.3" cfg = BrokenConfig() - @test (@test_logs (:warn, r"Documenter\.deploy_folder\(::BrokenConfig; kwargs\.\.\.\) not implemented") Documenter.deploy_folder(cfg)) == Documenter.DeployDecision(; all_ok = false) - @test (@test_logs (:warn, r"Documenter could not auto-detect") Documenter.deploy_folder(nothing)) == Documenter.DeployDecision(; all_ok = false) -end end + @test (@test_logs ( + :warn, + r"Documenter\.deploy_folder\(::BrokenConfig; kwargs\.\.\.\) not implemented" + ) Documenter.deploy_folder(cfg)) == Documenter.DeployDecision(; all_ok=false) + @test (@test_logs (:warn, r"Documenter could not auto-detect") Documenter.deploy_folder( + nothing + )) == Documenter.DeployDecision(; all_ok=false) + end +end @testset "Autodetection of deploy system" begin - withenv("TRAVIS_REPO_SLUG" => "JuliaDocs/Documenter.jl", - "GITHUB_REPOSITORY" => nothing, - ) do + withenv( + "TRAVIS_REPO_SLUG" => "JuliaDocs/Documenter.jl", + "GITHUB_REPOSITORY" => nothing, + ) do cfg = Documenter.auto_detect_deploy_system() @test cfg isa Documenter.Travis end - withenv("TRAVIS_REPO_SLUG" => nothing, - "GITHUB_REPOSITORY" => "JuliaDocs/Documenter.jl", - ) do + withenv( + "TRAVIS_REPO_SLUG" => nothing, + "GITHUB_REPOSITORY" => "JuliaDocs/Documenter.jl", + ) do cfg = 
Documenter.auto_detect_deploy_system() @test cfg isa Documenter.GitHubActions end - withenv("TRAVIS_REPO_SLUG" => nothing, - "GITHUB_REPOSITORY" => nothing, - ) do + withenv("TRAVIS_REPO_SLUG" => nothing, "GITHUB_REPOSITORY" => nothing,) do cfg = Documenter.auto_detect_deploy_system() @test cfg === nothing end # Woodpecker requires a CI_SYSTEM_VERSION env var - withenv("CI" => "woodpecker", - "CI_SYSTEM_VERSION" => "1.0.0", - "GITHUB_REPOSITORY" => nothing - ) do + withenv( + "CI" => "woodpecker", + "CI_SYSTEM_VERSION" => "1.0.0", + "GITHUB_REPOSITORY" => nothing + ) do cfg = Documenter.auto_detect_deploy_system() @test cfg isa Documenter.Woodpecker end - withenv("CI" => "woodpecker", - "CI_SYSTEM_VERSION" => "0.15.0", - "GITHUB_REPOSITORY" => nothing - ) do + withenv( + "CI" => "woodpecker", + "CI_SYSTEM_VERSION" => "0.15.0", + "GITHUB_REPOSITORY" => nothing + ) do cfg = Documenter.auto_detect_deploy_system() @test cfg isa Documenter.Woodpecker end - withenv("CI" => "woodpecker", - "GITHUB_REPOSITORY" => nothing - ) do - @test_throws KeyError cfg = Documenter.auto_detect_deploy_system() + withenv("CI" => "woodpecker", "GITHUB_REPOSITORY" => nothing) do + @test_throws KeyError cfg = Documenter.auto_detect_deploy_system() end # Drone compatibility ends post-1.0.0 - withenv("CI" => "drone", - "CI_SYSTEM_VERSION" => "0.15.0", - "GITHUB_REPOSITORY" => nothing - ) do + withenv( + "CI" => "drone", + "CI_SYSTEM_VERSION" => "0.15.0", + "GITHUB_REPOSITORY" => nothing + ) do cfg = Documenter.auto_detect_deploy_system() @test cfg isa Documenter.Woodpecker end - withenv("CI" => "drone", - "GITHUB_REPOSITORY" => nothing - ) do + withenv("CI" => "drone", "GITHUB_REPOSITORY" => nothing) do @test_throws KeyError cfg = Documenter.auto_detect_deploy_system() end - withenv("CI" => nothing, - "GITHUB_REPOSITORY" => nothing - ) do + withenv("CI" => nothing, "GITHUB_REPOSITORY" => nothing) do cfg = Documenter.auto_detect_deploy_system() @test cfg === nothing end @@ -1158,7 +1588,9 @@ 
end uhu = Documenter.user_host_upstream("user@subdom.long-page.com:/path/to/repo") @test uhu == ("user", "subdom.long-page.com", "user@subdom.long-page.com:path/to/repo") - @test_throws ErrorException Documenter.user_host_upstream("https://github.com/JuliaDocs/Documenter.jl.git") + @test_throws ErrorException Documenter.user_host_upstream( + "https://github.com/JuliaDocs/Documenter.jl.git" + ) @test_throws ErrorException Documenter.user_host_upstream("user@subdom.long-page.com") end diff --git a/test/deploydocs.jl b/test/deploydocs.jl index 27fdac0007..43bb7b6aa0 100644 --- a/test/deploydocs.jl +++ b/test/deploydocs.jl @@ -1,13 +1,14 @@ using Documenter: Documenter, deploydocs, git using Test -include("TestUtilities.jl"); using Main.TestUtilities +include("TestUtilities.jl"); +using Main.TestUtilities; struct TestDeployConfig <: Documenter.DeployConfig - repo_path :: String - subfolder :: String + repo_path::String + subfolder::String end function Documenter.deploy_folder(c::TestDeployConfig; branch, repo, kwargs...) 
- Documenter.DeployDecision(; all_ok = true, subfolder = c.subfolder, branch, repo) + Documenter.DeployDecision(; all_ok=true, subfolder=c.subfolder, branch, repo) end Documenter.authentication_method(::TestDeployConfig) = Documenter.HTTPS Documenter.authenticated_repo_url(c::TestDeployConfig) = c.repo_path @@ -23,40 +24,40 @@ Documenter.authenticated_repo_url(c::TestDeployConfig) = c.repo_path write("build/page.html", "...") # Create gh-pages and deploy dev/ @quietly deploydocs( - root = pwd(), - deploy_config = TestDeployConfig(full_repo_path, "dev"), - repo = full_repo_path, - devbranch = "master", + root=pwd(), + deploy_config=TestDeployConfig(full_repo_path, "dev"), + repo=full_repo_path, + devbranch="master", ) # Deploy 1.0.0 tag @quietly deploydocs( - root = pwd(), - deploy_config = TestDeployConfig(full_repo_path, "1.0.0"), - repo = full_repo_path, - devbranch = "master", + root=pwd(), + deploy_config=TestDeployConfig(full_repo_path, "1.0.0"), + repo=full_repo_path, + devbranch="master", ) # Deploy 1.1.0 tag @quietly deploydocs( - root = pwd(), - deploy_config = TestDeployConfig(full_repo_path, "1.1.0"), - repo = full_repo_path, - devbranch = "master", + root=pwd(), + deploy_config=TestDeployConfig(full_repo_path, "1.1.0"), + repo=full_repo_path, + devbranch="master", ) # Deploy 2.0.0 tag, but into an archive (so nothing pushed to gh-pages) @quietly deploydocs( - root = pwd(), - deploy_config = TestDeployConfig(full_repo_path, "2.0.0"), - repo = full_repo_path, - devbranch = "master", - archive = joinpath(pwd(), "ghpages.tar.gz"), + root=pwd(), + deploy_config=TestDeployConfig(full_repo_path, "2.0.0"), + repo=full_repo_path, + devbranch="master", + archive=joinpath(pwd(), "ghpages.tar.gz"), ) # Deploy 3.0.0 tag with a tag_prefix---which does not change deployment behavior @quietly deploydocs(; - root = pwd(), - deploy_config = TestDeployConfig(full_repo_path, "3.0"), - repo = full_repo_path, - devbranch = "master", - tag_prefix = "MySubPackage-", + 
root=pwd(), + deploy_config=TestDeployConfig(full_repo_path, "3.0"), + repo=full_repo_path, + devbranch="master", + tag_prefix="MySubPackage-", ) # Check what we have in gh-pages now: run(`$(git()) clone -q -b gh-pages $(full_repo_path) worktree`) @@ -78,11 +79,29 @@ Documenter.authenticated_repo_url(c::TestDeployConfig) = c.repo_path @test islink(joinpath("worktree", "v3")) @test islink(joinpath("worktree", "v3.0")) @test islink(joinpath("worktree", "v3.0.0")) - + # key_prefix does not affect/is not present in worktree directories - @test issetequal([".git", "1.0.0", "1.1.0", "3.0", "dev", "index.html", - "stable", "v1", "v1.0", "v1.0.0", "v1.1", "v1.1.0", - "v3", "v3.0", "v3.0.0", "versions.js"], readdir("worktree")) + @test issetequal( + [ + ".git", + "1.0.0", + "1.1.0", + "3.0", + "dev", + "index.html", + "stable", + "v1", + "v1.0", + "v1.0.0", + "v1.1", + "v1.1.0", + "v3", + "v3.0", + "v3.0.0", + "versions.js" + ], + readdir("worktree") + ) end end end @@ -101,27 +120,27 @@ end subpackage_doc_dir = joinpath("PackageA.jl", "docs", "build") mkpath(joinpath("PackageA.jl", "docs", "build")) write(joinpath(subpackage_doc_dir, "page.html"), "...") - + # Use different versions for each set of docs to make it easier to see # where the version has been deplyed. # Deploy 1.0.0 tag - top level repo @quietly deploydocs( - root = pwd(), - deploy_config = TestDeployConfig(full_repo_path, "1.0.0"), - repo = full_repo_path, - devbranch = "master", + root=pwd(), + deploy_config=TestDeployConfig(full_repo_path, "1.0.0"), + repo=full_repo_path, + devbranch="master", ) # Deploy 2.0.0 tag - subpackage # Note: setting the `tag_prefix here is not actually necessary or used # BECAUSE we're using a TestDeployConfig, but we're setting it here # anyway so that this example can be used to model true implementation. 
@quietly deploydocs( - root = pwd(), - deploy_config = TestDeployConfig(full_repo_path, "2.0.0"), - repo = full_repo_path, - devbranch = "master", + root=pwd(), + deploy_config=TestDeployConfig(full_repo_path, "2.0.0"), + repo=full_repo_path, + devbranch="master", dirname="PackageA.jl", - tag_prefix="PackageA-", + tag_prefix="PackageA-", ) # Check what we have in worktree: @@ -136,14 +155,15 @@ end @test isfile(joinpath("worktree", "index.html")) @test isfile(joinpath("worktree", "versions.js")) @test isfile(joinpath("worktree", "PackageA.jl", "index.html")) - @test isfile(joinpath("worktree", "PackageA.jl", "versions.js")) - + @test isfile(joinpath("worktree", "PackageA.jl", "versions.js")) + # ...and check that (because only one release per package) the versions # are identical except for the (intentional) version number top_versions = readlines(joinpath("worktree", "versions.js")) - subpackage_versions = readlines(joinpath("worktree", "PackageA.jl", "versions.js")) + subpackage_versions = + readlines(joinpath("worktree", "PackageA.jl", "versions.js")) for (i, (t_line, s_line)) in enumerate(zip(top_versions, subpackage_versions)) - if i in [3, 5] + if i in [3, 5] @test contains(s_line, "2.0") @test isequal(t_line, replace(s_line, "2.0" => "1.0")) else diff --git a/test/docchecks.jl b/test/docchecks.jl index 92a1679ba0..528976cec2 100644 --- a/test/docchecks.jl +++ b/test/docchecks.jl @@ -8,64 +8,64 @@ using Documenter: linkcheck, allbindings # The following modules set up a few docstrings for allbindings tests module Dep1 - "dep1_private" - dep1_private() = nothing - "dep1_private_2" - dep1_private_2() = nothing - "dep1_exported" - dep1_exported() = nothing - export dep1_exported - "dep1_reexported" - dep1_reexported() = nothing - # test for shadowing exports - "bar" - bar() = nothing +"dep1_private" +dep1_private() = nothing +"dep1_private_2" +dep1_private_2() = nothing +"dep1_exported" +dep1_exported() = nothing +export dep1_exported +"dep1_reexported" 
+dep1_reexported() = nothing +# test for shadowing exports +"bar" +bar() = nothing end module Dep2 - # This module extends a function from Dep1, but creates a new local binding - # for it, to reproduce the case reported in - # https://github.com/JuliaDocs/Documenter.jl/issues/1695 - using ..Dep1: Dep1 - const dep1_private = Dep1.dep1_private - "Dep2: dep1_private" - dep1_private(::Int) = nothing +# This module extends a function from Dep1, but creates a new local binding +# for it, to reproduce the case reported in +# https://github.com/JuliaDocs/Documenter.jl/issues/1695 +using ..Dep1: Dep1 +const dep1_private = Dep1.dep1_private +"Dep2: dep1_private" +dep1_private(::Int) = nothing end module TestModule - # Standard case of attaching a docstring to a local binding - "local_binding" - local_binding() = nothing - "local_binding" - local_binding_exported() = nothing - export local_binding_exported +# Standard case of attaching a docstring to a local binding +"local_binding" +local_binding() = nothing +"local_binding" +local_binding_exported() = nothing +export local_binding_exported - # These extend functions from another module (package). The bindings should - # all be Dep1.XXX, rather than TestModule.XXX - using ..Dep1 - "TestModule : dep1_private" - Dep1.dep1_private(::Any) = nothing - "TestModule : dep1_exported" - Dep1.dep1_exported(::Any) = nothing - import ..Dep1: dep1_private_2 - "TestModule : dep1_private_2" - dep1_private_2(::Any) = nothing - # Re-export of a binding from another module - import ..Dep1: dep1_reexported - "TestModule : dep1_reexported" - dep1_reexported(::Any) = nothing - export dep1_reexported - # This also extends Dep1.dep1_private, but the docstring should get attached - # to the Dep2.dep1_private binding because of the assignment. - using ..Dep2: Dep2 - "TestModuleDep2: Dep2.dep1_private" - Dep2.dep1_private(::Any, ::Any) = nothing +# These extend functions from another module (package). 
The bindings should +# all be Dep1.XXX, rather than TestModule.XXX +using ..Dep1 +"TestModule : dep1_private" +Dep1.dep1_private(::Any) = nothing +"TestModule : dep1_exported" +Dep1.dep1_exported(::Any) = nothing +import ..Dep1: dep1_private_2 +"TestModule : dep1_private_2" +dep1_private_2(::Any) = nothing +# Re-export of a binding from another module +import ..Dep1: dep1_reexported +"TestModule : dep1_reexported" +dep1_reexported(::Any) = nothing +export dep1_reexported +# This also extends Dep1.dep1_private, but the docstring should get attached +# to the Dep2.dep1_private binding because of the assignment. +using ..Dep2: Dep2 +"TestModuleDep2: Dep2.dep1_private" +Dep2.dep1_private(::Any, ::Any) = nothing - # This tests the case where there is an undocumented but exported local function - # that shares the name with a documented function from another module, potentially - # confusing the isexported check. - const bar = nothing - export bar - "Dep1.bar" - Dep1.bar(::Any) = nothing +# This tests the case where there is an undocumented but exported local function +# that shares the name with a documented function from another module, potentially +# confusing the isexported check. +const bar = nothing +export bar +"Dep1.bar" +Dep1.bar(::Any) = nothing end @testset "doc checks" begin @@ -83,9 +83,11 @@ end @test Docs.Binding(TestModule, :dep1_private) != Docs.Binding(Dep2, :dep1_private) # These three bindings are imported into the TestModule scope, so the Binding objects # automatically resolve to the Dep1.X bindings. 
- @test Docs.Binding(TestModule, :dep1_private_2) == Docs.Binding(Dep1, :dep1_private_2) + @test Docs.Binding(TestModule, :dep1_private_2) == + Docs.Binding(Dep1, :dep1_private_2) @test Docs.Binding(TestModule, :dep1_exported) == Docs.Binding(Dep1, :dep1_exported) - @test Docs.Binding(TestModule, :dep1_reexported) == Docs.Binding(Dep1, :dep1_reexported) + @test Docs.Binding(TestModule, :dep1_reexported) == + Docs.Binding(Dep1, :dep1_reexported) # There is a TestModule.bar, but it's not the same as Dep1.bar, but the latter has # a docstring in TestModule. @test Docs.Binding(TestModule, :bar) != Docs.Binding(Dep1, :bar) diff --git a/test/docstring_links/make.jl b/test/docstring_links/make.jl index 535bd8d630..c33362d66f 100644 --- a/test/docstring_links/make.jl +++ b/test/docstring_links/make.jl @@ -5,10 +5,10 @@ import IOCapture module InvalidLinks - export f +export f - """Link to [invalid](http://domain.invalid/docstring.html)""" - f(x) = x +"""Link to [invalid](http://domain.invalid/docstring.html)""" +f(x) = x end @@ -16,12 +16,12 @@ end @testset "invalid links in docstring" begin c = IOCapture.capture(; rethrow=Union{}) do makedocs(; - root = dirname(@__FILE__), - modules = InvalidLinks, - sitename = "InvalidLinks Checks", - warnonly = false, - linkcheck = true, - debug = false + root=dirname(@__FILE__), + modules=InvalidLinks, + sitename="InvalidLinks Checks", + warnonly=false, + linkcheck=true, + debug=false ) end @test contains(c.output, r"Error:.*http://domain.invalid/index.html") diff --git a/test/docsystem.jl b/test/docsystem.jl index 2d94e12285..310732b510 100644 --- a/test/docsystem.jl +++ b/test/docsystem.jl @@ -7,30 +7,30 @@ import Documenter: Documenter, DocSystem const alias_of_getdocs = DocSystem.getdocs # NOTE: won't get docstrings if in a @testset module TestDocstrings - "A" - struct A end - "A(x)" - A(x) = A() - B = A +"A" +struct A end +"A(x)" +A(x) = A() +B = A - "foo(::Number)" - foo(::Number) = nothing +"foo(::Number)" +foo(::Number) = 
nothing - "foo(::Float64)" - foo(::Float64) = nothing +"foo(::Float64)" +foo(::Float64) = nothing - const bar = foo - const baz = foo +const bar = foo +const baz = foo - "baz(::Number)" - baz(::Number) +"baz(::Number)" +baz(::Number) - "baz(::Float64)" - baz(::Float64) +"baz(::Float64)" +baz(::Float64) - using Markdown: @doc_str - @doc doc"qux(::Float64)" - qux(::Float64) +using Markdown: @doc_str +@doc doc"qux(::Float64)" +qux(::Float64) end @testset "DocSystem" begin @@ -71,12 +71,12 @@ end let b = DocSystem.binding(DocSystem, :getdocs), d_0 = DocSystem.getdocs(b, Tuple{}), d_1 = DocSystem.getdocs(b), - d_2 = DocSystem.getdocs(b, Union{Tuple{Any}, Tuple{Any, Type}}; compare = (==)), - d_3 = DocSystem.getdocs(b; modules = Module[Main]), + d_2 = DocSystem.getdocs(b, Union{Tuple{Any},Tuple{Any,Type}}; compare=(==)), + d_3 = DocSystem.getdocs(b; modules=Module[Main]), d_4 = DocSystem.getdocs(DocSystem.binding(@__MODULE__, :alias_of_getdocs)), - d_5 = DocSystem.getdocs(DocSystem.binding(@__MODULE__, :alias_of_getdocs); aliases = false), - d_6 = DocSystem.getdocs(b, Union{Tuple{Docs.Binding}, Tuple{Docs.Binding, Type}}; compare = (==)), - d_7 = DocSystem.getdocs(DocSystem.binding(@__MODULE__, :alias_of_getdocs), Union{Tuple{Docs.Binding}, Tuple{Docs.Binding, Type}}) + d_5 = DocSystem.getdocs(DocSystem.binding(@__MODULE__, :alias_of_getdocs); aliases=false), + d_6 = DocSystem.getdocs(b, Union{Tuple{Docs.Binding},Tuple{Docs.Binding,Type}}; compare=(==)), + d_7 = DocSystem.getdocs(DocSystem.binding(@__MODULE__, :alias_of_getdocs), Union{Tuple{Docs.Binding},Tuple{Docs.Binding,Type}}) @test length(d_0) == 0 @test length(d_1) == 2 @@ -89,18 +89,18 @@ end @test d_1[1].data[:binding] == b @test d_1[2].data[:binding] == b - @test d_1[1].data[:typesig] == Union{Tuple{Docs.Binding}, Tuple{Docs.Binding, Type}} - @test d_1[2].data[:typesig] == Union{Tuple{Any}, Tuple{Any, Type}} - @test d_1[1].data[:module] == DocSystem - @test d_1[2].data[:module] == DocSystem + @test 
d_1[1].data[:typesig] == Union{Tuple{Docs.Binding},Tuple{Docs.Binding,Type}} + @test d_1[2].data[:typesig] == Union{Tuple{Any},Tuple{Any,Type}} + @test d_1[1].data[:module] == DocSystem + @test d_1[2].data[:module] == DocSystem @test d_2[1].data[:binding] == b - @test d_2[1].data[:typesig] == Union{Tuple{Any}, Tuple{Any, Type}} - @test d_2[1].data[:module] == DocSystem + @test d_2[1].data[:typesig] == Union{Tuple{Any},Tuple{Any,Type}} + @test d_2[1].data[:module] == DocSystem @test d_6[1].data[:binding] == b - @test d_6[1].data[:typesig] == Union{Tuple{Docs.Binding}, Tuple{Docs.Binding, Type}} - @test d_6[1].data[:module] == DocSystem + @test d_6[1].data[:typesig] == Union{Tuple{Docs.Binding},Tuple{Docs.Binding,Type}} + @test d_6[1].data[:module] == DocSystem @test d_1 == d_4 @test d_1 != d_5 @@ -134,7 +134,10 @@ end foo_1 = Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :foo)) foo_2 = Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :foo), Tuple{Int}) foo_3 = Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :foo), Tuple{Float64}) - foo_4 = Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :foo), Tuple{AbstractFloat}) + foo_4 = Documenter.DocSystem.getdocs( + Docs.Binding(TestDocstrings, :foo), + Tuple{AbstractFloat} + ) foo_5 = Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :foo), Tuple{Number}) foo_6 = Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :foo), Tuple{Any}) @@ -156,28 +159,65 @@ end @test foo_3[1] ∈ foo_1 # setting 'compare' to subtype, will fetch both docstrings - @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :foo), Tuple{Float64}, compare = (<:)) == foo_1 + @test Documenter.DocSystem.getdocs( + Docs.Binding(TestDocstrings, :foo), + Tuple{Float64}, + compare=(<:) + ) == foo_1 # bar is an alias, so falls back to foo @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :bar)) == foo_1 - @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :bar), Tuple{Int}) 
== foo_2 - @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :bar), Tuple{Float64}) == foo_3 - @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :bar), Tuple{AbstractFloat}) == foo_4 - @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :bar), Tuple{Number}) == foo_5 - @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :bar), Tuple{Any}) == foo_6 + @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :bar), Tuple{Int}) == + foo_2 + @test Documenter.DocSystem.getdocs( + Docs.Binding(TestDocstrings, :bar), + Tuple{Float64} + ) == foo_3 + @test Documenter.DocSystem.getdocs( + Docs.Binding(TestDocstrings, :bar), + Tuple{AbstractFloat} + ) == foo_4 + @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :bar), Tuple{Number}) == + foo_5 + @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :bar), Tuple{Any}) == + foo_6 # unless we disable following aliases - @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :bar); aliases = false) |> isempty - @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :bar), Tuple{Int}; aliases = false) |> isempty - @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :bar), Tuple{Float64}; aliases = false) |> isempty - @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :bar), Tuple{AbstractFloat}; aliases = false) |> isempty - @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :bar), Tuple{Number}; aliases = false) |> isempty - @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :bar), Tuple{Any}; aliases = false) |> isempty + @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :bar); aliases=false) |> + isempty + @test Documenter.DocSystem.getdocs( + Docs.Binding(TestDocstrings, :bar), + Tuple{Int}; + aliases=false + ) |> isempty + @test Documenter.DocSystem.getdocs( + Docs.Binding(TestDocstrings, :bar), + Tuple{Float64}; + aliases=false + ) |> isempty + @test 
Documenter.DocSystem.getdocs( + Docs.Binding(TestDocstrings, :bar), + Tuple{AbstractFloat}; + aliases=false + ) |> isempty + @test Documenter.DocSystem.getdocs( + Docs.Binding(TestDocstrings, :bar), + Tuple{Number}; + aliases=false + ) |> isempty + @test Documenter.DocSystem.getdocs( + Docs.Binding(TestDocstrings, :bar), + Tuple{Any}; + aliases=false + ) |> isempty # baz, while an alias of foo, has the same 'structure', but different docstrings.. baz_1 = Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :baz)) baz_2 = Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :baz), Tuple{Int}) baz_3 = Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :baz), Tuple{Float64}) - baz_4 = Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :baz), Tuple{AbstractFloat}) + baz_4 = Documenter.DocSystem.getdocs( + Docs.Binding(TestDocstrings, :baz), + Tuple{AbstractFloat} + ) baz_5 = Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :baz), Tuple{Number}) baz_6 = Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :baz), Tuple{Any}) @@ -199,12 +239,33 @@ end @test baz_3[1] ∈ baz_1 # .. 
even if we disable aliases - @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :baz); aliases = false) == baz_1 - @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :baz), Tuple{Int}; aliases = false) == baz_2 - @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :baz), Tuple{Float64}; aliases = false) == baz_3 - @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :baz), Tuple{AbstractFloat}; aliases = false) == baz_4 - @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :baz), Tuple{Number}; aliases = false) == baz_5 - @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :baz), Tuple{Any}; aliases = false) == baz_6 + @test Documenter.DocSystem.getdocs(Docs.Binding(TestDocstrings, :baz); aliases=false) == + baz_1 + @test Documenter.DocSystem.getdocs( + Docs.Binding(TestDocstrings, :baz), + Tuple{Int}; + aliases=false + ) == baz_2 + @test Documenter.DocSystem.getdocs( + Docs.Binding(TestDocstrings, :baz), + Tuple{Float64}; + aliases=false + ) == baz_3 + @test Documenter.DocSystem.getdocs( + Docs.Binding(TestDocstrings, :baz), + Tuple{AbstractFloat}; + aliases=false + ) == baz_4 + @test Documenter.DocSystem.getdocs( + Docs.Binding(TestDocstrings, :baz), + Tuple{Number}; + aliases=false + ) == baz_5 + @test Documenter.DocSystem.getdocs( + Docs.Binding(TestDocstrings, :baz), + Tuple{Any}; + aliases=false + ) == baz_6 end end diff --git a/test/doctests/docmeta.jl b/test/doctests/docmeta.jl index 409f3e57a4..e458dca132 100644 --- a/test/doctests/docmeta.jl +++ b/test/doctests/docmeta.jl @@ -23,7 +23,8 @@ end # recursive setting @test DocMeta.getdocmeta(TestMod, :DocTestSetup) == :foo @test DocMeta.getdocmeta(TestMod.Submodule, :DocTestSetup) === nothing - @test DocMeta.setdocmeta!(TestMod, :DocTestSetup, :foo; recursive=true, warn=false) === nothing + @test DocMeta.setdocmeta!(TestMod, :DocTestSetup, :foo; recursive=true, warn=false) === + nothing @test DocMeta.getdocmeta(TestMod, :DocTestSetup) == 
:foo @test DocMeta.getdocmeta(TestMod.Submodule, :DocTestSetup) == :foo end diff --git a/test/doctests/doctestapi.jl b/test/doctests/doctestapi.jl index dc5ace05e8..746a955e12 100644 --- a/test/doctests/doctestapi.jl +++ b/test/doctests/doctestapi.jl @@ -15,19 +15,19 @@ import IOCapture # ------------------------------------ function run_doctest(f, args...; kwargs...) (result, success, backtrace, output) = - c = IOCapture.capture(rethrow = InterruptException) do - # Running inside a Task to make sure that the parent testsets do not interfere. - t = Task(() -> doctest(args...; kwargs...)) - schedule(t) - # if an exception happens, it gets propagated - try - fetch(t) - catch e - # Note: in Julia 1.3 fetch no longer throws the exception direction, but instead - # wraps it in a TaskFailedException (https://github.com/JuliaLang/julia/pull/32814). - rethrow(t.exception) + c = IOCapture.capture(rethrow=InterruptException) do + # Running inside a Task to make sure that the parent testsets do not interfere. + t = Task(() -> doctest(args...; kwargs...)) + schedule(t) + # if an exception happens, it gets propagated + try + fetch(t) + catch e + # Note: in Julia 1.3 fetch no longer throws the exception direction, but instead + # wraps it in a TaskFailedException (https://github.com/JuliaLang/julia/pull/32814). + rethrow(t.exception) + end end - end @debug """run_doctest($args;, $kwargs) -> $(c.error ? 
"fail" : "success") ------------------------------------ output ------------------------------------ @@ -63,47 +63,47 @@ julia> x module DocTest3 end module DocTest4 - """ - ```jldoctest - julia> x - 42 - ``` - """ - function foo end - module Submodule - """ - ```jldoctest - julia> x + 1 - 43 - ``` - """ - function foo end - end +""" +```jldoctest +julia> x +42 +``` +""" +function foo end +module Submodule +""" +```jldoctest +julia> x + 1 +43 +``` +""" +function foo end +end end module DocTest5 - """ - ```jldoctest - julia> x - 42 - ``` - """ - function foo end - """ - ```jldoctest - julia> x - 4200 - ``` - """ - module Submodule - """ - ```jldoctest - julia> x + 1 - 4201 - ``` - """ - function foo end - end +""" +```jldoctest +julia> x +42 +``` +""" +function foo end +""" +```jldoctest +julia> x +4200 +``` +""" +module Submodule +""" +```jldoctest +julia> x + 1 +4201 +``` +""" +function foo end +end end """ @@ -184,7 +184,8 @@ Stacktrace: """ module ParseErrorSuccess_1x10 end # The JuliaSyntax swap in 1.10 changed the printing of parse errors quite considerably -ParseErrorSuccess() = (VERSION >= v"1.10.0-DEV.1520") ? ParseErrorSuccess_1x10 : ParseErrorSuccess_1x00 +ParseErrorSuccess() = + (VERSION >= v"1.10.0-DEV.1520") ? ParseErrorSuccess_1x10 : ParseErrorSuccess_1x00 """ ```jldoctest @@ -204,12 +205,12 @@ ERROR: syntax: invalid numeric constant "1.2." module ScriptParseErrorFail end module PR1075 - "x \$(42) y" - function qux end - "..." - function foo end - @doc @doc(foo) function bar end - @doc @doc(bar) function baz end +"x \$(42) y" +function qux end +"..." 
+function foo end +@doc @doc(foo) function bar end +@doc @doc(bar) function baz end end """ @@ -270,7 +271,7 @@ module BadDocTestKwargs3 end @test !success @test result isa TestSetException end - DocMeta.setdocmeta!(DocTest4, :DocTestSetup, :(x = 42); recursive = true, warn = false) + DocMeta.setdocmeta!(DocTest4, :DocTestSetup, :(x = 42); recursive=true, warn=false) run_doctest(nothing, [DocTest4]) do result, success, backtrace, output @test success @test result isa Test.DefaultTestSet @@ -290,7 +291,11 @@ module BadDocTestKwargs3 end # DoctestFilters df = [r"global (filter|FILTER)"] - run_doctest(nothing, [DoctestFilters], doctestfilters=df) do result, success, backtrace, output + run_doctest( + nothing, + [DoctestFilters], + doctestfilters=df + ) do result, success, backtrace, output @test success end diff --git a/test/doctests/doctests.jl b/test/doctests/doctests.jl index 2a6c6a5e81..0be238e434 100644 --- a/test/doctests/doctests.jl +++ b/test/doctests/doctests.jl @@ -24,7 +24,8 @@ mkpath(builds_directory) function run_makedocs(f, mdfiles, modules=Module[]; kwargs...) dir = mktempdir(builds_directory) - srcdir = joinpath(dir, "src"); mkpath(srcdir) + srcdir = joinpath(dir, "src") + mkpath(srcdir) for mdfile in mdfiles cp(joinpath(@__DIR__, "src", mdfile), joinpath(srcdir, mdfile)) @@ -33,15 +34,15 @@ function run_makedocs(f, mdfiles, modules=Module[]; kwargs...) # page" warning. touch(joinpath(srcdir, "index.md")) - c = IOCapture.capture(rethrow = InterruptException) do + c = IOCapture.capture(rethrow=InterruptException) do # In case JULIA_DEBUG is set to something, we'll override that, so that we wouldn't # get some unexpected debug output from makedocs. withenv("JULIA_DEBUG" => "") do makedocs( - sitename = " ", - format = Documenter.HTML(edit_link = "master"), - root = dir, - modules = modules; + sitename=" ", + format=Documenter.HTML(edit_link="master"), + root=dir, + modules=modules; kwargs... 
) end @@ -65,11 +66,14 @@ function run_makedocs(f, mdfiles, modules=Module[]; kwargs...) end function printoutput(result, success, backtrace, output) - printstyled("="^80, color=:cyan); println() + printstyled("="^80, color=:cyan) + println() println(output) - printstyled("-"^80, color=:cyan); println() + printstyled("-"^80, color=:cyan) + println() println(repr(result)) - printstyled("-"^80, color=:cyan); println() + printstyled("-"^80, color=:cyan) + println() end function onormalize(s) @@ -92,10 +96,18 @@ function onormalize(s) s = replace(s, r"Base \.[\\/]([A-Za-z0-9\.]+):[0-9]+\s*$"m => s"Base ./\1:LL") # Remove stacktraces - s = replace(s, r"(│\s+Stacktrace:)(\n(│\s+)\[[0-9]+\].*)(\n(│\s+)@.*)?+" => s"\1\\n\3{STACKTRACE}") + s = replace( + s, + r"(│\s+Stacktrace:)(\n(│\s+)\[[0-9]+\].*)(\n(│\s+)@.*)?+" => + s"\1\\n\3{STACKTRACE}" + ) # In Julia 1.9, the printing of UndefVarError has slightly changed (added backticks around binding name) - s = replace(s, r"UndefVarError: `([A-Za-z0-9.]+)` not defined"m => s"UndefVarError: \1 not defined") + s = replace( + s, + r"UndefVarError: `([A-Za-z0-9.]+)` not defined"m => + s"UndefVarError: \1 not defined" + ) # Remove floating point numbers s = replace(s, r"([0-9]*\.[0-9]{8})[0-9]+" => s"\1***") @@ -113,19 +125,26 @@ function is_same_as_file(output, filename) @error """Output does not agree with reference file ref: $(filename) """ - ps(s::AbstractString) = printstyled(stdout, s, '\n'; color=:magenta, bold=true) - "------------------------------------ output ------------------------------------" |> ps + ps(s::AbstractString) = + printstyled(stdout, s, '\n'; color=:magenta, bold=true) + "------------------------------------ output ------------------------------------" |> + ps output |> println - "---------------------------------- reference -----------------------------------" |> ps + "---------------------------------- reference -----------------------------------" |> + ps reference |> println - 
"------------------------------ onormalize(output) ------------------------------" |> ps + "------------------------------ onormalize(output) ------------------------------" |> + ps onormalize(output) |> println - "---------------------------- onormalize(reference) -----------------------------" |> ps + "---------------------------- onormalize(reference) -----------------------------" |> + ps onormalize(reference) |> println - "------------------------------------- diff -------------------------------------" |> ps + "------------------------------------- diff -------------------------------------" |> + ps diff = Diff{Words}(onormalize(reference), onormalize(output)) diff |> println - "------------------------------------- end --------------------------------------" |> ps + "------------------------------------- end --------------------------------------" |> + ps false else true @@ -148,7 +167,10 @@ function rfile(index::Integer) reference_directory = joinpath(@__DIR__, "stdouts") reference_file, versionmatch = "", MINVERSION for filename in readdir(reference_directory) - m = match(r"^(?[0-9]+)(?:\.v(?[[0-9]+)_(?[0-9]+))?\.stdout$", filename) + m = match( + r"^(?[0-9]+)(?:\.v(?[[0-9]+)_(?[0-9]+))?\.stdout$", + filename + ) # If the regex doesn't match, then we're not interested in this file isnothing(m) && continue # Similarly, we're only interested in collecting up the reference files that match `index` @@ -169,7 +191,8 @@ function rfile(index::Integer) end # If `reference_file` is still an empty string, then the loop above failed because the appropriate # reference file is missing. 
- isempty(reference_file) && error("Unable to find reference files for $(index).stdout, VERSION=$VERSION") + isempty(reference_file) && + error("Unable to find reference files for $(index).stdout, VERSION=$VERSION") return reference_file end @@ -189,34 +212,53 @@ end @test is_same_as_file(output, rfile(2)) end - run_makedocs(["working.md", "fooworking.md"]; modules=[FooWorking]) do result, success, backtrace, output + run_makedocs( + ["working.md", "fooworking.md"]; + modules=[FooWorking] + ) do result, success, backtrace, output @test success @test is_same_as_file(output, rfile(3)) end - run_makedocs(["working.md", "foobroken.md"]; modules=[FooBroken]) do result, success, backtrace, output + run_makedocs( + ["working.md", "foobroken.md"]; + modules=[FooBroken] + ) do result, success, backtrace, output @test !success @test is_same_as_file(output, rfile(4)) end - run_makedocs(["broken.md", "fooworking.md"]; modules=[FooWorking]) do result, success, backtrace, output + run_makedocs( + ["broken.md", "fooworking.md"]; + modules=[FooWorking] + ) do result, success, backtrace, output @test !success @test is_same_as_file(output, rfile(5)) end for warnonly in (false, :autodocs_block, Documenter.except(:doctest)) - run_makedocs(["broken.md", "foobroken.md"]; modules=[FooBroken], warnonly) do result, success, backtrace, output + run_makedocs( + ["broken.md", "foobroken.md"]; + modules=[FooBroken], + warnonly + ) do result, success, backtrace, output @test !success @test is_same_as_file(output, rfile(6)) end end - run_makedocs(["fooworking.md"]; modules=[FooWorking]) do result, success, backtrace, output + run_makedocs( + ["fooworking.md"]; + modules=[FooWorking] + ) do result, success, backtrace, output @test success @test is_same_as_file(output, rfile(7)) end - run_makedocs(["foobroken.md"]; modules=[FooBroken]) do result, success, backtrace, output + run_makedocs( + ["foobroken.md"]; + modules=[FooBroken] + ) do result, success, backtrace, output @test !success @test 
is_same_as_file(output, rfile(8)) end @@ -238,39 +280,60 @@ end # Tests for doctest = :only. The output should reflect that the docs themselves do not # get built. - run_makedocs(["working.md"]; modules=[FooWorking], doctest = :only) do result, success, backtrace, output + run_makedocs( + ["working.md"]; + modules=[FooWorking], + doctest=:only + ) do result, success, backtrace, output @test success @test is_same_as_file(output, rfile(21)) end - run_makedocs(["working.md"]; modules=[FooBroken], doctest = :only) do result, success, backtrace, output + run_makedocs( + ["working.md"]; + modules=[FooBroken], + doctest=:only + ) do result, success, backtrace, output @test !success @test is_same_as_file(output, rfile(22)) end - run_makedocs(["broken.md"]; modules=[FooWorking], doctest = :only) do result, success, backtrace, output + run_makedocs( + ["broken.md"]; + modules=[FooWorking], + doctest=:only + ) do result, success, backtrace, output @test !success @test is_same_as_file(output, rfile(23)) end - run_makedocs(["broken.md"]; modules=[FooBroken], doctest = :only) do result, success, backtrace, output + run_makedocs( + ["broken.md"]; + modules=[FooBroken], + doctest=:only + ) do result, success, backtrace, output @test !success @test is_same_as_file(output, rfile(24)) end # warnonly gets ignored with doctest = :only - run_makedocs(["broken.md"]; modules=[FooBroken], doctest = :only, warnonly=true) do result, success, backtrace, output + run_makedocs( + ["broken.md"]; + modules=[FooBroken], + doctest=:only, + warnonly=true + ) do result, success, backtrace, output @test !success @test is_same_as_file(output, rfile(25)) end # DocTestSetup in modules - run_makedocs([]; modules=[NoMeta], doctest = :only) do result, success, backtrace, output + run_makedocs([]; modules=[NoMeta], doctest=:only) do result, success, backtrace, output @test !success @test is_same_as_file(output, rfile(31)) end # Now, let's use Documenter's APIs to add the necessary meta information 
DocMeta.setdocmeta!(NoMeta, :DocTestSetup, :(baz(x) = 2x)) - run_makedocs([]; modules=[NoMeta], doctest = :only) do result, success, backtrace, output + run_makedocs([]; modules=[NoMeta], doctest=:only) do result, success, backtrace, output @test success @test is_same_as_file(output, rfile(32)) end diff --git a/test/doctests/fix/broken.jl b/test/doctests/fix/broken.jl index e7c996a0fe..085803ef92 100644 --- a/test/doctests/fix/broken.jl +++ b/test/doctests/fix/broken.jl @@ -79,23 +79,23 @@ julia> 3 + 4 """ foo() = 1 - """ - ```jldoctest - julia> begin - Main.DocTestFixArray_2468 - end - 4-element Array{Int64,1}: - 1 - 2 - 3 - 4 - ``` - ```jldoctest - julia> println(); println("foo") +""" +```jldoctest +julia> begin + Main.DocTestFixArray_2468 + end +4-element Array{Int64,1}: + 1 + 2 + 3 + 4 +``` +```jldoctest +julia> println(); println("foo") - bar - ``` - """ - foo(x) = 1 +bar +``` +""" +foo(x) = 1 end # module diff --git a/test/doctests/fix/fixed.jl b/test/doctests/fix/fixed.jl index 8d88506c50..3412ccb8aa 100644 --- a/test/doctests/fix/fixed.jl +++ b/test/doctests/fix/fixed.jl @@ -88,24 +88,24 @@ julia> 3 + 4 """ foo() = 1 - """ - ```jldoctest - julia> begin - Main.DocTestFixArray_2468 - end - 4×1×1 Array{Int64,3}: - [:, :, 1] = - 2 - 4 - 6 - 8 - ``` - ```jldoctest - julia> println(); println("foo") +""" +```jldoctest +julia> begin + Main.DocTestFixArray_2468 + end +4×1×1 Array{Int64,3}: +[:, :, 1] = + 2 + 4 + 6 + 8 +``` +```jldoctest +julia> println(); println("foo") - foo - ``` - """ - foo(x) = 1 +foo +``` +""" +foo(x) = 1 end # module diff --git a/test/doctests/fix/tests.jl b/test/doctests/fix/tests.jl index 2d7c30ce6d..c7f7b8f61e 100644 --- a/test/doctests/fix/tests.jl +++ b/test/doctests/fix/tests.jl @@ -4,19 +4,22 @@ # module DocTestFixTest using Documenter, Test -include("../../TestUtilities.jl"); using Main.TestUtilities: @quietly +include("../../TestUtilities.jl") +using Main.TestUtilities: @quietly # Type to reliably show() objects across Julia 
versions: @eval Main begin struct ShowWrap - s :: String + s::String end Base.show(io::IO, x::ShowWrap) = write(io, x.s) - const DocTestFixArray_1234 = Main.ShowWrap("4×1×1 Array{Int64,3}:\n[:, :, 1] =\n 1\n 2\n 3\n 4") - const DocTestFixArray_2468 = Main.ShowWrap("4×1×1 Array{Int64,3}:\n[:, :, 1] =\n 2\n 4\n 6\n 8") + const DocTestFixArray_1234 = + Main.ShowWrap("4×1×1 Array{Int64,3}:\n[:, :, 1] =\n 1\n 2\n 3\n 4") + const DocTestFixArray_2468 = + Main.ShowWrap("4×1×1 Array{Int64,3}:\n[:, :, 1] =\n 2\n 4\n 6\n 8") end -mktempdir_nocleanup(dir) = mktempdir(dir, cleanup = false) +mktempdir_nocleanup(dir) = mktempdir(dir, cleanup=false) function normalize_line_endings(filename) s = read(filename, String) @@ -36,18 +39,28 @@ function test_doctest_fix(dir) write(src_jl, normalize_line_endings(joinpath(@__DIR__, "broken.jl"))) # fix up - include(joinpath(srcdir, "src.jl")); @eval import .Foo + include(joinpath(srcdir, "src.jl")) + @eval import .Foo @debug "Running doctest/fix doctests with doctest=:fix" - @quietly makedocs(sitename="-", modules = [Foo], source = srcdir, build = builddir, doctest = :fix) + @quietly makedocs( + sitename="-", + modules=[Foo], + source=srcdir, + build=builddir, + doctest=:fix + ) # check that the doctests are passing now - include(joinpath(srcdir, "src.jl")); @eval import .Foo + include(joinpath(srcdir, "src.jl")) + @eval import .Foo @debug "Running doctest/fix doctests with doctest=true" - @quietly makedocs(sitename="-", modules = [Foo], source = srcdir, build = builddir) + @quietly makedocs(sitename="-", modules=[Foo], source=srcdir, build=builddir) # also test that we obtain the expected output - @test normalize_line_endings(index_md) == normalize_line_endings(joinpath(@__DIR__, "fixed.md")) - @test normalize_line_endings(src_jl) == normalize_line_endings(joinpath(@__DIR__, "fixed.jl")) + @test normalize_line_endings(index_md) == + normalize_line_endings(joinpath(@__DIR__, "fixed.md")) + @test normalize_line_endings(src_jl) == + 
normalize_line_endings(joinpath(@__DIR__, "fixed.jl")) end @testset "doctest fixing" begin diff --git a/test/dom.jl b/test/dom.jl index b91f8586c5..b9849cbc73 100644 --- a/test/dom.jl +++ b/test/dom.jl @@ -33,7 +33,7 @@ import Documenter.DOM: DOM, @tags, HTMLDocument @test length(div[".class"]("...").attributes) === 1 @test div[".class"]("...").attributes[1] == (:class => "class") @test div[:attribute].attributes[1] == (:attribute => "") - @test div[:attribute => "value"].attributes[1] == (:attribute => "value") + @test div[:attribute=>"value"].attributes[1] == (:attribute => "value") let d = div(ul(map(li, [string(n) for n = 1:10]))) @test d.name === :div @@ -63,13 +63,13 @@ import Documenter.DOM: DOM, @tags, HTMLDocument @tags script style img @test string(div(p("one"), p("two"))) == "

one

two

" - @test string(div[:key => "value"]) == "
" - @test string(p(" < > & ' \" ")) == "

< > & ' "

" - @test string(img[:src => "source"]) == "" - @test string(img[:none]) == "" - @test string(script(" < > & ' \" ")) == "" - @test string(style(" < > & ' \" ")) == "" - @test string(script) == "" + @test string(style(" < > & ' \" ")) == "" + @test string(script) == "