diff --git a/conf.schema.json b/conf.schema.json
index a511bf9e..7c1be27a 100644
--- a/conf.schema.json
+++ b/conf.schema.json
@@ -109,12 +109,30 @@
         "type": "object",
         "properties": {
             "indexFile": {
-                "description": "Path relative to 'outDir' where to write an index file with glossary terms and links to there occurrences in text.",
-                "type": "string",
+                "description": "Generate a file with a list of glossary terms and where they have been used.",
+                "oneOf": [{
+                    "type": "string"
+                }, {
+                    "type": "object",
+                    "$ref": "#/$defs/IndexFile"
+                }],
                 "default": ""
             }
         }
     },
+    "IndexFile": {
+        "type": "object",
+        "properties": {
+            "file": {
+                "description": "Path relative to 'outDir' where to create the index markdown file.",
+                "type": "string"
+            },
+            "title": {
+                "description": "The page title for the index file. If missing, the application uses a default value.",
+                "type": "string"
+            }
+        }
+    },
     "Glossary": {
         "type": "object",
         "properties": {
diff --git a/lib/ast-tools.js b/lib/ast-tools.js
index 592e7909..78e4a2e7 100644
--- a/lib/ast-tools.js
+++ b/lib/ast-tools.js
@@ -18,6 +18,18 @@ api.getNodeText = function getNodeText(node) {
     }
 }
 
+api.getLinkUrl = function getLinkUrl(node) {
+    if (! node) {
+        return;
+    } else if (node.type === "link") {
+        return node.url;
+    } else if (node.children && node.children.length > 0) {
+        return getLinkUrl(node.children[0]);
+    } else {
+        return;
+    }
+}
+
 /**
  * No-op compiler to satisfy unifiedjs
  * @private
diff --git a/lib/index.js b/lib/index.js
deleted file mode 100644
index 63d584df..00000000
--- a/lib/index.js
+++ /dev/null
@@ -1,38 +0,0 @@
-const {relativeFromTo, toForwardSlash} = require("./pathplus");
-const path = require("path");
-
-const api = {};
-
-api.writeIndex = function(context) {
-    let md = `# Index\n\n`;
-    const {terms: dict, opts} = context;
-    const {outDir, generateFiles} = opts;
-    const {indexFile} = generateFiles;
-    dict.byDefinition().forEach((terms) => {
-        const text = terms[0].term;
-        const occurrences = terms[0].occurrences; // [1]
-        md += `\n\n#### ${text}\n\n`;
-
-        // Link to glossary definitions
-        for (term of terms) {
-            md += ` - [${term.glossary.title}](${getGlossaryUrl(term, outDir, indexFile)})`;
-        };
-
-        // Links to occurrences
-        for (url of Object.keys(occurrences).sort()) {
-            md += ` - [${occurrences[url]}](${toForwardSlash(url)})`;
-        }
-    });
-    return md;
-}
-
-function getGlossaryUrl(term, outDir, indexFile) {
-    return toForwardSlash(
-        relativeFromTo(
-            path.resolve(outDir, indexFile || "."),
-            path.resolve(outDir, term.glossary.file)
-        ) + term.anchor
-    );
-}
-
-module.exports = api;
diff --git a/lib/indexer.js b/lib/indexer.js
new file mode 100644
index 00000000..61597fcb
--- /dev/null
+++ b/lib/indexer.js
@@ -0,0 +1,183 @@
+const uVisit = require('unist-util-visit');
+const {root, paragraph, text, heading, brk, link } = require('mdast-builder');
+const url = require('url');
+const path = require('path');
+
+const {relativeFromTo, toForwardSlash} = require('./pathplus');
+const {getLinkUrl: getMarkdownLinkUrl, getNodeText} = require('./ast-tools.js');
+
+/**
+ * Index built when using unified indexer() plug-in.
+ *
+ * {
+ *    "term": {
+ *        definitions: [Term, Term],
+ *        occurrences: {
+ *            "./document1#foo": { headingNode: Node }
+ *            "./document2#bar": { headingNode: Node }
+ *        }
+ *    }
+ * }
+ */
+const index = {}
+
+
+/**
+ * Unified plug-in to scan for links to glossary terms and remember their
+ * section of use for index file generation.
+ */
+function indexer(context) {
+    const indexFilename = getIndexFilename(context);
+    if (! indexFilename) {
+        return () => (tree, vFile) => {};
+    } else {
+        return () => (tree, vFile) => {
+            const currentDocFilename = `${vFile.dirname}/${vFile.basename}`;
+            uVisit(tree, 'term-occurrence', getNodeVisitor(context, indexFilename, currentDocFilename));
+        };
+    }
+}
+
+function getNodeVisitor(context, fromIndexFile, toDocumentFile) {
+    return function visitor(node) {
+        const {termDefs, headingNode} = node;
+        let headingAnchor;
+        if (headingNode) {
+            headingAnchor = getMarkdownLinkUrl(headingNode);
+        } else {
+            headingAnchor = "";
+        }
+
+        // Get URL from index file to the section (heading) in which the term was found
+        const docRef = getFileLinkUrl(context, fromIndexFile, toDocumentFile, headingAnchor);
+        const term = termDefs[0].term;
+        if (! index[term]) {
+            index[term] = {
+                definitions: termDefs,
+                occurrences: {}
+            };
+        }
+        // Record every occurrence, not just the first one per term.
+        index[term].occurrences[docRef] = { headingNode };
+    };
+}
+
+/**
+ * Returns the filename relative to 'outDir' as given by glossarify-md config
+ *
+ * @param {} context
+ */
+function getIndexFilename(context) {
+    const { indexFile } = context.opts.generateFiles;
+    if (indexFile && typeof indexFile === "object") {
+        return indexFile.file;
+    } else {
+        return indexFile;
+    }
+}
+
+/**
+ * Returns the markdown abstract syntax tree that is to be written to the file
+ * configured via 'generateFiles.indexFile' config.
+ *
+ * @param {*} context
+ */
+function getAST(context) {
+    const {indexFile} = context.opts.generateFiles;
+    let title = "";
+    let indexFilename = "";
+    if (indexFile !== null && typeof indexFile === "object") {
+        title = indexFile.title;
+        indexFilename = indexFile.file;
+    } else {
+        indexFilename = indexFile;
+    }
+
+    // Create AST from index
+    let tree = [
+        heading(1, text(title || 'Book Index')),
+        // Concatenate AST for each index entry
+        ...Object
+            .keys(index)
+            .sort()
+            .map(key => getIndexEntryAst(context, index[key], indexFilename))
+    ];
+    return root(tree);
+}
+
+function getIndexEntryAst(context, indexEntry, indexFilename) {
+    return heading(4, [
+        text(indexEntry.definitions[0].term),
+        brk,
+        paragraph(
+            getEntryLinksAst(context, indexEntry, indexFilename)
+        )
+    ]);
+}
+
+function getEntryLinksAst(context, indexEntry, indexFilename) {
+    const links = [
+        ...getGlossaryLinksAst(context, indexEntry, indexFilename),
+        ...getDocumentLinksAst(context, indexEntry)
+    ];
+    const linksSeparated = [];
+    for (let i = 0, len = links.length; i < len; i++) {
+        if (i > 0) {
+            linksSeparated.push(text(' - ')); // link separator
+        }
+        linksSeparated.push(links[i]);
+    }
+    return linksSeparated;
+}
+
+function getGlossaryLinksAst(context, indexEntry, fromIndexFilename) {
+    return indexEntry.definitions.map((term, i) => {
+        const toGlossaryFilename = term.glossary.outPath;
+        const url = getFileLinkUrl(context, fromIndexFilename, toGlossaryFilename, term.anchor);
+        return link(url, term.getShortDescription(), text(term.glossary.title));
+    });
+}
+
+function getDocumentLinksAst(context, indexEntry) {
+    return Object.keys(indexEntry.occurrences).map((ref) => {
+        const {headingNode} = indexEntry.occurrences[ref];
+        const linkText = getNodeText(headingNode);
+        return link(ref, null, text(linkText));
+    });
+}
+
+/**
+ * Returns the URL for the section heading preceding a term occurrence.
+ *
+ * @param {*} context
+ * @param {string} filenameFrom path
+ * @param {string} filenameTo path
+ * @param {string} anchor optional anchor or url fragment for references to sections
+ */
+function getFileLinkUrl(context, filenameFrom, filenameTo, anchor) {
+    const {outDir, baseUrl, linking, generateFiles} = context.opts;
+    let targetUrl = "";
+    if (linking === 'relative') {
+        targetUrl = toForwardSlash(
+            relativeFromTo(
+                path.resolve(outDir, filenameFrom || "."),
+                path.resolve(outDir, filenameTo)
+            )
+        ) + anchor;
+    } else if (linking === 'absolute') {
+        if (baseUrl) {
+            targetUrl = toForwardSlash(path.resolve(outDir, filenameFrom))
+                .replace(outDir, baseUrl)
+                .replace(/^(.*)(\/|\\)$/, "$1")
+                + anchor;
+        } else {
+            targetUrl = toForwardSlash(path.resolve(outDir, filenameFrom))
+                + anchor;
+        }
+    } else {
+        targetUrl = anchor;
+    }
+    return url.parse(targetUrl).format();
+}
+
+module.exports = { indexer, getAST };
diff --git a/lib/linker.js b/lib/linker.js
index 8d49ca4c..f16a45dd 100644
--- a/lib/linker.js
+++ b/lib/linker.js
@@ -1,7 +1,8 @@
 const Term = require("./term.js");
 const linkifyRegex = require("./linkify");
-const {printAst, getNodeText, noopCompiler} = require("./ast-tools.js");
-const {relativeFromTo, toForwardSlash} = require("./pathplus");
+const {indexer} = require("./indexer");
+const {printAst, noopCompiler} = require("./ast-tools.js");
+const {toForwardSlash} = require("./pathplus");
 const path = require("path");
 const unified = require("unified");
 const unifiedNgin = require("unified-engine");
@@ -14,6 +15,12 @@ const remark_ref_links = require("remark-reference-links");
 const url = require("url");
 const api = {};
 
+// Tell remark_stringify to not produce any output for a term occurrence
+remark_stringify
+    .Compiler
+    .prototype
+    .visitors["term-occurrence"] = function(node) {};
+
 /**
  * Reads the pile of non-glossary markdown files and replaces plaintext term
  * occurrences with linked terms pointing at the term's definition in a
@@ -28,14 +35,15 @@ api.linkTermOccurrences = function(context) {
         excludeFiles, glossaries, experimentalFootnotes
     } = context.opts;
     return new Promise((resolve, reject) => {
-        unifiedNgin({
+        const unifiedNginConf = {
             processor: unified()
                 .use(remark_parse, { footnotes: experimentalFootnotes })
                 .use(printAst(context.opts.dev.printInputAst)) // Might be regex. /.*\/table\.md/g;
                 .use(remark_slug)
-                .use(remark_link_headings, {behavior: 'wrap'})
                 .use(linker(context))
                 .use(remark_ref_links)
+                .use(remark_link_headings, {behavior: 'wrap'})
+                .use(indexer(context))
                 .use(printAst(context.opts.dev.printOutputAst))
                 .use(noopCompiler)
                 .use(remark_stringify)
@@ -56,15 +64,15 @@
             ,out: false
             ,color: true
             ,silent: false
-        }, (err, statusCode, uContext) => {
-            if(err) {
-                reject(err);
-            } else {
-                context.vFiles = [...context.vFiles, ...uContext.files];
-                resolve(context);
-            }
+        };
+        unifiedNgin(unifiedNginConf, (err, statusCode, uContext) => {
+            if(err) {
+                reject(err);
+            } else {
+                context.vFiles = [...context.vFiles, ...uContext.files];
+                resolve(context);
             }
-        )
+        });
     });
 }
 
@@ -78,7 +86,7 @@
 function linker(context) {
     return () => (tree, file) => {
         const {terms} = context;
-        let sectionNode = null;
+        let headingNode = null;
         uVisit(tree, (node, index, parent) => {
             /*visitor*/
             if (
@@ -86,13 +94,13 @@
                 || node.type === "tableCell"
             ) {
                 terms.byDefinition().forEach(termDefs => {
-                    linkify(node, sectionNode, termDefs, context, file);
+                    linkify(node, headingNode, termDefs, context, file);
                 });
                 return uVisit.SKIP;
                 // skip children of paragraphs, since they've already been
                 // visited by regexLinkify.
             } else if (node.type === "heading") {
-                sectionNode = node;
+                headingNode = node;
                 return uVisit.SKIP;
             } else if (
                 node.type === "blockquote"
@@ -116,22 +124,25 @@
  * @param {*} vFile current markdown document file
  * @returns AST node with links
  */
-function linkify(txtNode, sectionNode, termDefs, context, vFile) {
-    const sectionUrl = getIndexUrlToSection(context, sectionNode, vFile);
-    const sectionTitle = getNodeText(sectionNode);
+function linkify(txtNode, headingNode, termDefs, context, vFile) {
     const term = termDefs[0];
     const hasMultipleDefs = termDefs.length > 1;
-    let linkNode_ = null;
+    let paragraph = null;
+    let _linkNode = null;
 
     // Primary term link at occurrence. Points at a single glossary but may be
    // followed by additional numbered glossary links if there are multiple
    // term definitions.
-    let txtNode_ = linkifyRegex(term.regex, term, (linkNode) => {
+    const onLinkNode = function(linkNode) {
        linkNode.title = term.getShortDescription();
        linkNode.url = getGlossaryUrl(context, term, vFile);
-        linkNode_ = linkNode;
+        linkNode.children.push({
+            type: "term-occurrence",
+            termDefs: termDefs,
+            headingNode: headingNode
+        });
        if (! hasMultipleDefs) {
-            term.addOccurrence(sectionTitle, sectionUrl);
+            term.countOccurrence();
            if (term.hint) {
                if (/\$\{term\}/.test(term.hint)) {
                    linkNode.children[0].value = term.hint.replace("${term}", linkNode.children[0].value);
@@ -140,23 +151,25 @@ function linkify(txtNode, sectionNode, termDefs, context, vFile) {
                }
            }
        }
+        _linkNode = linkNode;
        return linkNode;
-    })()(txtNode);
+    }
+    paragraph = linkifyRegex(term.regex, term, onLinkNode)()(txtNode);
 
    // Multiple definitions?
    if (! hasMultipleDefs) {
-        return txtNode_;
+        return paragraph;
    }
 
    // Multiple definitions! Append numbered links for every definition.
    // Insert at linkIndex + 1...
-    const childr = txtNode_.children;
-    const linkIndex = childr.indexOf(linkNode_);
+    const childr = paragraph.children;
+    const linkIndex = childr.indexOf(_linkNode);
    if (linkIndex >= 0) {
-        txtNode_.children = childr
+        paragraph.children = childr
            .slice(0, linkIndex + 1)
            .concat(termDefs.map((t, i) => {
-                t.addOccurrence(sectionTitle, sectionUrl);
+                t.countOccurrence();
                return {
                    type: "link",
                    title: t.getShortDescription(),
@@ -169,12 +182,12 @@ function linkify(txtNode, sectionNode, termDefs, context, vFile) {
            }))
            .concat(childr.slice(linkIndex + 1));
    }
-    return txtNode_;
+    return paragraph;
 }
 
 /**
- * Returns the link URL to the glossary file and definition section of a term.
+ * Returns the link URL to the glossary file and definition heading of a term.
  * In case of "relative" linking config, returns the link URL relative to the
  * file where the term occurred.
  *
@@ -213,45 +226,4 @@ function getGlossaryUrl(context, term, vFile) {
     return url.parse(termUrl).format();
 }
 
-/**
- * Returns the URL of the section of a term occurrence.
- *
- * @param {*} context
- * @param {*} vFile
- * @param {*} sectionNode
- */
-function getIndexUrlToSection(context, sectionNode, vFile) {
-    const {outDir, baseUrl, linking, generateFiles} = context.opts;
-    const {indexFile} = generateFiles;
-    let sectionAnchor;
-    if (sectionNode) {
-        sectionAnchor = sectionNode.children[0].url;
-    } else {
-        sectionAnchor = "";
-    }
-    let sectionUrl = "";
-    let file = `${vFile.dirname}/${vFile.basename}`;
-    if (linking === 'relative') {
-        sectionUrl = toForwardSlash(
-            relativeFromTo(
-                path.resolve(outDir, indexFile || "."),
-                path.resolve(outDir, file)
-            )
-        ) + sectionAnchor;
-    } else if (linking === 'absolute') {
-        if (baseUrl) {
-            sectionUrl = toForwardSlash(path.resolve(outDir, file))
-                .replace(outDir, baseUrl)
-                .replace(/^(.*)(\/|\\)$/, "$1")
-                + sectionAnchor;
-        } else {
-            sectionUrl = toForwardSlash(path.resolve(outDir, file))
-                + sectionAnchor;
-        }
-    } else {
-        sectionUrl = sectionAnchor;
-    }
-    return url.parse(sectionUrl).format();
-}
-
 module.exports = api;
diff --git a/lib/pathplus.js b/lib/pathplus.js
index 3c249b1e..ac7726ee 100644
--- a/lib/pathplus.js
+++ b/lib/pathplus.js
@@ -2,18 +2,17 @@
 const path = require("path");
 const proc = require("process");
 const CWD = proc.cwd();
-const api = {};
 const forwSlashRegex = new RegExp("\\" + path.sep, "g");
 const sysSlashRegex = new RegExp("/", "g");
 
-api.toForwardSlash = function toForwardSlash(p) {
+function toForwardSlash(p) {
     if (Array.isArray(p)) {
         return p.map(item => toForwardSlash(item));
     }
     return p.replace(forwSlashRegex, "/");
 }
 
-api.toSystemSlash = function toSystemSlash(p) {
+function toSystemSlash(p) {
     if (Array.isArray(p)) {
         return p.map(item => toSystemSlash(item));
     }
@@ -39,9 +38,9 @@
  * @param {string} from absolute path
  * @param {string} to absolute path
  */
-api.relativeFromTo = function(from, to) {
-    const from_ = api.toForwardSlash(path.resolve(from)).split("/");
-    const to_ = api.toForwardSlash(path.resolve(to)).split("/");
+function relativeFromTo(from, to) {
+    const from_ = toForwardSlash(path.resolve(from)).split("/");
+    const to_ = toForwardSlash(path.resolve(to)).split("/");
     let dirFrom, dirTo;
     do {
         dirFrom = from_.shift();
@@ -55,7 +54,7 @@
         to_.unshift(dirTo);
     }
     const nav = from_.map(() => "../").join("") || "./"; // [2]
-    return api.toSystemSlash(`${nav}${to_.join("/")}`);
+    return toSystemSlash(`${nav}${to_.join("/")}`);
 
     // Implementation Notes:
     // [1] If this is no longer true, we reached a fork.
@@ -63,9 +62,51 @@
     // If there are no such segments, 'from' is *on* the path of 'to'.
 }
 
-api.toRedactedPath = function redactPath(fullPath, redactString) {
+function toRedactedPath(fullPath, redactString) {
     redactString = redactString || "{redacted}";
     return `${path.sep}${redactString}${path.sep}${path.relative(CWD, fullPath)}`;
 };
 
-module.exports = api;
+
+const url = require("url"); // url.parse() is used by getFileLinkUrl() below
+/**
+ * Returns the URL which links 'filenameFrom' to 'filenameTo', honoring the 'linking' config option.
+ *
+ * @param {*} context
+ * @param {string} filenameFrom path
+ * @param {string} filenameTo path
+ * @param {string} anchor optional anchor or url fragment for references to sections
+ */
+function getFileLinkUrl(context, filenameFrom, filenameTo, anchor) {
+    const {outDir, baseUrl, linking, generateFiles} = context.opts;
+    let targetUrl = "";
+    if (linking === 'relative') {
+        targetUrl = toForwardSlash(
+            relativeFromTo(
+                path.resolve(outDir, filenameFrom || "."),
+                path.resolve(outDir, filenameTo)
+            )
+        ) + anchor;
+    } else if (linking === 'absolute') {
+        if (baseUrl) {
+            targetUrl = toForwardSlash(path.resolve(outDir, filenameFrom))
+                .replace(outDir, baseUrl)
+                .replace(/^(.*)(\/|\\)$/, "$1")
+                + anchor;
+        } else {
+            targetUrl = toForwardSlash(path.resolve(outDir, filenameFrom))
+                + anchor;
+        }
+    } else {
+        targetUrl = anchor;
+    }
+    return url.parse(targetUrl).format();
+}
+
+module.exports = {
+    toForwardSlash
+    , toSystemSlash
+    , relativeFromTo
+    , getFileLinkUrl
+    , toRedactedPath
+};
diff --git a/lib/term.js b/lib/term.js
index 1fd69b25..e89a778a 100644
--- a/lib/term.js
+++ b/lib/term.js
@@ -12,7 +12,6 @@ class Term {
         this.regex = data.regex || "";
         this.aliases = [...(data.aliases || [])];
         this.ignoreCase = false || data.ignoreCase;
-        this.occurrences = {};
         this.countOccurrenceTotal = 0;
         this.setTerm(this.term);
         this.setAliases(this.aliases);
@@ -28,11 +27,7 @@ class Term {
         updateRegExp(this);
     }
 
-    addOccurrence(sectionTitle, sectionUrl) {
-        const occurrences = this.occurrences;
-        if (sectionUrl && !occurrences[sectionUrl]) {
-            this.occurrences[sectionUrl] = sectionTitle || sectionUrl;
-        }
+    countOccurrence() {
         this.countOccurrenceTotal++;
     }
 
diff --git a/lib/writer.js b/lib/writer.js
index dd5aed3a..33145c39 100644
--- a/lib/writer.js
+++ b/lib/writer.js
@@ -3,13 +3,12 @@ const path = require("path");
 const proc = require("process");
 const glob = require("glob");
 const unified = require("unified");
-const remark_parse = require("remark-parse");
 const remark_slug = require("remark-slug");
 const remark_stringify = require("remark-stringify");
 const remark_ref_links = require("remark-reference-links");
 const {noopCompiler} = require("./ast-tools");
 const {toSystemSlash, toForwardSlash, toRedactedPath} = require("./pathplus");
-const {writeIndex} = require("./index");
+const indexer = require('./indexer');
 const api = {};
 
 api.copyBaseDirToOutDir = function(context) {
@@ -75,24 +74,26 @@ api.writeIndex = function(context) {
     const {indexFile} = generateFiles;
     if (! indexFile) {
         return Promise.resolve(context);
+    } else if (typeof indexFile === "object") {
+        return this.writeMarkdownFile(context, indexer.getAST(context), indexFile.file);
     } else {
-        return this.writeMarkdownFile(context, writeIndex(context), indexFile);
+        return this.writeMarkdownFile(context, indexer.getAST(context), indexFile);
     }
 }
 
-api.writeMarkdownFile = function writeMarkdownFile(context, mdText, filename) {
+api.writeMarkdownFile = function writeMarkdownFile(context, mdAst, filename) {
     const {outDir, experimentalFootnotes} = context.opts;
     return new Promise((resolve, reject) => {
-        unified()
-            .use(remark_parse, { footnotes: experimentalFootnotes })
+        let processor = unified()
             .use(remark_slug)
             .use(remark_ref_links)
             .use(noopCompiler)
-            .use(remark_stringify)
-            .process(mdText, (err, file) => {
+            .use(remark_stringify);
+
+        processor.run(mdAst, (err, tree) => {
             if (err) reject(err);
             const p = path.resolve(outDir, filename);
-            fs.outputFile(p, file.contents, (err) => {
+            fs.outputFile(p, processor.stringify(tree), (err) => {
                 if (err) {
                     reject(err);
                 } else {
@@ -117,24 +118,11 @@ api.writeReport = function(context) {
 }
 
 function redactPaths(context) {
-    const {terms, glossaries} = context;
-
+    const {glossaries} = context;
     Object.values(glossaries).forEach(gloss => {
         gloss.basePath = toForwardSlash(toRedactedPath(gloss.basePath));
         gloss.outPath  = toForwardSlash(toRedactedPath(gloss.outPath ));
     });
-
-    terms.byOccurrence().forEach((term) => {
-        const occurrences = term.occurrences;
-        const paths = Object.keys(occurrences).sort();
-        const newPaths = {};
-        for (i = 0, len = paths.length; i < len; i++) {
-            const oldPath = paths[i];
-            const newPath = toForwardSlash(toRedactedPath(paths[i]));
-            newPaths[newPath] = occurrences[oldPath];
-        }
-        term.occurrences = newPaths;
-    });
 }
 
 /**
diff --git a/package-lock.json b/package-lock.json
index f372ae01..feabd09c 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1364,6 +1364,14 @@
       "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-1.1.3.tgz",
       "integrity": "sha512-1RUZVgQlpJSPWYbFSpmudq5nHY1doEIv89gBtF0s4gW1GF2XorxcA/70M5vq7rLv0a6mhOUccRsqkwhwLCIQ2Q=="
     },
+    "mdast-builder": {
+      "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/mdast-builder/-/mdast-builder-1.1.1.tgz",
+      "integrity": "sha512-a3KBk/LmYD6wKsWi8WJrGU/rXR4yuF4Men0JO0z6dSZCm5FrXXWTRDjqK0vGSqa+1M6p9edeuypZAZAzSehTUw==",
+      "requires": {
+        "@types/unist": "^2.0.3"
+      }
+    },
     "mdast-util-compact": {
       "version": "1.0.3",
       "resolved": "https://registry.npmjs.org/mdast-util-compact/-/mdast-util-compact-1.0.3.tgz",
@@ -2367,9 +2375,9 @@
       }
     },
     "vfile": {
-      "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-4.0.1.tgz",
-      "integrity": "sha512-lRHFCuC4SQBFr7Uq91oJDJxlnftoTLQ7eKIpMdubhYcVMho4781a8MWXLy3qZrZ0/STD1kRiKc0cQOHm4OkPeA==",
+      "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/vfile/-/vfile-4.0.2.tgz",
+      "integrity": "sha512-yhoTU5cDMSsaeaMfJ5g0bUKYkYmZhAh9fn9TZicxqn+Cw4Z439il2v3oT9S0yjlpqlI74aFOQCt3nOV+pxzlkw==",
       "requires": {
         "@types/unist": "^2.0.0",
         "is-buffer": "^2.0.0",
@@ -2384,11 +2392,11 @@
       "integrity": "sha512-Pa1ey0OzYBkLPxPZI3d9E+S4BmvfVwNAAXrrqGbwTVXWaX2p9kM1zZ+n35UtVM06shmWKH4RPRN8KI80qE3wNQ=="
     },
     "vfile-message": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-2.0.1.tgz",
-      "integrity": "sha512-KtasSV+uVU7RWhUn4Lw+wW1Zl/nW8JWx7JCPps10Y9JRRIDeDXf8wfBLoOSsJLyo27DqMyAi54C6Jf/d6Kr2Bw==",
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-2.0.2.tgz",
"https://registry.npmjs.org/vfile-message/-/vfile-message-2.0.2.tgz", + "integrity": "sha512-gNV2Y2fDvDOOqq8bEe7cF3DXU6QgV4uA9zMR2P8tix11l1r7zju3zry3wZ8sx+BEfuO6WQ7z2QzfWTvqHQiwsA==", "requires": { - "@types/unist": "^2.0.2", + "@types/unist": "^2.0.0", "unist-util-stringify-position": "^2.0.0" } }, diff --git a/package.json b/package.json index 963c4f09..7b5d5149 100644 --- a/package.json +++ b/package.json @@ -14,6 +14,7 @@ "start": "node ./bin/index.js --config ./test/input/glossarify-md.conf.json", "debug": "node --inspect-brk ./bin/index.js --config ./debug/glossarify-md.conf.json", "debug-cfg": "node --inspect-brk ./bin/index.js --config", + "config": "node ./bin/index.js --config", "test": "cd ./test && npm test", "test-win": "cd .\\test && npm run test-win", "dry-release": "standard-version --dry-run", @@ -51,6 +52,7 @@ "fs-extra": "^8.1.0", "github-slugger": "^1.0.0", "glob": "^7.0.3", + "mdast-builder": "^1.1.1", "minimist": "^1.2.0", "minimist-options": "^4.0.1", "remark-autolink-headings": "^5.2.1", @@ -63,7 +65,8 @@ "unist-builder": "^1.0.4", "unist-util-flatmap": "^1.0.0", "unist-util-visit": "^2.0.0", - "unist-util-visit-parents": "^3.0.0" + "unist-util-visit-parents": "^3.0.0", + "vfile": "^4.0.2" }, "devDependencies": { "standard-version": "^7.0.0"