+ Content is empty! +
++ Type any Markdown or MDC code in + editor to see it replaced by rendered nodes in this panel. +
+` tags should parse and render as inline code
+ if (tagName === 'code')
+ node.value = node.value.replace(tagName, 'code-inline')
+
+ return h.dangerous ? h.augment(node, u('raw', node.value)) : null
+}
diff --git a/docs/markdown-parser/handler/image.ts b/docs/markdown-parser/handler/image.ts
new file mode 100644
index 00000000..c9abbc0a
--- /dev/null
+++ b/docs/markdown-parser/handler/image.ts
@@ -0,0 +1,19 @@
+import type { H } from 'mdast-util-to-hast'
+import { encode } from 'mdurl'
+
+/**
+ * @type {Handler}
+ * @param {Image} node
+ */
+export default function image(h: H, node: any) {
+ const props: any = {
+ ...node.attributes,
+ src: encode(node.url),
+ alt: node.alt,
+ }
+
+ if (node.title !== null && node.title !== undefined)
+ props.title = node.title
+
+ return h(node, 'img', props)
+}
diff --git a/docs/markdown-parser/handler/index.ts b/docs/markdown-parser/handler/index.ts
new file mode 100644
index 00000000..bae7846a
--- /dev/null
+++ b/docs/markdown-parser/handler/index.ts
@@ -0,0 +1,33 @@
+import emphasis from './emphasis'
+import code from './code'
+import html from './html'
+import heading from './heading'
+import link from './link'
+import list from './list'
+import listItem from './listItem'
+import table from './table'
+import paragraph from './paragraph'
+import image from './image'
+import blockquote from './blockquote'
+import strong from './strong'
+import inlineCode from './inlineCode'
+import thematicBreak from './thematicBreak'
+import containerComponent from './containerComponent'
+
+export default {
+ emphasis,
+ code,
+ paragraph,
+ html,
+ link,
+ list,
+ listItem,
+ heading,
+ table,
+ image,
+ blockquote,
+ strong,
+ inlineCode,
+ thematicBreak,
+ containerComponent,
+}
diff --git a/docs/markdown-parser/handler/inlineCode.ts b/docs/markdown-parser/handler/inlineCode.ts
new file mode 100644
index 00000000..162aa3f3
--- /dev/null
+++ b/docs/markdown-parser/handler/inlineCode.ts
@@ -0,0 +1,9 @@
+import type { H } from 'mdast-util-to-hast'
+import { u } from 'unist-builder'
+
+export default function inlineCode(h: H, node: any) {
+ return h(node, 'code-inline', node.attributes, [
+ // @ts-expect-error
+ u('text', node.value.replace(/\r?\n|\r/g, ' ')),
+ ])
+}
diff --git a/docs/markdown-parser/handler/link.ts b/docs/markdown-parser/handler/link.ts
new file mode 100644
index 00000000..e98ab209
--- /dev/null
+++ b/docs/markdown-parser/handler/link.ts
@@ -0,0 +1,35 @@
+// import { join } from 'path'
+// import fs from 'fs'
+import type { H } from 'mdast-util-to-hast'
+import { all } from 'mdast-util-to-hast'
+import { encode } from 'mdurl'
+import type { MdastNode } from 'mdast-util-to-hast/lib'
+import { isRelative } from 'ufo'
+import { generatePath } from '../../transformers/path-meta'
+
+type Node = MdastNode & {
+ title: string
+ url: string
+ attributes?: any
+ tagName: string
+ children?: Node[]
+}
+
+export default function link(h: H, node: Node) {
+ const props: any = {
+ ...((node.attributes || {}) as object),
+ href: encode(normalizeLink(node.url)),
+ }
+
+ if (node.title !== null && node.title !== undefined)
+ props.title = node.title
+
+ return h(node, 'a', props, all(h, node))
+}
+
+function normalizeLink(link: string) {
+ if (link.endsWith('.md') && (isRelative(link) || (!/^https?/.test(link) && !link.startsWith('/'))))
+ return generatePath(link.replace(/\.md$/, ''), { forceLeadingSlash: false })
+ else
+ return link
+}
diff --git a/docs/markdown-parser/handler/list.ts b/docs/markdown-parser/handler/list.ts
new file mode 100644
index 00000000..146e5f13
--- /dev/null
+++ b/docs/markdown-parser/handler/list.ts
@@ -0,0 +1,25 @@
+import type { H } from 'mdast-util-to-hast'
+import { all } from 'mdast-util-to-hast'
+import type { MdastNode } from 'mdast-util-to-hast/lib'
+import { wrap } from './utils'
+
+type Node = MdastNode & {
+ ordered?: boolean
+ start?: number
+ checked?: boolean
+ children: Node[]
+}
+
+export default function list(h: H, node: Node) {
+ const props: any = {}
+ const name = `${node.ordered ? 'ol' : 'ul'}`
+
+ if (typeof node.start === 'number' && node.start !== 1)
+ props.start = node.start
+
+ // Add class for task list. See: https://github.com/remarkjs/remark-gfm#use
+ if ((node.children || []).some(child => typeof child.checked === 'boolean'))
+ props.className = ['contains-task-list']
+
+ return h(node, name, props, wrap(all(h, node), true))
+}
diff --git a/docs/markdown-parser/handler/listItem.ts b/docs/markdown-parser/handler/listItem.ts
new file mode 100644
index 00000000..c8a98665
--- /dev/null
+++ b/docs/markdown-parser/handler/listItem.ts
@@ -0,0 +1,66 @@
+// import { u } from 'unist-builder'
+import type { H } from 'mdast-util-to-hast'
+import { all } from 'mdast-util-to-hast'
+import type { MdastNode } from 'mdast-util-to-hast/lib'
+
+type Node = MdastNode & {
+ tagName: string
+ checked?: boolean
+ spread?: boolean
+ children?: Node[]
+}
+
+export default function listItem(h: H, node: Node, parent: Node) {
+  const result = all(h, node)
+  const loose = parent ? listLoose(parent) : listItemLoose(node) // loose list items keep their <p> wrappers
+  const props: any = {}
+  let wrapped: any[] = []
+  let index
+  let child
+
+  if (typeof node.checked === 'boolean') {
+    result.unshift(
+      h({} as any, 'input', {
+        type: 'checkbox',
+        checked: node.checked,
+        disabled: true,
+      }),
+    )
+
+    // According to github-markdown-css, this class hides bullet.
+    // See: https://github.com/sindresorhus/github-markdown-css (URL lost in extraction — confirm).
+    props.className = ['task-list-item']
+  }
+
+  const length = result.length
+  index = -1
+
+  while (++index < length) {
+    child = result[index] as Node
+
+    if (child.tagName === 'p' && !loose) // tight list: unwrap paragraph children
+      wrapped = wrapped.concat(child.children || [])
+    else
+      wrapped.push(child)
+  }
+
+  return h(node, 'li', props, wrapped)
+}
+
+function listLoose(node: Node) {
+ let loose = node.spread
+ const children = node.children as Node[]
+ const length = children.length
+ let index = -1
+
+ while (!loose && ++index < length)
+ loose = listItemLoose(children[index])
+
+ return loose
+}
+
+function listItemLoose(node: Node) {
+ const spread = node.spread
+ const children = (node.children || []) as Node[]
+ return spread === undefined || spread === null ? children.length > 1 : spread
+}
diff --git a/docs/markdown-parser/handler/paragraph.ts b/docs/markdown-parser/handler/paragraph.ts
new file mode 100644
index 00000000..aa521d21
--- /dev/null
+++ b/docs/markdown-parser/handler/paragraph.ts
@@ -0,0 +1,16 @@
+import type { H } from 'mdast-util-to-hast'
+import { all } from 'mdast-util-to-hast'
+
+import htmlTags from 'html-tags'
+import { kebabCase } from 'scule'
+import { getTagName } from './utils'
+
+export default function paragraph(h: H, node: any) {
+ if (node.children && node.children[0] && node.children[0].type === 'html') {
+ const tagName = kebabCase(getTagName(node.children[0].value) || 'div')
+ // Unwrap if component
+ if (!htmlTags.includes(tagName))
+ return all(h, node)
+ }
+ return h(node, 'p', all(h, node))
+}
diff --git a/docs/markdown-parser/handler/strong.ts b/docs/markdown-parser/handler/strong.ts
new file mode 100644
index 00000000..12d27a2c
--- /dev/null
+++ b/docs/markdown-parser/handler/strong.ts
@@ -0,0 +1,6 @@
+import type { H } from 'mdast-util-to-hast'
+import { all } from 'mdast-util-to-hast'
+
+export default function strong(h: H, node: any) {
+ return h(node, 'strong', node.attributes, all(h, node))
+}
diff --git a/docs/markdown-parser/handler/table.ts b/docs/markdown-parser/handler/table.ts
new file mode 100644
index 00000000..6d473308
--- /dev/null
+++ b/docs/markdown-parser/handler/table.ts
@@ -0,0 +1,35 @@
+import type { H } from 'mdast-util-to-hast'
+import { position } from 'unist-util-position'
+import { all } from 'mdast-util-to-hast'
+import { wrap } from './utils'
+
+export default function table(h: H, node: any) {
+ const rows = node.children
+ const align = node.align || []
+
+ const result = rows.map((row: any, index: number) => {
+ const childres = row.children
+ const name = index === 0 ? 'th' : 'td'
+ let pos = node.align ? align.length : childres.length
+ const out = []
+
+ while (pos--) {
+ const cell = childres[pos]
+ out[pos] = h(cell, name, { align: align[pos] }, cell ? all(h, cell) : [])
+ }
+
+ return h(row, 'tr', wrap(out, true))
+ })
+
+ const body
+ = result[1]
+ && h(
+ {
+ start: position(result[1]).start,
+ end: position(result[result.length - 1]).end,
+ } as any,
+ 'tbody',
+ wrap(result.slice(1), true),
+ )
+ return h(node, 'table', wrap([h(result[0].position, 'thead', wrap([result[0]], true))].concat(body || []), true))
+}
diff --git a/docs/markdown-parser/handler/thematicBreak.ts b/docs/markdown-parser/handler/thematicBreak.ts
new file mode 100644
index 00000000..3f1f772c
--- /dev/null
+++ b/docs/markdown-parser/handler/thematicBreak.ts
@@ -0,0 +1,6 @@
+import type { H } from 'mdast-util-to-hast'
+import type { MdastNode } from 'mdast-util-to-hast/lib'
+
+export default function thematicBreak(h: H, node: MdastNode) {
+ return h(node, 'hr')
+}
diff --git a/docs/markdown-parser/handler/utils.ts b/docs/markdown-parser/handler/utils.ts
new file mode 100644
index 00000000..62b456d8
--- /dev/null
+++ b/docs/markdown-parser/handler/utils.ts
@@ -0,0 +1,71 @@
+import { u } from 'unist-builder'
+
+/**
+ * Parses the value defined next to 3 back ticks
+ * in a codeblock and set line-highlights or
+ * filename from it
+ */
+export function parseThematicBlock(lang: string) {
+  /**
+   * Language property on node is missing
+   */
+  if (!lang) {
+    return {
+      language: undefined,
+      highlights: undefined,
+      filename: undefined,
+    }
+  }
+
+  const language = lang.replace(/[{|[](.+)/, '').match(/^[^ \t]+(?=[ \t]|$)/)
+  const highlightTokens = lang.match(/{([^}]+)}/)
+  const filenameTokens = lang.match(/\[(.+)\]/)
+
+  return {
+    language: language ? language[0] : undefined,
+    highlights: parseHighlightedLines(highlightTokens && highlightTokens[1]),
+    filename: Array.isArray(filenameTokens) ? filenameTokens[1] : undefined,
+  }
+}
+
+function parseHighlightedLines(lines?: string | null) {
+ const lineArray = String(lines || '')
+ .split(',')
+ .filter(Boolean)
+ .flatMap((line) => {
+ const [start, end] = line.trim().split('-').map(a => Number(a.trim()))
+ return Array.from({ length: (end || start) - start + 1 }).map((_, i) => start + i)
+ })
+ return lineArray.length ? lineArray : undefined
+}
+
+const TAG_NAME_REGEXP = /^<\/?([A-Za-z0-9-_]+) ?[^>]*>/
+export function getTagName(value: string) {
+ const result = String(value).match(TAG_NAME_REGEXP)
+
+ return result && result[1]
+}
+
+// mdast-util-to-hast/lib/wrap.js
+/**
+ * Wrap `nodes` with line feeds between each entry.
+ * Optionally adds line feeds at the start and end.
+ */
+export function wrap(nodes: any[], loose = false) {
+ const result = []
+ let index = -1
+
+ if (loose)
+ result.push(u('text', '\n'))
+
+ while (++index < nodes.length) {
+ if (index)
+ result.push(u('text', '\n'))
+ result.push(nodes[index])
+ }
+
+ if (loose && nodes.length > 0)
+ result.push(u('text', '\n'))
+
+ return result
+}
diff --git a/docs/markdown-parser/index.ts b/docs/markdown-parser/index.ts
new file mode 100644
index 00000000..ac92c64c
--- /dev/null
+++ b/docs/markdown-parser/index.ts
@@ -0,0 +1,109 @@
+import defu from 'defu'
+import remarkEmoji from 'remark-emoji'
+import rehypeSlug from 'rehype-slug'
+import remarkSqueezeParagraphs from 'remark-squeeze-paragraphs'
+import rehypeExternalLinks from 'rehype-external-links'
+import remarkGfm from 'remark-gfm'
+import rehypeSortAttributeValues from 'rehype-sort-attribute-values'
+import rehypeSortAttributes from 'rehype-sort-attributes'
+import rehypeRaw from 'rehype-raw'
+import { parseFrontMatter } from 'remark-mdc'
+import type { MarkdownOptions, MarkdownParsedContent, Toc } from '../types'
+import { generateToc } from './toc'
+import { contentHeading, generateBody } from './content'
+
+export const useDefaultOptions = (): MarkdownOptions => ({
+ mdc: true,
+ toc: {
+ depth: 2,
+ searchDepth: 2,
+ },
+ tags: {},
+ remarkPlugins: {
+ 'remark-emoji': {
+ instance: remarkEmoji,
+ },
+ 'remark-squeeze-paragraphs': {
+ instance: remarkSqueezeParagraphs,
+ },
+ 'remark-gfm': {
+ instance: remarkGfm,
+ },
+ },
+ rehypePlugins: {
+ 'rehype-slug': {
+ instance: rehypeSlug,
+ },
+ 'rehype-external-links': {
+ instance: rehypeExternalLinks,
+ },
+ 'rehype-sort-attribute-values': {
+ instance: rehypeSortAttributeValues,
+ },
+ 'rehype-sort-attributes': {
+ instance: rehypeSortAttributes,
+ },
+ 'rehype-raw': {
+ instance: rehypeRaw,
+ passThrough: ['element'],
+ },
+ },
+})
+
+export async function parse(file: string, userOptions: Partial<MarkdownOptions> = {}) {
+  const options = defu(userOptions, useDefaultOptions()) as MarkdownOptions
+
+  const { content, data } = await parseFrontMatter(file)
+
+  // Compile markdown from file content to JSON
+  const body = await generateBody(content, { ...options, data })
+
+  /**
+   * generate toc if it is not disabled in front-matter
+   */
+  let toc: Toc | undefined
+  if (data.toc !== false) {
+    const tocOption = defu(data.toc || {}, options.toc)
+    toc = generateToc(body, tocOption)
+  }
+
+  const excerptString = useExcerpt(content)
+  const excerpt = excerptString
+    ? await generateBody(excerptString, { ...options, data })
+    : undefined
+
+  /**
+   * Process content headings
+   */
+  const heading = contentHeading(body)
+
+  return <{ meta: Partial<MarkdownParsedContent>; body: MarkdownParsedContent['body'] }> {
+    body: {
+      ...body,
+      toc,
+    },
+    meta: {
+      _empty: content.trim().length === 0,
+      title: heading.title,
+      description: heading.description,
+      excerpt,
+      ...data,
+    },
+  }
+}
+
+function useExcerpt(content: string, delimiter = /<!--more-->/i) {
+  if (!delimiter)
+    return ''
+
+  // if enabled, get the excerpt defined after front-matter
+  let idx = -1
+  const match = delimiter.exec(content)
+  if (match)
+    idx = match.index
+
+  if (idx !== -1)
+    return content.slice(0, idx)
+
+  return content
+}
diff --git a/docs/markdown-parser/toc.ts b/docs/markdown-parser/toc.ts
new file mode 100644
index 00000000..5959f2d7
--- /dev/null
+++ b/docs/markdown-parser/toc.ts
@@ -0,0 +1,71 @@
+import type { MarkdownNode, MarkdownRoot, Toc, TocLink } from '../types'
+import { flattenNode, flattenNodeText } from './utils/ast'
+
+const TOC_TAGS = ['h2', 'h3', 'h4', 'h5', 'h6']
+
+const TOC_TAGS_DEPTH = TOC_TAGS.reduce((tags: any, tag: string) => {
+ tags[tag] = Number(tag.charAt(tag.length - 1))
+ return tags
+}, {})
+
+const getHeaderDepth = (node: MarkdownNode): number => TOC_TAGS_DEPTH[node.tag as string]
+
+const getTocTags = (depth: number): string[] => {
+  if (depth < 1 || depth > 5) {
+    // eslint-disable-next-line
+    console.warn(`\`toc.depth\` is set to ${depth}. It should be a number between 1 and 5. Falling back to depth 1.`)
+    depth = 1
+  }
+
+  return TOC_TAGS.slice(0, depth)
+}
+
+function nestHeaders(headers: TocLink[]): TocLink[] {
+  if (headers.length <= 1)
+    return headers
+
+  const toc: TocLink[] = []
+  let parent: TocLink | undefined
+  headers.forEach((header) => {
+    if (!parent || header.depth <= parent.depth) {
+      header.children = []
+      parent = header
+      toc.push(header)
+    }
+    else {
+      parent.children!.push(header)
+    }
+  })
+  toc.forEach((header) => {
+    if (header.children?.length)
+      header.children = nestHeaders(header.children)
+    else
+      delete header.children
+  })
+  return toc
+}
+
+export function generateFlatToc(body: MarkdownRoot, options: Toc): Toc {
+ const { searchDepth, depth, title = '' } = options
+ const tags = getTocTags(depth)
+
+ const headers = flattenNode(body, searchDepth).filter((node: MarkdownNode) => tags.includes(node.tag || ''))
+
+ const links: TocLink[] = headers.map(node => ({
+ id: node.props?.id,
+ depth: getHeaderDepth(node),
+ text: flattenNodeText(node),
+ }))
+ return {
+ title,
+ searchDepth,
+ depth,
+ links,
+ }
+}
+
+export function generateToc(body: MarkdownRoot, options: Toc): Toc {
+ const toc = generateFlatToc(body, options)
+ toc.links = nestHeaders(toc.links)
+ return toc
+}
diff --git a/docs/markdown-parser/utils/ast.ts b/docs/markdown-parser/utils/ast.ts
new file mode 100644
index 00000000..f3a329f5
--- /dev/null
+++ b/docs/markdown-parser/utils/ast.ts
@@ -0,0 +1,35 @@
+import type { MarkdownNode } from '../../types'
+
+export function flattenNodeText(node: MarkdownNode): string {
+ if (node.type === 'text') {
+ return node.value || ''
+ }
+ else {
+ return (node.children || []).reduce((text: string, child: MarkdownNode) => {
+ return text.concat(flattenNodeText(child))
+ }, '')
+ }
+}
+
+export function flattenNode(node: MarkdownNode, maxDepth = 2, _depth = 0): Array<MarkdownNode> {
+  if (!Array.isArray(node.children) || _depth === maxDepth)
+    return [node]
+
+  return [
+    node,
+    ...node.children.reduce((acc, child) => acc.concat(flattenNode(child, maxDepth, _depth + 1)), [] as Array<MarkdownNode>),
+  ]
+}
+
+export function setNodeData(node: MarkdownNode & { data: any }, name: string, value: any, pageData: any) {
+ if (!name.startsWith(':'))
+ name = `:${name}`
+
+ const dataKey = `content_d_${randomHash()}`
+ pageData[dataKey] = value
+ node.data.hProperties[name] = dataKey
+}
+
+function randomHash() {
+  return Math.random().toString(36).slice(2, 18) // substr() is deprecated; slice(2, 18) is equivalent here
+}
diff --git a/docs/markdown-parser/utils/index.ts b/docs/markdown-parser/utils/index.ts
new file mode 100644
index 00000000..49f68a0a
--- /dev/null
+++ b/docs/markdown-parser/utils/index.ts
@@ -0,0 +1,2 @@
+export * from './ast'
+export * from './node'
diff --git a/docs/markdown-parser/utils/node.ts b/docs/markdown-parser/utils/node.ts
new file mode 100644
index 00000000..93136074
--- /dev/null
+++ b/docs/markdown-parser/utils/node.ts
@@ -0,0 +1,110 @@
+import type { VNode } from 'vue'
+import type { MarkdownNode } from '../../types'
+
+/**
+ * List of text nodes
+ */
+export const TEXT_TAGS = ['p', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'li']
+
+/**
+ * Check virtual node's tag
+ * @param vnode Virtuel node from Vue virtual DOM
+ * @param tag tag name
+ * @returns `true` it the virtual node match the tag
+ */
+export function isTag(vnode: VNode | MarkdownNode, tag: string | symbol): boolean {
+ // Vue 3 uses `type` instead of `tag`
+ if (vnode.type === tag)
+ return true
+
+ // Vue 3 VNode `type` can be an object (tag is provided by ContentRendererMarkdown)
+ if (typeof vnode.type === 'object' && (vnode.type as any).tag === tag)
+ return true
+
+ // Markdown node
+ if ((vnode as MarkdownNode).tag === tag)
+ return true
+
+ return false
+}
+
+/**
+ * Check if virtual node is text node
+ */
+export function isText(vnode: VNode | MarkdownNode): boolean {
+ return isTag(vnode, 'text') || typeof vnode.children === 'string'
+}
+
+/**
+ * Find children of a virtual node
+ * @param vnode Virtuel node from Vue virtual DOM
+ * @returns Children of given node
+ */
+export function nodeChildren(node: VNode | MarkdownNode) {
+  if (Array.isArray(node.children) || typeof node.children === 'string')
+    return node.children
+
+  // Vue3 VNode children (slots object); ?. guards against null/undefined children
+  if (typeof (node.children as any)?.default === 'function')
+    return (node.children as any).default()
+
+  return []
+}
+
+/**
+ * Calculate text content of a virtual node
+ * @param vnode Virtuel node from Vue virtual DOM
+ * @returns text content of given node
+ */
+export function nodeTextContent(node: VNode | MarkdownNode): string {
+ // Return empty string is vnode is falsy
+ if (!node)
+ return ''
+
+ if (Array.isArray(node))
+ return node.map(nodeTextContent).join('')
+
+ if (isText(node))
+ return node.children as string || (node as MarkdownNode).value
+
+ // Walk through node children
+ const children = nodeChildren(node)
+ if (Array.isArray(children))
+ return children.map(nodeTextContent).join('')
+
+ // Return empty string for non-text nodes without any children
+ return ''
+}
+
+/**
+ * Unwrap tags within a virtual node
+ * @param vnode Virtuel node from Vue virtual DOM
+ * @param tags list of tags to unwrap
+ * @returns
+ */
+export function unwrap(vnode: VNode, tags = ['p']): VNode | VNode[] {
+ if (Array.isArray(vnode))
+ return vnode.flatMap(node => unwrap(node, tags))
+
+ let result: VNode | VNode[] = vnode
+
+ // unwrap children
+ if (tags.some(tag => tag === '*' || isTag(vnode, tag))) {
+ result = nodeChildren(vnode) || vnode
+ if (!Array.isArray(result) && TEXT_TAGS.some(tag => isTag(vnode, tag)))
+ result = [result]
+ }
+
+ return result
+}
+
+export function flatUnwrap(vnodes: VNode | VNode[], tags = ['p']): VNode[] {
+ vnodes = Array.isArray(vnodes) ? vnodes : [vnodes]
+
+ if (!tags.length)
+ return vnodes
+
+ return vnodes
+ .flatMap(vnode => flatUnwrap(unwrap(vnode, [tags[0]]), tags.slice(1)))
+ .filter(vnode => !(isText(vnode) && nodeTextContent(vnode).trim() === ''))
+}
diff --git a/docs/module.ts b/docs/module.ts
new file mode 100644
index 00000000..6f1bb27f
--- /dev/null
+++ b/docs/module.ts
@@ -0,0 +1,717 @@
+import fs from 'fs'
+import {
+ addComponentsDir,
+ addImports,
+ addPlugin,
+ addTemplate,
+ createResolver,
+ defineNuxtModule,
+ extendViteConfig,
+ resolveModule,
+} from '@nuxt/kit'
+import { genImport, genSafeVariableName } from 'knitwork'
+import type { ListenOptions } from 'listhen'
+import defu from 'defu'
+import { hash } from 'ohash'
+import { join, relative } from 'pathe'
+import type { Lang as ShikiLang, Theme as ShikiTheme } from 'shiki-es'
+import { listen } from 'listhen'
+import type { WatchEvent } from 'unstorage'
+import { createStorage } from 'unstorage'
+import { joinURL, withLeadingSlash, withTrailingSlash } from 'ufo'
+import { name, version } from '../package.json'
+import {
+ CACHE_VERSION,
+ MOUNT_PREFIX,
+ PROSE_TAGS,
+ createWebSocket,
+ getMountDriver,
+ logger,
+ processMarkdownOptions,
+ useContentMounts,
+} from './utils'
+import type { MarkdownPlugin, QueryBuilderParams } from './runtime/types'
+
+export type MountOptions = {
+ driver: 'fs' | 'http' | string
+ name?: string
+ prefix?: string
+ [options: string]: any
+}
+
+export interface ModuleOptions {
+  /**
+   * Base route that will be used for content api
+   *
+   * @default '_content'
+   * @deprecated Use `api.base` instead
+   */
+  base: string
+  api: {
+    /**
+     * Base route that will be used for content api
+     *
+     * @default '/api/_content'
+     */
+    baseURL: string
+  }
+  /**
+   * Disable content watcher and hot content reload.
+   * Note: Watcher is a development feature and will not be included in production.
+   *
+   * @default true
+   */
+  watch: false | {
+    ws: Partial<ListenOptions>
+  }
+  /**
+   * Contents can be located in multiple places, in multiple directories or even in remote git repositories.
+   * Using sources option you can tell Content module where to look for contents.
+   *
+   * @default ['content']
+   */
+  sources: Record<string, MountOptions> | Array<string | MountOptions>
+  /**
+   * List of ignore pattern that will be used for excluding content from parsing and rendering.
+   *
+   * @default ['\\.', '-']
+   */
+  ignores: Array<string>
+  /**
+   * Content module uses `remark` and `rehype` under the hood to compile markdown files.
+   * You can modify this options to control its behavior.
+   */
+  markdown: {
+    /**
+     * Whether MDC syntax should be supported or not.
+     *
+     * @default true
+     */
+    mdc?: boolean
+    /**
+     * Control behavior of Table of Contents generation
+     */
+    toc?: {
+      /**
+       * Maximum heading depth that includes in the table of contents.
+       *
+       * @default 2
+       */
+      depth?: number
+      /**
+       * Maximum depth of nested tags to search for heading.
+       *
+       * @default 2
+       */
+      searchDepth?: number
+    }
+    /**
+     * Tags will be used to replace markdown components and render custom components instead of default ones.
+     *
+     * @default {}
+     */
+    tags?: Record<string, string>
+    /**
+     * Register custom remark plugin to provide new feature into your markdown contents.
+     * Checkout: https://github.com/remarkjs/remark/blob/main/doc/plugins.md
+     *
+     * @default []
+     */
+    remarkPlugins?: Array<string | [string, MarkdownPlugin]> | Record<string, false | MarkdownPlugin>
+    /**
+     * Register custom rehype plugin to provide new feature into your markdown contents.
+     * Checkout: https://github.com/rehypejs/rehype/blob/main/doc/plugins.md
+     *
+     * @default []
+     */
+    rehypePlugins?: Array<string | [string, MarkdownPlugin]> | Record<string, false | MarkdownPlugin>
+    /**
+     * Anchor link generation config
+     *
+     * @default {}
+     */
+    anchorLinks?: boolean | {
+      /**
+       * Sets the maximal depth for anchor link generation
+       *
+       * @default 4
+       */
+      depth?: number
+      /**
+       * Excludes headings from link generation when they are in the depth range.
+       *
+       * @default [1]
+       */
+      exclude?: number[]
+    }
+  }
+  /**
+   * Content module uses `shiki` to highlight code blocks.
+   * You can configure Shiki options to control its behavior.
+   */
+  highlight: false | {
+    /**
+     * Default theme that will be used for highlighting code blocks.
+     */
+    theme?: ShikiTheme | {
+      default: ShikiTheme
+      [theme: string]: ShikiTheme
+    }
+    /**
+     * Preloaded languages that will be available for highlighting code blocks.
+     */
+    preload?: ShikiLang[]
+  }
+  /**
+   * Options for yaml parser.
+   *
+   * @default {}
+   */
+  yaml: false | Record<string, any>
+  /**
+   * Options for csv parser.
+   *
+   * @default {}
+   */
+  csv: false | {
+    json?: boolean
+    delimeter?: string
+  }
+  /**
+   * Enable/Disable navigation.
+   *
+   * @default {}
+   */
+  navigation: false | {
+    fields: Array<string>
+  }
+  /**
+   * List of locale codes.
+   * This codes will be used to detect contents locale.
+   *
+   * @default []
+   */
+  locales: Array<string>
+  /**
+   * Default locale for top level contents.
+   *
+   * @default undefined
+   */
+  defaultLocale?: string
+  /**
+   * Document-driven mode config
+   *
+   * @default false
+   */
+  documentDriven: boolean | {
+    page?: boolean
+    navigation?: boolean
+    surround?: boolean
+    globals?: {
+      [key: string]: QueryBuilderParams
+    }
+    layoutFallbacks?: string[]
+    injectPage?: boolean
+  }
+  experimental: {
+    clientDB: boolean
+    stripQueryParameters: boolean
+  }
+}
+
+interface ContentContext extends ModuleOptions {
+  base: Readonly<string>
+  transformers: Array<string>
+}
+
+export interface ModuleHooks {
+ 'content:context'(ctx: ContentContext): void
+}
+
+export default defineNuxtModule({
+ meta: {
+ name,
+ version,
+ configKey: 'content',
+ compatibility: {
+ nuxt: '^3.0.0-rc.3',
+ },
+ },
+ defaults: {
+ // @deprecated
+ base: '',
+ api: {
+ baseURL: '/api/_content',
+ },
+ watch: {
+ ws: {
+ port: {
+ port: 4000,
+ portRange: [4000, 4040],
+ },
+ hostname: 'localhost',
+ showURL: false,
+ },
+ },
+ sources: {},
+ ignores: ['\\.', '-'],
+ locales: [],
+ defaultLocale: undefined,
+ highlight: false,
+ markdown: {
+ tags: Object.fromEntries(PROSE_TAGS.map(t => [t, `prose-${t}`])),
+ anchorLinks: {
+ depth: 4,
+ exclude: [1],
+ },
+ },
+ yaml: {},
+ csv: {
+ delimeter: ',',
+ json: true,
+ },
+ navigation: {
+ fields: [],
+ },
+ documentDriven: false,
+ experimental: {
+ clientDB: false,
+ stripQueryParameters: false,
+ },
+ },
+ async setup(options, nuxt) {
+ const { resolve } = createResolver(import.meta.url)
+ const resolveRuntimeModule = (path: string) => resolveModule(path, { paths: resolve('./runtime') })
+ // Ensure default locale alway is the first item of locales
+ options.locales = Array.from(new Set([options.defaultLocale, ...options.locales].filter(Boolean))) as string[]
+
+ // Disable cache in dev mode
+ const buildIntegrity = nuxt.options.dev ? undefined : Date.now()
+
+ if (options.base) {
+ logger.warn('content.base is deprecated. Use content.api.baseURL instead.')
+ options.api.baseURL = withLeadingSlash(joinURL('api', options.base))
+ }
+
+ const contentContext: ContentContext = {
+ transformers: [],
+ ...options,
+ }
+
+ // Add Vite configurations
+ extendViteConfig((config) => {
+ config.optimizeDeps = config.optimizeDeps || {}
+ config.optimizeDeps.include = config.optimizeDeps.include || []
+ config.optimizeDeps.include.push(
+ 'html-tags', 'slugify',
+ )
+ })
+
+ nuxt.hook('nitro:config', (nitroConfig) => {
+ // Init Nitro context
+ nitroConfig.prerender = nitroConfig.prerender || {}
+ nitroConfig.prerender.routes = nitroConfig.prerender.routes || []
+ nitroConfig.handlers = nitroConfig.handlers || []
+
+ // Add server handlers
+ nitroConfig.handlers.push(
+ {
+ method: 'get',
+ route: `${options.api.baseURL}/query/:qid/**:params`,
+ handler: resolveRuntimeModule('./server/api/query'),
+ },
+ {
+ method: 'get',
+ route: `${options.api.baseURL}/query/:qid`,
+ handler: resolveRuntimeModule('./server/api/query'),
+ },
+ {
+ method: 'get',
+ route: `${options.api.baseURL}/query`,
+ handler: resolveRuntimeModule('./server/api/query'),
+ },
+ {
+ method: 'get',
+ route: nuxt.options.dev
+ ? `${options.api.baseURL}/cache.json`
+ : `${options.api.baseURL}/cache.${buildIntegrity}.json`,
+ handler: resolveRuntimeModule('./server/api/cache'),
+ },
+ )
+
+ if (!nuxt.options.dev)
+ nitroConfig.prerender.routes.unshift(`${options.api.baseURL}/cache.${buildIntegrity}.json`)
+
+ // Register source storages
+ const sources = useContentMounts(nuxt, contentContext.sources)
+ nitroConfig.devStorage = Object.assign(nitroConfig.devStorage || {}, sources)
+ nitroConfig.devStorage['cache:content'] = {
+ driver: 'fs',
+ base: resolve(nuxt.options.buildDir, 'content-cache'),
+ }
+
+ // Tell Nuxt to ignore content dir for app build
+ for (const source of Object.values(sources)) {
+ // Only targets directories inside the srcDir
+ if (source.driver === 'fs' && source.base.includes(nuxt.options.srcDir)) {
+ const wildcard = join(source.base, '**/*').replace(withTrailingSlash(nuxt.options.srcDir), '')
+ nuxt.options.ignore.push(
+ // Remove `srcDir` from the path
+ wildcard,
+ `!${wildcard}.vue`,
+ )
+ }
+ }
+ nitroConfig.bundledStorage = nitroConfig.bundledStorage || []
+ nitroConfig.bundledStorage.push('/cache/content')
+
+ // @ts-expect-error
+ nitroConfig.externals = defu(typeof nitroConfig.externals === 'object' ? nitroConfig.externals : {}, {
+ inline: [
+ // Inline module runtime in Nitro bundle
+ resolve('./runtime'),
+ ],
+ })
+
+ nitroConfig.alias = nitroConfig.alias || {}
+ nitroConfig.alias['#content/server'] = resolveRuntimeModule('./server')
+
+ const transformers = contentContext.transformers.map((t) => {
+ const name = `${genSafeVariableName(relative(nuxt.options.rootDir, t)).replace(/_(45|46|47)/g, '_')}_${hash(t)}`
+ return { name, import: genImport(t, name) }
+ })
+
+ nitroConfig.virtual = nitroConfig.virtual || {}
+ nitroConfig.virtual['#content/virtual/transformers'] = [
+ ...transformers.map(t => t.import),
+ `export const transformers = [${transformers.map(t => t.name).join(', ')}]`,
+ 'export const getParser = (ext) => transformers.find(p => ext.match(new RegExp(p.extensions.join("|"), "i")) && p.parse)',
+ 'export const getTransformers = (ext) => transformers.filter(p => ext.match(new RegExp(p.extensions.join("|"), "i")) && p.transform)',
+ 'export default () => {}',
+ ].join('\n')
+ })
+
+ // Register composables
+ addImports([
+ { name: 'queryContent', as: 'queryContent', from: resolveRuntimeModule('./composables/query') },
+ { name: 'useContentHelpers', as: 'useContentHelpers', from: resolveRuntimeModule('./composables/helpers') },
+ { name: 'useContentHead', as: 'useContentHead', from: resolveRuntimeModule('./composables/head') },
+ { name: 'withContentBase', as: 'withContentBase', from: resolveRuntimeModule('./composables/utils') },
+ { name: 'useUnwrap', as: 'useUnwrap', from: resolveRuntimeModule('./composables/utils') },
+ ])
+
+ // Register components
+ await addComponentsDir({
+ path: resolve('./runtime/components'),
+ pathPrefix: false,
+ prefix: '',
+ global: true,
+ })
+
+ const typesPath = addTemplate({
+ filename: 'types/content.d.ts',
+ getContents: () => [
+ 'declare module \'#content/server\' {',
+ ` const serverQueryContent: typeof import('${resolve('./runtime/server')}').serverQueryContent`,
+ ` const parseContent: typeof import('${resolve('./runtime/server')}').parseContent`,
+ '}',
+ ].join('\n'),
+ }).dst
+
+ nuxt.hook('prepare:types', (options) => {
+ options.references.push({ path: typesPath })
+ })
+
+ // Register user global components
+ const _layers = [...nuxt.options._layers].reverse()
+ for (const layer of _layers) {
+ const srcDir = layer.config.srcDir
+ const globalComponents = resolve(srcDir, 'components/content')
+ const dirStat = await fs.promises.stat(globalComponents).catch(() => null)
+ if (dirStat && dirStat.isDirectory()) {
+ nuxt.hook('components:dirs', (dirs) => {
+ dirs.unshift({
+ path: globalComponents,
+ global: true,
+ pathPrefix: false,
+ prefix: '',
+ })
+ })
+ }
+ }
+
+ // Register navigation
+ if (options.navigation) {
+ addImports({ name: 'fetchContentNavigation', as: 'fetchContentNavigation', from: resolveRuntimeModule('./composables/navigation') })
+
+ nuxt.hook('nitro:config', (nitroConfig) => {
+ nitroConfig.handlers = nitroConfig.handlers || []
+ nitroConfig.handlers.push(
+ {
+ method: 'get',
+ route: `${options.api.baseURL}/navigation/:qid/**:params`,
+ handler: resolveRuntimeModule('./server/api/navigation'),
+ }, {
+ method: 'get',
+ route: `${options.api.baseURL}/navigation/:qid`,
+ handler: resolveRuntimeModule('./server/api/navigation'),
+ },
+ {
+ method: 'get',
+ route: `${options.api.baseURL}/navigation`,
+ handler: resolveRuntimeModule('./server/api/navigation'),
+ },
+ )
+ })
+ }
+ else {
+ addImports({ name: 'navigationDisabled', as: 'fetchContentNavigation', from: resolveRuntimeModule('./composables/utils') })
+ }
+
+ // Register document-driven
+ if (options.documentDriven) {
+ // Enable every feature by default
+ const defaultDocumentDrivenConfig = {
+ page: true,
+ navigation: true,
+ surround: true,
+ globals: {},
+ layoutFallbacks: ['theme'],
+ injectPage: true,
+ }
+
+ // If set to true, use defaults else merge defaults with user config
+ if (options.documentDriven === true) {
+ options.documentDriven = defaultDocumentDrivenConfig
+ }
+ else {
+ options.documentDriven = {
+ ...defaultDocumentDrivenConfig,
+ ...options.documentDriven,
+ }
+ }
+
+ // Support layout field by default
+ if (options.navigation)
+ options.navigation.fields.push('layout')
+
+ addImports([
+ { name: 'useContentState', as: 'useContentState', from: resolveRuntimeModule('./composables/content') },
+ { name: 'useContent', as: 'useContent', from: resolveRuntimeModule('./composables/content') },
+ ])
+
+ addPlugin(resolveRuntimeModule('./plugins/documentDriven'))
+
+ if (options.documentDriven.injectPage) {
+ nuxt.options.pages = true
+
+ nuxt.hook('pages:extend', (pages) => {
+ // Respect user's custom catch-all page
+ if (!pages.find(page => page.path === '/:slug(.*)*')) {
+ pages.unshift({
+ name: 'slug',
+ path: '/:slug(.*)*',
+ file: resolveRuntimeModule('./pages/document-driven.vue'),
+ children: [],
+ })
+ }
+ })
+ nuxt.hook('app:resolve', async (app) => {
+ if (app.mainComponent?.includes('@nuxt/ui-templates')) {
+ app.mainComponent = resolveRuntimeModule('./app.vue')
+ }
+ else {
+ const appContent = await fs.promises.readFile(app.mainComponent!, { encoding: 'utf-8' })
+ if (appContent.includes('` inside `app.vue` will cause unwanted layout shifting in your application.',
+ 'Consider removing `` from `app.vue` and using it in your pages.',
+ ].join(''))
+ }
+ }
+ })
+ }
+ }
+ else {
+ // Noop useContent
+ addImports([
+ { name: 'useContentDisabled', as: 'useContentState', from: resolveRuntimeModule('./composables/utils') },
+ { name: 'useContentDisabled', as: 'useContent', from: resolveRuntimeModule('./composables/utils') },
+ ])
+ }
+
+ // @ts-expect-error
+ await nuxt.callHook('content:context', contentContext)
+
+ contentContext.defaultLocale = contentContext.defaultLocale || contentContext.locales[0]
+
+ // Generate cache integrity based on content context
+ const cacheIntegrity = hash({
+ locales: options.locales,
+ options: options.defaultLocale,
+ markdown: options.markdown,
+ hightlight: options.highlight,
+ })
+
+ // Process markdown plugins, resolve paths
+ contentContext.markdown = processMarkdownOptions(contentContext.markdown)
+
+ nuxt.options.runtimeConfig.public.content = defu(nuxt.options.runtimeConfig.public.content, {
+ locales: options.locales,
+ defaultLocale: contentContext.defaultLocale,
+ integrity: buildIntegrity,
+ experimental: {
+ stripQueryParameters: options.experimental.stripQueryParameters,
+ clientDB: options.experimental.clientDB && nuxt.options.ssr === false,
+ },
+ api: {
+ baseURL: options.api.baseURL,
+ },
+ navigation: contentContext.navigation as any,
+ // Tags will be used in the markdown renderer for component replacement
+ tags: contentContext.markdown.tags as any,
+ highlight: options.highlight as any,
+ wsUrl: '',
+ // Document-driven configuration
+ documentDriven: options.documentDriven as any,
+ // Anchor link generation config
+ anchorLinks: options.markdown.anchorLinks,
+ })
+
+ // Context will be used in the server
+ nuxt.options.runtimeConfig.content = defu(nuxt.options.runtimeConfig.content, {
+ cacheVersion: CACHE_VERSION,
+ cacheIntegrity,
+ ...contentContext as any,
+ })
+
+ // @nuxtjs/tailwindcss support
+ // @ts-expect-error - Module might not exist
+ nuxt.hook('tailwindcss:config', (tailwindConfig) => {
+ tailwindConfig.content = tailwindConfig.content ?? []
+ tailwindConfig.content.push(resolve(nuxt.options.buildDir, 'content-cache', 'parsed/**/*.md'))
+ })
+
+ // Setup content dev module
+ if (!nuxt.options.dev) {
+ nuxt.hook('build:before', async () => {
+ const storage = createStorage()
+ const sources = useContentMounts(nuxt, contentContext.sources)
+ sources['cache:content'] = {
+ driver: 'fs',
+ base: resolve(nuxt.options.buildDir, 'content-cache'),
+ }
+ for (const [key, source] of Object.entries(sources))
+ storage.mount(key, getMountDriver(source))
+
+ let keys = await storage.getKeys('content:source')
+
+ // Filter invalid characters & ignore patterns
+ const invalidKeyCharacters = '\'"?#/'.split('')
+ const contentIgnores: Array = contentContext.ignores.map((p: any) =>
+ typeof p === 'string' ? new RegExp(`^${p}|:${p}`) : p,
+ )
+ keys = keys.filter((key) => {
+ if (key.startsWith('preview:') || contentIgnores.some(prefix => prefix.test(key)))
+ return false
+
+ if (invalidKeyCharacters.some(ik => key.includes(ik)))
+ return false
+
+ return true
+ })
+ await Promise.all(
+ keys.map(async key => await storage.setItem(
+ `cache:content:parsed:${key.substring(15)}`,
+ await storage.getItem(key),
+ )),
+ )
+ })
+ return
+ }
+ // ~~ DEV ~~ //
+
+ // Add Content plugin
+ addPlugin(resolveRuntimeModule('./plugins/ws'))
+
+ nuxt.hook('nitro:init', async (nitro) => {
+ if (!options.watch || !options.watch.ws)
+ return
+
+ const ws = createWebSocket()
+
+ // Dispose storage on nuxt close
+ nitro.hooks.hook('close', async () => {
+ await ws.close()
+ })
+
+ // Listen dev server
+ const { server, url } = await listen(() => 'Nuxt Content', options.watch.ws)
+
+ server.on('upgrade', ws.serve)
+
+ // Register ws url
+ nitro.options.runtimeConfig.public.content.wsUrl = url.replace('http', 'ws')
+
+ // Watch contents
+ await nitro.storage.watch(async (event: WatchEvent, key: string) => {
+ // Ignore events that are not related to content
+ if (!key.startsWith(MOUNT_PREFIX))
+ return
+
+ key = key.substring(MOUNT_PREFIX.length)
+
+ // Remove content Index
+ await nitro.storage.removeItem('cache:content:content-index.json')
+
+ // Broadcast a message to the server to refresh the page
+ ws.broadcast({ event, key })
+ })
+ })
+ },
+})
+
+interface ModulePublicRuntimeConfig {
+ experimental: {
+ stripQueryParameters: boolean
+ clientDB: boolean
+ }
+
+ defaultLocale: ModuleOptions['defaultLocale']
+
+ locales: ModuleOptions['locales']
+
+ tags: Record
+
+ base: string
+
+ // Websocket server URL
+ wsUrl?: string
+
+ // Shiki config
+ highlight: ModuleOptions['highlight']
+
+ navigation: ModuleOptions['navigation']
+}
+
+interface ModulePrivateRuntimeConfig {
+ /**
+ * Internal version that represents cache format.
+ * This is used to invalidate cache when the format changes.
+ */
+ cacheVersion: string
+ cacheIntegrity: string
+}
+
+declare module '@nuxt/schema' {
+ interface ConfigSchema {
+ runtimeConfig: {
+ public?: {
+ content?: ModulePublicRuntimeConfig
+ }
+ private?: {
+ content?: ModulePrivateRuntimeConfig & ContentContext
+ }
+ }
+ }
+}
diff --git a/docs/plugins/vue-final-modal.ts b/docs/plugins/vue-final-modal.ts
new file mode 100644
index 00000000..5019a2a2
--- /dev/null
+++ b/docs/plugins/vue-final-modal.ts
@@ -0,0 +1,6 @@
+import { createVfm } from 'vue-final-modal'
+
+export default defineNuxtPlugin((nuxtApp) => {
+ const vfm = createVfm() as any
+ nuxtApp.vueApp.use(vfm)
+})
\ No newline at end of file
diff --git a/docs/transformers/csv/create-tokenizer.ts b/docs/transformers/csv/create-tokenizer.ts
new file mode 100644
index 00000000..aa5e5e85
--- /dev/null
+++ b/docs/transformers/csv/create-tokenizer.ts
@@ -0,0 +1,600 @@
+// Do not edit this file. This is code generated by micromark.
+// See: https://github.com/micromark/micromark/blob/ed234535990d3e968f3c108d03f3235d733c43ac/packages/micromark/dev/lib/create-tokenizer.js
+// @ts-nocheck
+/* eslint-disable */
+/**
+ * @typedef {import('micromark-util-types').Code} Code
+ * @typedef {import('micromark-util-types').Chunk} Chunk
+ * @typedef {import('micromark-util-types').Point} Point
+ * @typedef {import('micromark-util-types').Token} Token
+ * @typedef {import('micromark-util-types').Effects} Effects
+ * @typedef {import('micromark-util-types').State} State
+ * @typedef {import('micromark-util-types').Construct} Construct
+ * @typedef {import('micromark-util-types').InitialConstruct} InitialConstruct
+ * @typedef {import('micromark-util-types').ConstructRecord} ConstructRecord
+ * @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
+ * @typedef {import('micromark-util-types').ParseContext} ParseContext
+ */
+
+/**
+ * @typedef Info
+ * @property {() => void} restore
+ * @property {number} from
+ *
+ * @callback ReturnHandle
+ * Handle a successful run.
+ * @param {Construct} construct
+ * @param {Info} info
+ * @returns {void}
+ */
+ import {markdownLineEnding} from 'micromark-util-character'
+ import {push, splice} from 'micromark-util-chunked'
+ import {resolveAll} from 'micromark-util-resolve-all'
+
+ /**
+ * Create a tokenizer.
+ * Tokenizers deal with one type of data (e.g., containers, flow, text).
+ * The parser is the object dealing with it all.
+ * `initialize` works like other constructs, except that only its `tokenize`
+ * function is used, in which case it doesn’t receive an `ok` or `nok`.
+ * `from` can be given to set the point before the first character, although
+ * when further lines are indented, they must be set with `defineSkip`.
+ *
+ * @param {ParseContext} parser
+ * @param {InitialConstruct} initialize
+ * @param {Omit} [from]
+ * @returns {TokenizeContext}
+ */
+ export function createTokenizer(parser, initialize, from) {
+ /** @type {Point} */
+ let point = Object.assign(
+ from
+ ? Object.assign({}, from)
+ : {
+ line: 1,
+ column: 1,
+ offset: 0
+ },
+ {
+ _index: 0,
+ _bufferIndex: -1
+ }
+ )
+ /** @type {Record} */
+
+ const columnStart = {}
+ /** @type {Construct[]} */
+
+ const resolveAllConstructs = []
+ /** @type {Chunk[]} */
+
+ let chunks = []
+ /** @type {Token[]} */
+
+ let stack = []
+ /** @type {boolean|undefined} */
+
+ let consumed = true
+ /**
+ * Tools used for tokenizing.
+ *
+ * @type {Effects}
+ */
+
+ const effects = {
+ consume,
+ enter,
+ exit,
+ attempt: constructFactory(onsuccessfulconstruct),
+ check: constructFactory(onsuccessfulcheck),
+ interrupt: constructFactory(onsuccessfulcheck, {
+ interrupt: true
+ })
+ }
+ /**
+ * State and tools for resolving and serializing.
+ *
+ * @type {TokenizeContext}
+ */
+
+ const context = {
+ previous: null,
+ code: null,
+ containerState: {},
+ events: [],
+ parser,
+ sliceStream,
+ sliceSerialize,
+ now,
+ defineSkip,
+ write
+ }
+ /**
+ * The state function.
+ *
+ * @type {State|void}
+ */
+
+ let state = initialize.tokenize.call(context, effects)
+ /**
+ * Track which character we expect to be consumed, to catch bugs.
+ *
+ * @type {Code}
+ */
+
+ let expectedCode
+
+ if (initialize.resolveAll) {
+ resolveAllConstructs.push(initialize)
+ }
+
+ return context
+ /** @type {TokenizeContext['write']} */
+
+ function write(slice) {
+ chunks = push(chunks, slice)
+ main() // Exit if we’re not done, resolve might change stuff.
+
+ if (chunks[chunks.length - 1] !== null) {
+ return []
+ }
+
+ addResult(initialize, 0) // Otherwise, resolve, and exit.
+
+ context.events = resolveAll(resolveAllConstructs, context.events, context)
+ return context.events
+ } //
+ // Tools.
+ //
+
+ /** @type {TokenizeContext['sliceSerialize']} */
+
+ function sliceSerialize(token, expandTabs) {
+ return serializeChunks(sliceStream(token), expandTabs)
+ }
+ /** @type {TokenizeContext['sliceStream']} */
+
+ function sliceStream(token) {
+ return sliceChunks(chunks, token)
+ }
+ /** @type {TokenizeContext['now']} */
+
+ function now() {
+ return Object.assign({}, point)
+ }
+ /** @type {TokenizeContext['defineSkip']} */
+
+ function defineSkip(value) {
+ columnStart[value.line] = value.column
+ accountForPotentialSkip()
+ } //
+ // State management.
+ //
+
+ /**
+ * Main loop (note that `_index` and `_bufferIndex` in `point` are modified by
+ * `consume`).
+ * Here is where we walk through the chunks, which either include strings of
+ * several characters, or numerical character codes.
+ * The reason to do this in a loop instead of a call is so the stack can
+ * drain.
+ *
+ * @returns {void}
+ */
+
+ function main() {
+ /** @type {number} */
+ let chunkIndex
+
+ while (point._index < chunks.length) {
+ const chunk = chunks[point._index] // If we’re in a buffer chunk, loop through it.
+
+ if (typeof chunk === 'string') {
+ chunkIndex = point._index
+
+ if (point._bufferIndex < 0) {
+ point._bufferIndex = 0
+ }
+
+ while (
+ point._index === chunkIndex &&
+ point._bufferIndex < chunk.length
+ ) {
+ go(chunk.charCodeAt(point._bufferIndex))
+ }
+ } else {
+ go(chunk)
+ }
+ }
+ }
+ /**
+ * Deal with one code.
+ *
+ * @param {Code} code
+ * @returns {void}
+ */
+
+ function go(code) {
+ consumed = undefined
+ expectedCode = code
+ state = state(code)
+ }
+ /** @type {Effects['consume']} */
+
+ function consume(code) {
+ if (markdownLineEnding(code)) {
+ point.line++
+ point.column = 1
+ point.offset += code === -3 ? 2 : 1
+ accountForPotentialSkip()
+ } else if (code !== -1) {
+ point.column++
+ point.offset++
+ } // Not in a string chunk.
+
+ if (point._bufferIndex < 0) {
+ point._index++
+ } else {
+ point._bufferIndex++ // At end of string chunk.
+ // @ts-expect-error Points w/ non-negative `_bufferIndex` reference
+ // strings.
+
+ if (point._bufferIndex === chunks[point._index].length) {
+ point._bufferIndex = -1
+ point._index++
+ }
+ } // Expose the previous character.
+
+ context.previous = code // Mark as consumed.
+
+ consumed = true
+ }
+ /** @type {Effects['enter']} */
+
+ function enter(type, fields) {
+ /** @type {Token} */
+ // @ts-expect-error Patch instead of assign required fields to help GC.
+ const token = fields || {}
+ token.type = type
+ token.start = now()
+ context.events.push(['enter', token, context])
+ stack.push(token)
+ return token
+ }
+ /** @type {Effects['exit']} */
+
+ function exit(type) {
+ const token = stack.pop()
+ token.end = now()
+ context.events.push(['exit', token, context])
+ return token
+ }
+ /**
+ * Use results.
+ *
+ * @type {ReturnHandle}
+ */
+
+ function onsuccessfulconstruct(construct, info) {
+ addResult(construct, info.from)
+ }
+ /**
+ * Discard results.
+ *
+ * @type {ReturnHandle}
+ */
+
+ function onsuccessfulcheck(_, info) {
+ info.restore()
+ }
+ /**
+ * Factory to attempt/check/interrupt.
+ *
+ * @param {ReturnHandle} onreturn
+ * @param {Record} [fields]
+ */
+
+ function constructFactory(onreturn, fields) {
+ return hook
+ /**
+ * Handle either an object mapping codes to constructs, a list of
+ * constructs, or a single construct.
+ *
+ * @param {Construct|Construct[]|ConstructRecord} constructs
+ * @param {State} returnState
+ * @param {State} [bogusState]
+ * @returns {State}
+ */
+
+ function hook(constructs, returnState, bogusState) {
+ /** @type {Construct[]} */
+ let listOfConstructs
+ /** @type {number} */
+
+ let constructIndex
+ /** @type {Construct} */
+
+ let currentConstruct
+ /** @type {Info} */
+
+ let info
+ return Array.isArray(constructs)
+ ? /* c8 ignore next 1 */
+ handleListOfConstructs(constructs)
+ : 'tokenize' in constructs // @ts-expect-error Looks like a construct.
+ ? handleListOfConstructs([constructs])
+ : handleMapOfConstructs(constructs)
+ /**
+ * Handle a list of construct.
+ *
+ * @param {ConstructRecord} map
+ * @returns {State}
+ */
+
+ function handleMapOfConstructs(map) {
+ return start
+ /** @type {State} */
+
+ function start(code) {
+ const def = code !== null && map[code]
+ const all = code !== null && map.null
+ const list = [
+ // To do: add more extension tests.
+
+ /* c8 ignore next 2 */
+ ...(Array.isArray(def) ? def : def ? [def] : []),
+ ...(Array.isArray(all) ? all : all ? [all] : [])
+ ]
+ return handleListOfConstructs(list)(code)
+ }
+ }
+ /**
+ * Handle a list of construct.
+ *
+ * @param {Construct[]} list
+ * @returns {State}
+ */
+
+ function handleListOfConstructs(list) {
+ listOfConstructs = list
+ constructIndex = 0
+
+ if (list.length === 0) {
+ return bogusState
+ }
+
+ return handleConstruct(list[constructIndex])
+ }
+ /**
+ * Handle a single construct.
+ *
+ * @param {Construct} construct
+ * @returns {State}
+ */
+
+ function handleConstruct(construct) {
+ return start
+ /** @type {State} */
+
+ function start(code) {
+ // To do: not needed to store if there is no bogus state, probably?
+ // Currently doesn’t work because `inspect` in document does a check
+ // w/o a bogus, which doesn’t make sense. But it does seem to help perf
+ // by not storing.
+ info = store()
+ currentConstruct = construct
+
+ if (!construct.partial) {
+ context.currentConstruct = construct
+ }
+
+ if (
+ construct.name &&
+ context.parser.constructs.disable.null.includes(construct.name)
+ ) {
+ return nok(code)
+ }
+
+ return construct.tokenize.call(
+ // If we do have fields, create an object w/ `context` as its
+ // prototype.
+ // This allows a “live binding”, which is needed for `interrupt`.
+ fields ? Object.assign(Object.create(context), fields) : context,
+ effects,
+ ok,
+ nok
+ )(code)
+ }
+ }
+ /** @type {State} */
+
+ function ok(code) {
+ consumed = true
+ onreturn(currentConstruct, info)
+ return returnState
+ }
+ /** @type {State} */
+
+ function nok(code) {
+ consumed = true
+ info.restore()
+
+ if (++constructIndex < listOfConstructs.length) {
+ return handleConstruct(listOfConstructs[constructIndex])
+ }
+
+ return bogusState
+ }
+ }
+ }
+ /**
+ * @param {Construct} construct
+ * @param {number} from
+ * @returns {void}
+ */
+
+ function addResult(construct, from) {
+ if (construct.resolveAll && !resolveAllConstructs.includes(construct)) {
+ resolveAllConstructs.push(construct)
+ }
+
+ if (construct.resolve) {
+ splice(
+ context.events,
+ from,
+ context.events.length - from,
+ construct.resolve(context.events.slice(from), context)
+ )
+ }
+
+ if (construct.resolveTo) {
+ context.events = construct.resolveTo(context.events, context)
+ }
+ }
+ /**
+ * Store state.
+ *
+ * @returns {Info}
+ */
+
+ function store() {
+ const startPoint = now()
+ const startPrevious = context.previous
+ const startCurrentConstruct = context.currentConstruct
+ const startEventsIndex = context.events.length
+ const startStack = Array.from(stack)
+ return {
+ restore,
+ from: startEventsIndex
+ }
+ /**
+ * Restore state.
+ *
+ * @returns {void}
+ */
+
+ function restore() {
+ point = startPoint
+ context.previous = startPrevious
+ context.currentConstruct = startCurrentConstruct
+ context.events.length = startEventsIndex
+ stack = startStack
+ accountForPotentialSkip()
+ }
+ }
+ /**
+ * Move the current point a bit forward in the line when it’s on a column
+ * skip.
+ *
+ * @returns {void}
+ */
+
+ function accountForPotentialSkip() {
+ if (point.line in columnStart && point.column < 2) {
+ point.column = columnStart[point.line]
+ point.offset += columnStart[point.line] - 1
+ }
+ }
+ }
+ /**
+ * Get the chunks from a slice of chunks in the range of a token.
+ *
+ * @param {Chunk[]} chunks
+ * @param {Pick} token
+ * @returns {Chunk[]}
+ */
+
+ function sliceChunks(chunks, token) {
+ const startIndex = token.start._index
+ const startBufferIndex = token.start._bufferIndex
+ const endIndex = token.end._index
+ const endBufferIndex = token.end._bufferIndex
+ /** @type {Chunk[]} */
+
+ let view
+
+ if (startIndex === endIndex) {
+ // @ts-expect-error `_bufferIndex` is used on string chunks.
+ view = [chunks[startIndex].slice(startBufferIndex, endBufferIndex)]
+ } else {
+ view = chunks.slice(startIndex, endIndex)
+
+ if (startBufferIndex > -1) {
+ // @ts-expect-error `_bufferIndex` is used on string chunks.
+ view[0] = view[0].slice(startBufferIndex)
+ }
+
+ if (endBufferIndex > 0) {
+ // @ts-expect-error `_bufferIndex` is used on string chunks.
+ view.push(chunks[endIndex].slice(0, endBufferIndex))
+ }
+ }
+
+ return view
+ }
+ /**
+ * Get the string value of a slice of chunks.
+ *
+ * @param {Chunk[]} chunks
+ * @param {boolean} [expandTabs=false]
+ * @returns {string}
+ */
+
+ function serializeChunks(chunks, expandTabs) {
+ let index = -1
+ /** @type {string[]} */
+
+ const result = []
+ /** @type {boolean|undefined} */
+
+ let atTab
+
+ while (++index < chunks.length) {
+ const chunk = chunks[index]
+ /** @type {string} */
+
+ let value
+
+ if (typeof chunk === 'string') {
+ value = chunk
+ } else
+ switch (chunk) {
+ case -5: {
+ value = '\r'
+ break
+ }
+
+ case -4: {
+ value = '\n'
+ break
+ }
+
+ case -3: {
+ value = '\r' + '\n'
+ break
+ }
+
+ case -2: {
+ value = expandTabs ? ' ' : '\t'
+ break
+ }
+
+ case -1: {
+ if (!expandTabs && atTab) continue
+ value = ' '
+ break
+ }
+
+ default: {
+ // Currently only replacement character.
+ value = String.fromCharCode(chunk)
+ }
+ }
+
+ atTab = chunk === -2
+ result.push(value)
+ }
+
+ return result.join('')
+ }
+
\ No newline at end of file
diff --git a/docs/transformers/csv/from-csv.ts b/docs/transformers/csv/from-csv.ts
new file mode 100644
index 00000000..964b3136
--- /dev/null
+++ b/docs/transformers/csv/from-csv.ts
@@ -0,0 +1,271 @@
+// Based on mdast-util-from-markdown
+// See: https://github.com/syntax-tree/mdast-util-from-markdown/blob/05875cde264253f0d6a725791f10f55eb8d8c267/dev/lib/index.js
+import { toString } from 'mdast-util-to-string'
+import { preprocess } from 'micromark/lib/preprocess.js'
+import { postprocess } from 'micromark/lib/postprocess.js'
+import { stringifyPosition } from 'unist-util-stringify-position'
+import type { Event, Point as MPoint, Token } from 'micromark-util-types'
+import { parse } from './parser'
+
+type Point = Omit
+type Node = {
+ type: string
+ children: Array
+ position?: {
+ start?: Point
+ end?: Point
+ }
+ value?: string
+}
+
+const own = {}.hasOwnProperty
+
+const initialPoint: Point = {
+ line: 1,
+ column: 1,
+ offset: 0,
+}
+
+export const fromCSV = function (value, encoding?, options?) {
+ if (typeof encoding !== 'string') {
+ options = encoding
+ encoding = undefined
+ }
+
+ return compiler()(
+ postprocess(
+ parse(options).write(preprocess()(value, encoding, true)),
+ ),
+ )
+}
+
+function compiler() {
+ const config = {
+ enter: {
+ column: opener(openColumn),
+ row: opener(openRow),
+ data: onenterdata,
+ quotedData: onenterdata,
+ },
+ exit: {
+ row: closer(),
+ column: closer(),
+ data: onexitdata,
+ quotedData: onexitQuotedData,
+ },
+ }
+
+ return compile
+
+ function compile(events: Array) {
+ const tree: Node = {
+ type: 'root',
+ children: [],
+ }
+
+ const stack = [tree]
+
+ const tokenStack = []
+
+ const context = {
+ stack,
+ tokenStack,
+ config,
+ enter,
+ exit,
+ resume,
+ }
+
+ let index = -1
+
+ while (++index < events.length) {
+ const handler = config[events[index][0]]
+
+ if (own.call(handler, events[index][1].type)) {
+ handler[events[index][1].type].call(
+ Object.assign(
+ {
+ sliceSerialize: events[index][2].sliceSerialize,
+ },
+ context,
+ ),
+ events[index][1],
+ )
+ }
+ }
+
+ if (tokenStack.length > 0) {
+ const tail: Function = tokenStack[tokenStack.length - 1]
+ const handler = tail[1] || defaultOnError
+ handler.call(context, undefined, tail[0])
+ } // Figure out `root` position.
+
+ tree.position = {
+ start: point(
+ events.length > 0 ? events[0][1].start : initialPoint,
+ ),
+ end: point(
+ events.length > 0 ? events[events.length - 2][1].end : initialPoint,
+ ),
+ }
+
+ return tree
+ }
+
+ function point(d: Point): Point {
+ return {
+ line: d.line,
+ column: d.column,
+ offset: d.offset,
+ }
+ }
+
+ function opener(create, and?) {
+ return open
+
+ function open(token: Token) {
+ enter.call(this, create(token), token)
+ if (and)
+ and.call(this, token)
+ }
+ }
+
+ function enter(node: Node, token: Token, errorHandler) {
+ const parent = this.stack[this.stack.length - 1]
+ parent.children.push(node)
+ this.stack.push(node)
+ this.tokenStack.push([token, errorHandler])
+
+ node.position = {
+ start: point(token.start),
+ }
+ return node
+ }
+
+ function closer(and?) {
+ return close
+
+ function close(token: Token) {
+ if (and)
+ and.call(this, token)
+ exit.call(this, token)
+ }
+ }
+
+ function exit(token: Token, onExitError) {
+ const node = this.stack.pop()
+ const open = this.tokenStack.pop()
+
+ if (!open) {
+ throw new Error(
+ `Cannot close \`${
+ token.type
+ }\` (${
+ stringifyPosition({
+ start: token.start,
+ end: token.end,
+ })
+ }): it’s not open`,
+ )
+ }
+ else if (open[0].type !== token.type) {
+ if (onExitError) {
+ onExitError.call(this, token, open[0])
+ }
+ else {
+ const handler = open[1] || defaultOnError
+ handler.call(this, token, open[0])
+ }
+ }
+ node.position.end = point(token.end)
+ return node
+ }
+
+ function resume() {
+ return toString(this.stack.pop())
+ }
+
+ function onenterdata(token: Token) {
+ const parent = this.stack[this.stack.length - 1]
+
+ let tail = parent.children[parent.children.length - 1]
+
+ if (!tail || tail.type !== 'text') {
+ // Add a new text node.
+ tail = text()
+
+ tail.position = {
+ start: point(token.start),
+ }
+
+ parent.children.push(tail)
+ }
+ this.stack.push(tail)
+ }
+
+ function onexitdata(token: Token) {
+ const tail = this.stack.pop()
+ tail.value += this.sliceSerialize(token).trim().replace(/""/g, '"')
+ tail.position.end = point(token.end)
+ }
+ function onexitQuotedData(token: Token) {
+ const tail = this.stack.pop()
+ const value = this.sliceSerialize(token)
+ tail.value += this.sliceSerialize(token).trim().substring(1, value.length - 1).replace(/""/g, '"')
+ tail.position.end = point(token.end)
+ }
+
+ function text() {
+ return {
+ type: 'text',
+ value: '',
+ }
+ }
+
+ function openColumn() {
+ return {
+ type: 'column',
+ children: [],
+ }
+ }
+ function openRow() {
+ return {
+ type: 'row',
+ children: [],
+ }
+ }
+}
+
+function defaultOnError(left, right) {
+ if (left) {
+ throw new Error(
+ `Cannot close \`${
+ left.type
+ }\` (${
+ stringifyPosition({
+ start: left.start,
+ end: left.end,
+ })
+ }): a different token (\`${
+ right.type
+ }\`, ${
+ stringifyPosition({
+ start: right.start,
+ end: right.end,
+ })
+ }) is open`,
+ )
+ }
+ else {
+ throw new Error(
+ `Cannot close document, a token (\`${
+ right.type
+ }\`, ${
+ stringifyPosition({
+ start: right.start,
+ end: right.end,
+ })
+ }) is still open`,
+ )
+ }
+}
diff --git a/docs/transformers/csv/index.ts b/docs/transformers/csv/index.ts
new file mode 100644
index 00000000..2443bfca
--- /dev/null
+++ b/docs/transformers/csv/index.ts
@@ -0,0 +1,60 @@
+import { unified } from 'unified'
+import type { ParsedContent } from '../../types'
+import { defineTransformer } from '../utils'
+import { fromCSV } from './from-csv'
+
+function csvParse(options) {
+ const parser = (doc) => {
+ return fromCSV(doc, options)
+ }
+
+ Object.assign(this, { Parser: parser })
+
+ const toJsonObject = (tree) => {
+ const [header, ...rows] = tree.children
+ const columns = header.children.map(col => col.children[0].value)
+
+ const data = rows.map((row) => {
+ return row.children.reduce((acc, col, i) => {
+ acc[String(columns[i])] = col.children[0]?.value
+ return acc
+ }, {})
+ })
+ return data
+ }
+
+ const toJsonArray = (tree) => {
+ const data = tree.children.map((row) => {
+ return row.children.map(col => col.children[0]?.value)
+ })
+ return data
+ }
+
+ const compiler = (doc) => {
+ if (options.json)
+ return toJsonObject(doc)
+
+ return toJsonArray(doc)
+ }
+
+ Object.assign(this, { Compiler: compiler })
+}
+
+export default defineTransformer({
+ name: 'csv',
+ extensions: ['.csv'],
+ parse: async (_id, content, options = {}) => {
+ const stream = unified().use(csvParse, {
+ delimiter: ',',
+ json: true,
+ ...options,
+ })
+ const { result } = await stream.process(content)
+
+ return {
+ _id,
+ _type: 'csv',
+ body: result,
+ }
+ },
+})
diff --git a/docs/transformers/csv/parser.ts b/docs/transformers/csv/parser.ts
new file mode 100644
index 00000000..fc78430d
--- /dev/null
+++ b/docs/transformers/csv/parser.ts
@@ -0,0 +1,180 @@
+import type { Code, Effects, State, TokenizeContext } from 'micromark-util-types'
+import { markdownLineEnding, markdownSpace } from 'micromark-util-character'
+import { createTokenizer } from './create-tokenizer'
+
+function initializeDocument(this: TokenizeContext, effects: Effects) {
+ // eslint-disable-next-line @typescript-eslint/no-this-alias
+ const self = this
+ const delimiter = ((this.parser as any).delimiter || ',').charCodeAt(0)
+
+ return enterRow
+
+ function enterRow(code: Code): State | void {
+ return effects.attempt(
+ { tokenize: attemptLastLine },
+ (code) => {
+ effects.consume(code)
+ return enterRow
+ },
+ (code) => {
+ effects.enter('row')
+ return enterColumn(code)
+ },
+ )(code)
+ }
+
+ function enterColumn(code: Code): State | void {
+ effects.enter('column')
+ return content(code)
+ }
+
+ function content(code: Code): State | void {
+ if (code === null) {
+ effects.exit('column')
+ effects.exit('row')
+ effects.consume(code)
+ return content
+ }
+ if (code === 34 /** " */)
+ return quotedData(code)
+
+ if (code === delimiter) {
+ // Handle:
+ // - "1,,3,4"
+ // - ",2,3,4"
+ if (self.previous === delimiter || markdownLineEnding(self.previous) || self.previous === null) {
+ effects.enter('data')
+ effects.exit('data')
+ }
+ effects.exit('column')
+ effects.enter('columnSeparator')
+ effects.consume(code)
+ effects.exit('columnSeparator')
+ effects.enter('column')
+ return content
+ }
+ if (markdownLineEnding(code)) {
+ effects.exit('column')
+ effects.enter('newline')
+ effects.consume(code)
+ effects.exit('newline')
+ effects.exit('row')
+
+ return enterRow
+ }
+ return data(code)
+ }
+
+ // data
+ function data(code: Code): State | void {
+ effects.enter('data')
+ return dataChunk(code)
+ }
+
+ function dataChunk(code: Code): State | void {
+ if (code === null || markdownLineEnding(code) || code === delimiter) {
+ effects.exit('data')
+ return content(code)
+ }
+ if (code === 92 /** \ */)
+ return escapeCharacter(code)
+
+ effects.consume(code)
+ return dataChunk
+ }
+
+ function escapeCharacter(code: Code): State | void {
+ effects.consume(code)
+ return function (code: Code): State | void {
+ effects.consume(code)
+ return content
+ }
+ }
+
+ function quotedData(code: Code): State | void {
+ effects.enter('quotedData')
+ effects.enter('quotedDataChunk')
+ effects.consume(code)
+
+ return quotedDataChunk
+ }
+
+ function quotedDataChunk(code: Code): State | void {
+ if (code === 92 /** \ */)
+ return escapeCharacter(code)
+
+ if (code === 34) {
+ return effects.attempt(
+ { tokenize: attemptDoubleQuote },
+ (code: Code): State | void => {
+ effects.exit('quotedDataChunk')
+ effects.enter('quotedDataChunk')
+ return quotedDataChunk(code)
+ },
+ (code: Code): State | void => {
+ effects.consume(code)
+ effects.exit('quotedDataChunk')
+ effects.exit('quotedData')
+
+ return content
+ },
+ )(code)
+ }
+ effects.consume(code)
+ return quotedDataChunk
+ }
+}
+
+function attemptDoubleQuote(effects: Effects, ok: State, nok: State) {
+ return startSequence
+
+ function startSequence(code: Code): State | void {
+ if (code !== 34)
+ return nok(code)
+
+ effects.enter('quoteFence')
+ effects.consume(code)
+ return sequence
+ }
+
+ function sequence(code: Code): State | void {
+ if (code !== 34)
+ return nok(code)
+
+ effects.consume(code)
+ effects.exit('quoteFence')
+ return (code: Code): State | void => ok(code)
+ }
+}
+
+function attemptLastLine(effects: Effects, ok: State, nok: State) {
+ return enterLine
+
+ function enterLine(code: Code): State | void {
+ if (!markdownSpace(code) && code !== null)
+ return nok(code)
+
+ effects.enter('emptyLine')
+ return continueLine(code)
+ }
+
+ function continueLine(code: Code): State | void {
+ if (markdownSpace(code)) {
+ effects.consume(code)
+ return continueLine
+ }
+ if (code === null) {
+ effects.exit('emptyLine')
+ return ok(code)
+ }
+ return nok(code)
+ }
+}
+
+export const parse = (options: any) => {
+ return createTokenizer(
+ { ...options },
+ { tokenize: initializeDocument },
+ undefined,
+ )
+}
diff --git a/docs/transformers/index.ts b/docs/transformers/index.ts
new file mode 100644
index 00000000..2640a4e8
--- /dev/null
+++ b/docs/transformers/index.ts
@@ -0,0 +1,69 @@
+import { extname } from 'pathe'
+import { camelCase } from 'scule'
+import type { ContentTransformer, TransformContentOptions } from '../types'
+import csv from './csv'
+import markdown from './markdown'
+import yaml from './yaml'
+import pathMeta from './path-meta'
+import shiki from './shiki'
+import json from './json'
+
+const TRANSFORMERS = [
+ csv,
+ markdown,
+ json,
+ yaml,
+ shiki,
+ pathMeta,
+]
+
+function getParser(ext: string, additionalTransformers: ContentTransformer[] = []): ContentTransformer | undefined {
+ let parser = additionalTransformers.find(p => ext.match(new RegExp(p.extensions.join('|'), 'i')) && p.parse)
+ if (!parser)
+ parser = TRANSFORMERS.find(p => ext.match(new RegExp(p.extensions.join('|'), 'i')) && p.parse)
+
+ return parser
+}
+
+function getTransformers(ext: string, additionalTransformers: ContentTransformer[] = []) {
+ return [
+ ...additionalTransformers.filter(p => ext.match(new RegExp(p.extensions.join('|'), 'i')) && p.transform),
+ ...TRANSFORMERS.filter(p => ext.match(new RegExp(p.extensions.join('|'), 'i')) && p.transform),
+ ]
+}
+
+/**
+ * Parse a content file with the matching registered parser, then run it through all matching transformers.
+ */
+export async function transformContent(id: string, content: string, options: TransformContentOptions = {}) {
+ const { transformers = [] } = options
+ // Call hook before parsing the file
+ const file = { _id: id, body: content }
+
+ const ext = extname(id)
+ const parser = getParser(ext, transformers)
+ if (!parser) {
+ console.warn(`${ext} files are not supported, "${id}" falling back to raw content`)
+ return file
+ }
+
+ const parserOptions = options[camelCase(parser.name)] || {}
+ const parsed = await parser.parse!(file._id, file.body, parserOptions)
+
+ const matchedTransformers = getTransformers(ext, transformers)
+ const result = await matchedTransformers.reduce(async (prev, cur) => {
+ const next = (await prev) || parsed
+
+ const transformOptions = options[camelCase(cur.name)]
+
+ // disable transformer if options is false
+ if (transformOptions === false)
+ return next
+
+ return cur.transform!(next, transformOptions || {})
+ }, Promise.resolve(parsed))
+
+ return result
+}
+
+export { defineTransformer } from './utils'
diff --git a/docs/transformers/json.ts b/docs/transformers/json.ts
new file mode 100644
index 00000000..d175bbcd
--- /dev/null
+++ b/docs/transformers/json.ts
@@ -0,0 +1,38 @@
+import destr from 'destr'
+import type { ParsedContent } from '../types'
+import { defineTransformer } from './utils'
+
+export default defineTransformer({
+ name: 'Json',
+ extensions: ['.json', '.json5'],
+ parse: async (_id, content) => {
+ let parsed
+
+ if (typeof content === 'string') {
+ if (_id.endsWith('json5')) {
+ parsed = (await import('json5').then(m => m.default || m))
+ .parse(content)
+ }
+ else if (_id.endsWith('json')) {
+ parsed = destr(content)
+ }
+ }
+ else {
+ parsed = content
+ }
+
+ // Keep array contents under `body` key
+ if (Array.isArray(parsed)) {
+ console.warn(`JSON array is not supported in ${_id}, moving the array into the \`body\` key`)
+ parsed = {
+ body: parsed,
+ }
+ }
+
+ return {
+ ...parsed,
+ _id,
+ _type: 'json',
+ }
+ },
+})
diff --git a/docs/transformers/markdown.ts b/docs/transformers/markdown.ts
new file mode 100644
index 00000000..6fc324f8
--- /dev/null
+++ b/docs/transformers/markdown.ts
@@ -0,0 +1,38 @@
+import { parse } from '../markdown-parser'
+import type { MarkdownOptions, MarkdownParsedContent, MarkdownPlugin } from '../types'
+import { defineTransformer } from './utils'
+
+export default defineTransformer({
+ name: 'markdown',
+ extensions: ['.md'],
+ parse: async (_id, content, options = {}) => {
+ const config = { ...options } as MarkdownOptions
+ config.rehypePlugins = await importPlugins(config.rehypePlugins)
+ config.remarkPlugins = await importPlugins(config.remarkPlugins)
+
+ const parsed = await parse(content, config)
+
+ return {
+ ...parsed.meta,
+ body: parsed.body,
+ _type: 'markdown',
+ _id,
+ }
+ },
+})
+
+async function importPlugins(plugins: Record = {}) {
+ const resolvedPlugins: Record = {}
+ for (const [name, plugin] of Object.entries(plugins)) {
+ if (plugin) {
+ resolvedPlugins[name] = {
+ instance: plugin.instance || await import(/* @vite-ignore */ name).then(m => m.default || m),
+ ...plugin,
+ }
+ }
+ else {
+ resolvedPlugins[name] = false
+ }
+ }
+ return resolvedPlugins
+}
diff --git a/docs/transformers/path-meta.ts b/docs/transformers/path-meta.ts
new file mode 100644
index 00000000..021da118
--- /dev/null
+++ b/docs/transformers/path-meta.ts
@@ -0,0 +1,103 @@
+import { pascalCase } from 'scule'
+import slugify from 'slugify'
+import { withLeadingSlash, withoutTrailingSlash } from 'ufo'
+import type { ParsedContent } from '../types'
+import { defineTransformer } from './utils'
+
+const SEMVER_REGEX = /^(\d+)(\.\d+)*(\.x)?$/
+
+const describeId = (_id: string) => {
+ const [_source, ...parts] = _id.split(':')
+
+ const [, filename, _extension] = parts[parts.length - 1].match(/(.*)\.([^.]+)$/)
+ parts[parts.length - 1] = filename
+ const _path = parts.join('/')
+
+ return {
+ _source,
+ _path,
+ _extension,
+ _file: _extension ? `${_path}.${_extension}` : _path,
+ }
+}
+
+export default defineTransformer({
+ name: 'path-meta',
+ extensions: ['.*'],
+ transform(content, options: any = {}) {
+ const { locales = [], defaultLocale = 'en' } = options
+ const { _source, _file, _path, _extension } = describeId(content._id)
+ const parts = _path.split('/')
+
+ // Check first part for locale name
+ const _locale = locales.includes(parts[0]) ? parts.shift() : defaultLocale
+
+ const filePath = generatePath(parts.join('/'))
+
+ return {
+ _path: filePath,
+ _dir: filePath.split('/').slice(-2)[0],
+ _draft: isDraft(_path),
+ _partial: isPartial(_path),
+ _locale,
+ ...content,
+ // TODO: move title to Markdown parser
+ title: content.title || generateTitle(refineUrlPart(parts[parts.length - 1])),
+ _source,
+ _file,
+ _extension,
+ }
+ },
+})
+
+/**
+ * When the file name ends with `.draft`, the content is marked as a draft.
+ */
+const isDraft = (path: string): boolean => !!path.match(/\.draft(\/|\.|$)/)
+
+/**
+ * Files or directories whose name starts with an underscore `_` are marked as partial content.
+ */
+const isPartial = (path: string): boolean => path.split(/[:/]/).some(part => part.match(/^_.*/))
+
+/**
+ * Generate path from file name
+ *
+ * @param path file full path
+ * @returns generated slug
+ */
+export const generatePath = (path: string, { forceLeadingSlash = true } = {}): string => {
+ path = path.split('/').map(part => slugify(refineUrlPart(part), { lower: true })).join('/')
+ return forceLeadingSlash ? withLeadingSlash(withoutTrailingSlash(path)) : path
+}
+
+/**
+ * generate title from file path
+ */
+export const generateTitle = (path: string) => path.split(/[\s-]/g).map(pascalCase).join(' ')
+
+/**
+ * Clean up special keywords from path part
+ */
+export function refineUrlPart(name: string): string {
+ name = name.split(/[/:]/).pop()!
+ // Matches 1, 1.2, 1.x, 1.2.x, 1.2.3.x
+ if (SEMVER_REGEX.test(name))
+ return name
+
+ return (
+ name
+ /**
+ * Remove numbering
+ */
+ .replace(/(\d+\.)?(.*)/, '$2')
+ /**
+ * Remove index keyword
+ */
+ .replace(/^index(\.draft)?$/, '')
+ /**
+ * Remove draft keyword
+ */
+ .replace(/\.draft$/, '')
+ )
+}
diff --git a/docs/transformers/shiki/highlighter.ts b/docs/transformers/shiki/highlighter.ts
new file mode 100644
index 00000000..3a3cbce7
--- /dev/null
+++ b/docs/transformers/shiki/highlighter.ts
@@ -0,0 +1,270 @@
+import type { Highlighter, Lang, Theme as ShikiTheme } from 'shiki-es'
+import { BUNDLED_LANGUAGES, BUNDLED_THEMES, getHighlighter } from 'shiki-es'
+import consola from 'consola'
+import type { ModuleOptions } from '../../module'
+import { createSingleton } from '../utils'
+import mdcTMLanguage from './languages/mdc.tmLanguage.json'
+import type { HighlightThemedToken, HighlightThemedTokenLine, HighlighterOptions, MarkdownNode, Theme, TokenColorMap } from './types'
+
+// Re-create logger locally as utils cannot be imported from here
+const logger = consola.withScope('@nuxt/content')
+
+/**
+ * Resolve Shiki compatible lang from string.
+ *
+ * Used to resolve the lang from both language ids and aliases.
+ */
+const resolveLang = (lang: string) =>
+ (BUNDLED_LANGUAGES.find(l => l.id === lang || l.aliases?.includes(lang)))
+
+/**
+ * Resolve Shiki compatible theme from string.
+ */
+const resolveTheme = (theme: string | Record): Record | undefined => {
+ if (!theme)
+ return
+
+ if (typeof theme === 'string') {
+ theme = {
+ default: theme,
+ }
+ }
+
+ return Object.entries(theme).reduce((acc, [key, value]) => {
+ acc[key] = BUNDLED_THEMES.find(t => t === value)!
+ return acc
+ }, {} as Record)
+}
+
+export const useShikiHighlighter = createSingleton((opts?: Exclude) => {
+ // Grab highlighter config from publicRuntimeConfig
+ const { theme, preload } = opts || {}
+
+ let promise: Promise | undefined
+ const getShikiHighlighter = () => {
+ if (!promise) {
+ // Initialize highlighter with defaults
+ promise = getHighlighter({
+ theme: (theme as any)?.default || theme || 'dark-plus',
+ langs: [
+ ...(preload || []),
+ 'diff',
+ 'json',
+ 'js',
+ 'ts',
+ 'css',
+ 'shell',
+ 'html',
+ 'md',
+ 'yaml',
+ 'vue',
+ {
+ id: 'md',
+ scopeName: 'text.markdown.mdc',
+ path: 'mdc.tmLanguage.json',
+ aliases: ['markdown', 'md', 'mdc'],
+ grammar: mdcTMLanguage,
+ },
+ ] as any[],
+ }).then((highlighter) => {
+ // Load all themes on-demand
+ const themes = Object.values(typeof theme === 'string' ? { default: theme } : (theme || {}))
+
+ if (themes.length) {
+ return Promise
+ .all(themes.map(theme => highlighter.loadTheme(theme)))
+ .then(() => highlighter)
+ }
+ return highlighter
+ })
+ }
+ return promise
+ }
+
+ const getHighlightedTokens = async (code: string, lang: Lang, theme: Theme) => {
+ const highlighter = await getShikiHighlighter()
+ // Remove trailing newlines
+ code = code.replace(/\n+$/, '')
+ // Resolve lang & theme (i.e check if shiki supports them)
+ lang = (resolveLang(lang || '')?.id || lang) as Lang
+ theme = resolveTheme(theme || '') || { default: highlighter.getTheme() as any as ShikiTheme }
+
+ // Skip highlight if lang is not supported
+ if (!lang)
+ return [[{ content: code }]]
+
+ // Load supported language on-demand
+ if (!highlighter.getLoadedLanguages().includes(lang)) {
+ const languageRegistration = resolveLang(lang)
+
+ if (languageRegistration) {
+ await highlighter.loadLanguage(languageRegistration)
+ }
+ else {
+ logger.warn(`Language '${lang}' is not supported by shiki. Skipping highlight.`)
+ return [[{ content: code }]]
+ }
+ }
+
+ // Load supported theme on-demand
+ const newThemes = Object.values(theme).filter(t => !highlighter.getLoadedThemes().includes(t))
+ if (newThemes.length)
+ await Promise.all(newThemes.map(highlighter.loadTheme))
+
+ // Highlight code
+ const coloredTokens = Object.entries(theme).map(([key, theme]) => {
+ const tokens = highlighter.codeToThemedTokens(code, lang, theme, { includeExplanation: false })
+ return {
+ key,
+ theme,
+ tokens,
+ }
+ })
+
+ const highlightedCode: HighlightThemedToken[][] = []
+ for (const line in coloredTokens[0].tokens) {
+ highlightedCode[line] = coloredTokens.reduce((acc, color) => {
+ return mergeLines({
+ key: coloredTokens[0].key,
+ tokens: acc,
+ }, {
+ key: color.key,
+ tokens: color.tokens[line],
+ })
+ }, coloredTokens[0].tokens[line] as HighlightThemedToken[])
+ }
+
+ return highlightedCode
+ }
+
+ const getHighlightedAST = async (code: string, lang: Lang, theme: Theme, opts?: Partial): Promise> => {
+ const lines = await getHighlightedTokens(code, lang, theme)
+ const { highlights = [], colorMap = {} } = opts || {}
+
+ return lines.map((line, lineIndex) => ({
+ type: 'element',
+ tag: 'span',
+ props: { class: ['line', highlights.includes(lineIndex + 1) ? 'highlight' : ''].join(' ').trim() },
+ children: line.map(tokenSpan),
+ }))
+
+ function getColorProps(token: { color?: string | object }) {
+ if (!token.color)
+ return {}
+
+ if (typeof token.color === 'string')
+ return { style: { color: token.color } }
+
+ const key = Object.values(token.color).join('')
+ if (!colorMap[key]) {
+ colorMap[key] = {
+ colors: token.color,
+ className: `ct-${Math.random().toString(16).substring(2, 8)}`, // hash(key)
+ }
+ }
+ return { class: colorMap[key].className }
+ }
+
+ function tokenSpan(token: { content: string; color?: string | object }) {
+ return {
+ type: 'element',
+ tag: 'span',
+ props: getColorProps(token),
+ children: [{ type: 'text', value: token.content }],
+ }
+ }
+ }
+
+ const getHighlightedCode = async (code: string, lang: Lang, theme: Theme, opts?: Partial) => {
+ const colorMap = opts?.colorMap || {}
+ const highlights = opts?.highlights || []
+ const ast = await getHighlightedAST(code, lang, theme, { colorMap, highlights })
+
+ function renderNode(node: any) {
+ if (node.type === 'text')
+ return node.value.replace(//g, '>')
+
+ const children = node.children.map(renderNode).join('')
+ return `<${node.tag} class="${node.props.class}">${children}${node.tag}>`
+ }
+
+ return {
+ code: ast.map(renderNode).join(''),
+ styles: generateStyles(colorMap),
+ }
+ }
+
+ const generateStyles = (colorMap: TokenColorMap) => {
+ const colors: string[] = []
+ for (const colorClass of Object.values(colorMap)) {
+ Object.entries(colorClass.colors).forEach(([variant, color]) => {
+ if (variant === 'default')
+ colors.unshift(`.${colorClass.className}{color:${color}}`)
+ else
+ colors.push(`.${variant} .${colorClass.className}{color:${color}}`)
+ })
+ }
+ return colors.join('\n')
+ }
+
+ return {
+ getHighlightedTokens,
+ getHighlightedAST,
+ getHighlightedCode,
+ generateStyles,
+ }
+})
+
+function mergeLines(line1: HighlightThemedTokenLine, line2: HighlightThemedTokenLine) {
+ const mergedTokens: HighlightThemedToken[] = []
+ const getColors = (h: HighlightThemedTokenLine, i: number) => typeof h.tokens[i].color === 'string' ? { [h.key]: h.tokens[i].color } : h.tokens[i].color as object
+
+ const right = {
+ key: line1.key,
+ tokens: line1.tokens.slice(),
+ }
+ const left = {
+ key: line2.key,
+ tokens: line2.tokens.slice(),
+ }
+ let index = 0
+ while (index < right.tokens.length) {
+ const rightToken = right.tokens[index]
+ const leftToken = left.tokens[index]
+
+ if (rightToken.content === leftToken.content) {
+ mergedTokens.push({
+ content: rightToken.content,
+ color: {
+ ...getColors(right, index),
+ ...getColors(left, index),
+ },
+ })
+ index += 1
+ continue
+ }
+
+ if (rightToken.content.startsWith(leftToken.content)) {
+ const nextRightToken = {
+ ...rightToken,
+ content: rightToken.content.slice(leftToken.content.length),
+ }
+ rightToken.content = leftToken.content
+ right.tokens.splice(index + 1, 0, nextRightToken)
+ continue
+ }
+
+ if (leftToken.content.startsWith(rightToken.content)) {
+ const nextLeftToken = {
+ ...leftToken,
+ content: leftToken.content.slice(rightToken.content.length),
+ }
+ leftToken.content = rightToken.content
+ left.tokens.splice(index + 1, 0, nextLeftToken)
+ continue
+ }
+
+ throw new Error('Unexpected token')
+ }
+ return mergedTokens
+}
diff --git a/docs/transformers/shiki/index.ts b/docs/transformers/shiki/index.ts
new file mode 100644
index 00000000..abdb7d2c
--- /dev/null
+++ b/docs/transformers/shiki/index.ts
@@ -0,0 +1,2 @@
+export { default } from './shiki'
+export * from './highlighter'
diff --git a/docs/transformers/shiki/languages/mdc.tmLanguage.json b/docs/transformers/shiki/languages/mdc.tmLanguage.json
new file mode 100644
index 00000000..51425aec
--- /dev/null
+++ b/docs/transformers/shiki/languages/mdc.tmLanguage.json
@@ -0,0 +1,573 @@
+{
+ "information_for_contributors": [
+ "This file has been converted from https://github.com/docusgen/vscode-extension/blob/main/syntaxes/mdc.tmLanguage.json",
+ "If you want to provide a fix or improvement, please create a pull request against the original repository.",
+ "Once accepted there, we are happy to receive an update request."
+ ],
+ "version": "https://github.com/docusgen/vscode-extension/blob/1303abd16342880a42a4d143a660da049c79ea6c/syntaxes/mdc.tmLanguage.json",
+ "name": "markdown",
+ "injectionSelector": "L:text.html.markdown",
+ "scopeName": "text.markdown.mdc",
+ "patterns": [
+ {
+ "include": "text.html.markdown#frontMatter"
+ },
+ {
+ "include": "#component_block"
+ },
+ {
+ "include": "#block"
+ }
+ ],
+ "repository": {
+ "block": {
+ "comment": "Same as `text.html.markdown#block`, but without `raw_block`",
+ "patterns": [
+ {
+ "include": "#component_block"
+ },
+ {
+ "include": "text.html.markdown#separator"
+ },
+ {
+ "include": "#heading"
+ },
+ {
+ "include": "#blockquote"
+ },
+ {
+ "include": "#lists"
+ },
+ {
+ "include": "#paragraph"
+ },
+ {
+ "include": "text.html.markdown#fenced_code_block"
+ },
+ {
+ "include": "text.html.markdown#link-def"
+ },
+ {
+ "include": "text.html.markdown#html"
+ }
+ ]
+ },
+ "inline": {
+ "patterns": [
+ {
+ "include": "#component_inline"
+ },
+ {
+ "include": "#span"
+ },
+ {
+ "include": "#markdown_attributes"
+ }
+ ]
+ },
+ "markdown_attributes": {
+ "match": "(?x)([^ ])( # attributes\n ({)\n ([^{]*)\n (})\n )",
+ "name": "markup.component.attribute",
+ "captures": {
+ "4": {
+ "patterns": [
+ {
+ "include": "#attribute"
+ }
+ ]
+ }
+ }
+ },
+ "span": {
+ "match": "(?x)\n (\\[) # Open\n ([^]]*)\n (\\])\n ( # attributes\n ({)\n ([^{]*)\n (})\n )?",
+ "name": "markup.component.span",
+ "captures": {
+ "2": {
+ "name": "string.other.link.description.title.markdown"
+ },
+ "4": {
+ "patterns": [
+ {
+ "include": "#attributes"
+ }
+ ]
+ }
+ }
+ },
+ "attributes": {
+ "match": "(?x)( # attributes\n ({)\n ([^{]*)\n (})\n )",
+ "name": "markup.attributes",
+ "captures": {
+ "3": {
+ "patterns": [
+ {
+ "include": "#attribute"
+ }
+ ]
+ }
+ }
+ },
+ "component_inline": {
+ "match": "(?x)\n (^|\\G|\\s+)\n (:) # component colon\n (?i: # component name\n (\\w[\\w\\d-]*)\n )\n (\n ({[^}]*}) # attributes\n (\\[[^\\]]*\\]?) # slot\n # reverse order\n | (\\[[^\\]]*\\]) # slot\n ({[^}]*})? # attributes\n )?",
+ "name": "markup.component.inline",
+ "captures": {
+ "2": {
+ "name": "punctuation.definition.tag.start.component"
+ },
+ "3": {
+ "name": "entity.name.tag.component"
+ },
+ "5": {
+ "patterns": [
+ {
+ "include": "#attributes"
+ }
+ ]
+ },
+ "6": {
+ "patterns": [
+ {
+ "include": "#span"
+ }
+ ]
+ },
+ "7": {
+ "patterns": [
+ {
+ "include": "#span"
+ }
+ ]
+ },
+ "8": {
+ "patterns": [
+ {
+ "include": "#attributes"
+ }
+ ]
+ }
+ }
+ },
+ "component_block": {
+ "begin": "(?x)\n (^|\\G)(\\s*)\n (:{2,}) # component colons\n (?i:\n (\\w[\\w\\d-]+) # component name\n ( # folowing spaces or attributes\n \\s*\n | {([^{]*)}\n )\n $\n )",
+ "name": "markup.component.block",
+ "end": "(^|\\G)(\\2)(\\3)\\s*$",
+ "beginCaptures": {
+ "4": {
+ "name": "entity.name.tag.component"
+ },
+ "5": {
+ "patterns": [
+ {
+ "include": "#attribute"
+ }
+ ]
+ }
+ },
+ "patterns": [
+ {
+ "include": "#content"
+ }
+ ]
+ },
+ "content": {
+ "begin": "(^|\\G)(\\s*)(.*)",
+ "while": "(^|\\G)(?!\\s*([:]{2,})\\s*$)",
+ "contentName": "meta.embedded.block.component",
+ "patterns": [
+ {
+ "begin": "(^|\\G)(\\s*)(-{3})(\\s*)$",
+ "end": "(^|\\G)(\\s*(-{3})(\\s*)$)",
+ "patterns": [
+ {
+ "include": "source.yaml"
+ }
+ ]
+ },
+ {
+ "match": "^(\\s*)(#[\\w\\-\\_]*)\\s*()?$",
+ "captures": {
+ "2": {
+ "name": "entity.other.attribute-name.html"
+ },
+ "3": {
+ "name": "comment.block.html"
+ }
+ }
+ },
+ {
+ "comment": "Block Repository created to disable 4-space raw block inside components",
+ "include": "#block"
+ }
+ ]
+ },
+ "attribute": {
+ "patterns": [
+ {
+ "match": "(?x)\n (\n ([^=><\\s]*) # attribute name\n ( # attribute value\n =[\"]([^\"]*)([\"])|[']([^']*)(['])\n | =[^\\s'\"]*\n )?\n \\s*\n )",
+ "captures": {
+ "2": {
+ "name": "entity.other.attribute-name.html"
+ },
+ "3": {
+ "patterns": [
+ {
+ "include": "#attribute-interior"
+ }
+ ]
+ }
+ }
+ }
+ ]
+ },
+ "attribute-interior": {
+ "comment": "https://github.com/microsoft/vscode/blob/08d59c432609ae9306eb3889815977e93bb548de/extensions/html/syntaxes/html.tmLanguage.json#L376",
+ "patterns": [
+ {
+ "begin": "=",
+ "beginCaptures": {
+ "0": {
+ "name": "punctuation.separator.key-value.html"
+ }
+ },
+ "end": "(?<=[^\\s=])(?!\\s*=)|(?=/?>)",
+ "patterns": [
+ {
+ "match": "([^\\s\"'=<>`/]|/(?!>))+",
+ "name": "string.unquoted.html"
+ },
+ {
+ "begin": "\"",
+ "beginCaptures": {
+ "0": {
+ "name": "punctuation.definition.string.begin.html"
+ }
+ },
+ "end": "\"",
+ "endCaptures": {
+ "0": {
+ "name": "punctuation.definition.string.end.html"
+ }
+ },
+ "name": "string.quoted.double.html",
+ "patterns": [
+ {
+ "include": "#entities"
+ }
+ ]
+ },
+ {
+ "begin": "'",
+ "beginCaptures": {
+ "0": {
+ "name": "punctuation.definition.string.begin.html"
+ }
+ },
+ "end": "'",
+ "endCaptures": {
+ "0": {
+ "name": "punctuation.definition.string.end.html"
+ }
+ },
+ "name": "string.quoted.single.html",
+ "patterns": [
+ {
+ "include": "#entities"
+ }
+ ]
+ },
+ {
+ "match": "=",
+ "name": "invalid.illegal.unexpected-equals-sign.html"
+ }
+ ]
+ }
+ ]
+ },
+ "entities": {
+ "comment": "https://github.com/microsoft/vscode/blob/08d59c432609ae9306eb3889815977e93bb548de/extensions/html/syntaxes/html.tmLanguage.json#L532",
+ "patterns": [
+ {
+ "captures": {
+ "1": {
+ "name": "punctuation.definition.entity.html"
+ },
+ "912": {
+ "name": "punctuation.definition.entity.html"
+ }
+ },
+ "comment": "Yes this is a bit ridiculous, there are quite a lot of these",
+ "match": "(?x)\n\t\t\t\t\t\t(&)\t(?=[a-zA-Z])\n\t\t\t\t\t\t(\n\t\t\t\t\t\t\t(a(s(ymp(eq)?|cr|t)|n(d(slope|d|v|and)?|g(s(t|ph)|zarr|e|le|rt(vb(d)?)?|msd(a(h|c|d|e|f|a|g|b))?)?)|c(y|irc|d|ute|E)?|tilde|o(pf|gon)|uml|p(id|os|prox(eq)?|e|E|acir)?|elig|f(r)?|w(conint|int)|l(pha|e(ph|fsym))|acute|ring|grave|m(p|a(cr|lg))|breve)|A(s(sign|cr)|nd|MP|c(y|irc)|tilde|o(pf|gon)|uml|pplyFunction|fr|Elig|lpha|acute|ring|grave|macr|breve))\n\t\t\t\t\t\t | (B(scr|cy|opf|umpeq|e(cause|ta|rnoullis)|fr|a(ckslash|r(v|wed))|reve)|b(s(cr|im(e)?|ol(hsub|b)?|emi)|n(ot|e(quiv)?)|c(y|ong)|ig(s(tar|qcup)|c(irc|up|ap)|triangle(down|up)|o(times|dot|plus)|uplus|vee|wedge)|o(t(tom)?|pf|wtie|x(h(d|u|D|U)?|times|H(d|u|D|U)?|d(R|l|r|L)|u(R|l|r|L)|plus|D(R|l|r|L)|v(R|h|H|l|r|L)?|U(R|l|r|L)|V(R|h|H|l|r|L)?|minus|box))|Not|dquo|u(ll(et)?|mp(e(q)?|E)?)|prime|e(caus(e)?|t(h|ween|a)|psi|rnou|mptyv)|karow|fr|l(ock|k(1(2|4)|34)|a(nk|ck(square|triangle(down|left|right)?|lozenge)))|a(ck(sim(eq)?|cong|prime|epsilon)|r(vee|wed(ge)?))|r(eve|vbar)|brk(tbrk)?))\n\t\t\t\t\t\t | (c(s(cr|u(p(e)?|b(e)?))|h(cy|i|eck(mark)?)|ylcty|c(irc|ups(sm)?|edil|a(ps|ron))|tdot|ir(scir|c(eq|le(d(R|circ|S|dash|ast)|arrow(left|right)))?|e|fnint|E|mid)?|o(n(int|g(dot)?)|p(y(sr)?|f|rod)|lon(e(q)?)?|m(p(fn|le(xes|ment))?|ma(t)?))|dot|u(darr(l|r)|p(s|c(up|ap)|or|dot|brcap)?|e(sc|pr)|vee|wed|larr(p)?|r(vearrow(left|right)|ly(eq(succ|prec)|vee|wedge)|arr(m)?|ren))|e(nt(erdot)?|dil|mptyv)|fr|w(conint|int)|lubs(uit)?|a(cute|p(s|c(up|ap)|dot|and|brcup)?|r(on|et))|r(oss|arr))|C(scr|hi|c(irc|onint|edil|aron)|ircle(Minus|Times|Dot|Plus)|Hcy|o(n(tourIntegral|int|gruent)|unterClockwiseContourIntegral|p(f|roduct)|lon(e)?)|dot|up(Cap)?|OPY|e(nterDot|dilla)|fr|lo(seCurly(DoubleQuote|Quote)|ckwiseContourIntegral)|a(yleys|cute|p(italDifferentialD)?)|ross))\n\t\t\t\t\t\t | 
(d(s(c(y|r)|trok|ol)|har(l|r)|c(y|aron)|t(dot|ri(f)?)|i(sin|e|v(ide(ontimes)?|onx)?|am(s|ond(suit)?)?|gamma)|Har|z(cy|igrarr)|o(t(square|plus|eq(dot)?|minus)?|ublebarwedge|pf|wn(harpoon(left|right)|downarrows|arrow)|llar)|d(otseq|a(rr|gger))?|u(har|arr)|jcy|e(lta|g|mptyv)|f(isht|r)|wangle|lc(orn|rop)|a(sh(v)?|leth|rr|gger)|r(c(orn|rop)|bkarow)|b(karow|lac)|Arr)|D(s(cr|trok)|c(y|aron)|Scy|i(fferentialD|a(critical(Grave|Tilde|Do(t|ubleAcute)|Acute)|mond))|o(t(Dot|Equal)?|uble(Right(Tee|Arrow)|ContourIntegral|Do(t|wnArrow)|Up(DownArrow|Arrow)|VerticalBar|L(ong(RightArrow|Left(RightArrow|Arrow))|eft(RightArrow|Tee|Arrow)))|pf|wn(Right(TeeVector|Vector(Bar)?)|Breve|Tee(Arrow)?|arrow|Left(RightVector|TeeVector|Vector(Bar)?)|Arrow(Bar|UpArrow)?))|Zcy|el(ta)?|D(otrahd)?|Jcy|fr|a(shv|rr|gger)))\n\t\t\t\t\t\t | (e(s(cr|im|dot)|n(sp|g)|c(y|ir(c)?|olon|aron)|t(h|a)|o(pf|gon)|dot|u(ro|ml)|p(si(v|lon)?|lus|ar(sl)?)|e|D(ot|Dot)|q(s(im|lant(less|gtr))|c(irc|olon)|u(iv(DD)?|est|als)|vparsl)|f(Dot|r)|l(s(dot)?|inters|l)?|a(ster|cute)|r(Dot|arr)|g(s(dot)?|rave)?|x(cl|ist|p(onentiale|ectation))|m(sp(1(3|4))?|pty(set|v)?|acr))|E(s(cr|im)|c(y|irc|aron)|ta|o(pf|gon)|NG|dot|uml|TH|psilon|qu(ilibrium|al(Tilde)?)|fr|lement|acute|grave|x(ists|ponentialE)|m(pty(SmallSquare|VerySmallSquare)|acr)))\n\t\t\t\t\t\t | (f(scr|nof|cy|ilig|o(pf|r(k(v)?|all))|jlig|partint|emale|f(ilig|l(ig|lig)|r)|l(tns|lig|at)|allingdotseq|r(own|a(sl|c(1(2|8|3|4|5|6)|78|2(3|5)|3(8|4|5)|45|5(8|6)))))|F(scr|cy|illed(SmallSquare|VerySmallSquare)|o(uriertrf|pf|rAll)|fr))\n\t\t\t\t\t\t | (G(scr|c(y|irc|edil)|t|opf|dot|T|Jcy|fr|amma(d)?|reater(Greater|SlantEqual|Tilde|Equal(Less)?|FullEqual|Less)|g|breve)|g(s(cr|im(e|l)?)|n(sim|e(q(q)?)?|E|ap(prox)?)|c(y|irc)|t(c(c|ir)|dot|quest|lPar|r(sim|dot|eq(qless|less)|less|a(pprox|rr)))?|imel|opf|dot|jcy|e(s(cc|dot(o(l)?)?|l(es)?)?|q(slant|q)?|l)?|v(nE|ertneqq)|fr|E(l)?|l(j|E|a)?|a(cute|p|mma(d)?)|rave|g(g)?|breve))\n\t\t\t\t\t\t | 
(h(s(cr|trok|lash)|y(phen|bull)|circ|o(ok(leftarrow|rightarrow)|pf|arr|rbar|mtht)|e(llip|arts(uit)?|rcon)|ks(earow|warow)|fr|a(irsp|lf|r(dcy|r(cir|w)?)|milt)|bar|Arr)|H(s(cr|trok)|circ|ilbertSpace|o(pf|rizontalLine)|ump(DownHump|Equal)|fr|a(cek|t)|ARDcy))\n\t\t\t\t\t\t | (i(s(cr|in(s(v)?|dot|v|E)?)|n(care|t(cal|prod|e(rcal|gers)|larhk)?|odot|fin(tie)?)?|c(y|irc)?|t(ilde)?|i(nfin|i(nt|int)|ota)?|o(cy|ta|pf|gon)|u(kcy|ml)|jlig|prod|e(cy|xcl)|quest|f(f|r)|acute|grave|m(of|ped|a(cr|th|g(part|e|line))))|I(scr|n(t(e(rsection|gral))?|visible(Comma|Times))|c(y|irc)|tilde|o(ta|pf|gon)|dot|u(kcy|ml)|Ocy|Jlig|fr|Ecy|acute|grave|m(plies|a(cr|ginaryI))?))\n\t\t\t\t\t\t | (j(s(cr|ercy)|c(y|irc)|opf|ukcy|fr|math)|J(s(cr|ercy)|c(y|irc)|opf|ukcy|fr))\n\t\t\t\t\t\t | (k(scr|hcy|c(y|edil)|opf|jcy|fr|appa(v)?|green)|K(scr|c(y|edil)|Hcy|opf|Jcy|fr|appa))\n\t\t\t\t\t\t | (l(s(h|cr|trok|im(e|g)?|q(uo(r)?|b)|aquo)|h(ar(d|u(l)?)|blk)|n(sim|e(q(q)?)?|E|ap(prox)?)|c(y|ub|e(il|dil)|aron)|Barr|t(hree|c(c|ir)|imes|dot|quest|larr|r(i(e|f)?|Par))?|Har|o(ng(left(arrow|rightarrow)|rightarrow|mapsto)|times|z(enge|f)?|oparrow(left|right)|p(f|lus|ar)|w(ast|bar)|a(ng|rr)|brk)|d(sh|ca|quo(r)?|r(dhar|ushar))|ur(dshar|uhar)|jcy|par(lt)?|e(s(s(sim|dot|eq(qgtr|gtr)|approx|gtr)|cc|dot(o(r)?)?|g(es)?)?|q(slant|q)?|ft(harpoon(down|up)|threetimes|leftarrows|arrow(tail)?|right(squigarrow|harpoons|arrow(s)?))|g)?|v(nE|ertneqq)|f(isht|loor|r)|E(g)?|l(hard|corner|tri|arr)?|a(ng(d|le)?|cute|t(e(s)?|ail)?|p|emptyv|quo|rr(sim|hk|tl|pl|fs|lp|b(fs)?)?|gran|mbda)|r(har(d)?|corner|tri|arr|m)|g(E)?|m(idot|oust(ache)?)|b(arr|r(k(sl(d|u)|e)|ac(e|k))|brk)|A(tail|arr|rr))|L(s(h|cr|trok)|c(y|edil|aron)|t|o(ng(RightArrow|left(arrow|rightarrow)|rightarrow|Left(RightArrow|Arrow))|pf|wer(RightArrow|LeftArrow))|T|e(ss(Greater|SlantEqual|Tilde|EqualGreater|FullEqual|Less)|ft(Right(Vector|Arrow)|Ceiling|T(ee(Vector|Arrow)?|riangle(Bar|Equal)?)|Do(ubleBracket|wn(TeeVector|Vector(Bar)?))|Up(TeeVector|DownVector|Vector(Bar)?)|Vector(Bar)?
|arrow|rightarrow|Floor|A(ngleBracket|rrow(RightArrow|Bar)?)))|Jcy|fr|l(eftarrow)?|a(ng|cute|placetrf|rr|mbda)|midot))\n\t\t\t\t\t\t | (M(scr|cy|inusPlus|opf|u|e(diumSpace|llintrf)|fr|ap)|m(s(cr|tpos)|ho|nplus|c(y|omma)|i(nus(d(u)?|b)?|cro|d(cir|dot|ast)?)|o(dels|pf)|dash|u(ltimap|map)?|p|easuredangle|DDot|fr|l(cp|dr)|a(cr|p(sto(down|up|left)?)?|l(t(ese)?|e)|rker)))\n\t\t\t\t\t\t | (n(s(hort(parallel|mid)|c(cue|e|r)?|im(e(q)?)?|u(cc(eq)?|p(set(eq(q)?)?|e|E)?|b(set(eq(q)?)?|e|E)?)|par|qsu(pe|be)|mid)|Rightarrow|h(par|arr|Arr)|G(t(v)?|g)|c(y|ong(dot)?|up|edil|a(p|ron))|t(ilde|lg|riangle(left(eq)?|right(eq)?)|gl)|i(s(d)?|v)?|o(t(ni(v(c|a|b))?|in(dot|v(c|a|b)|E)?)?|pf)|dash|u(m(sp|ero)?)?|jcy|p(olint|ar(sl|t|allel)?|r(cue|e(c(eq)?)?)?)|e(s(im|ear)|dot|quiv|ar(hk|r(ow)?)|xist(s)?|Arr)?|v(sim|infin|Harr|dash|Dash|l(t(rie)?|e|Arr)|ap|r(trie|Arr)|g(t|e))|fr|w(near|ar(hk|r(ow)?)|Arr)|V(dash|Dash)|l(sim|t(ri(e)?)?|dr|e(s(s)?|q(slant|q)?|ft(arrow|rightarrow))?|E|arr|Arr)|a(ng|cute|tur(al(s)?)?|p(id|os|prox|E)?|bla)|r(tri(e)?|ightarrow|arr(c|w)?|Arr)|g(sim|t(r)?|e(s|q(slant|q)?)?|E)|mid|L(t(v)?|eft(arrow|rightarrow)|l)|b(sp|ump(e)?))|N(scr|c(y|edil|aron)|tilde|o(nBreakingSpace|Break|t(R(ightTriangle(Bar|Equal)?|everseElement)|Greater(Greater|SlantEqual|Tilde|Equal|FullEqual|Less)?|S(u(cceeds(SlantEqual|Tilde|Equal)?|perset(Equal)?|bset(Equal)?)|quareSu(perset(Equal)?|bset(Equal)?))|Hump(DownHump|Equal)|Nested(GreaterGreater|LessLess)|C(ongruent|upCap)|Tilde(Tilde|Equal|FullEqual)?|DoubleVerticalBar|Precedes(SlantEqual|Equal)?|E(qual(Tilde)?|lement|xists)|VerticalBar|Le(ss(Greater|SlantEqual|Tilde|Equal|Less)?|ftTriangle(Bar|Equal)?))?|pf)|u|e(sted(GreaterGreater|LessLess)|wLine|gative(MediumSpace|Thi(nSpace|ckSpace)|VeryThinSpace))|Jcy|fr|acute))\n\t\t\t\t\t\t | 
(o(s(cr|ol|lash)|h(m|bar)|c(y|ir(c)?)|ti(lde|mes(as)?)|S|int|opf|d(sold|iv|ot|ash|blac)|uml|p(erp|lus|ar)|elig|vbar|f(cir|r)|l(c(ir|ross)|t|ine|arr)|a(st|cute)|r(slope|igof|or|d(er(of)?|f|m)?|v|arr)?|g(t|on|rave)|m(i(nus|cron|d)|ega|acr))|O(s(cr|lash)|c(y|irc)|ti(lde|mes)|opf|dblac|uml|penCurly(DoubleQuote|Quote)|ver(B(ar|rac(e|ket))|Parenthesis)|fr|Elig|acute|r|grave|m(icron|ega|acr)))\n\t\t\t\t\t\t | (p(s(cr|i)|h(i(v)?|one|mmat)|cy|i(tchfork|v)?|o(intint|und|pf)|uncsp|er(cnt|tenk|iod|p|mil)|fr|l(us(sim|cir|two|d(o|u)|e|acir|mn|b)?|an(ck(h)?|kv))|ar(s(im|l)|t|a(llel)?)?|r(sim|n(sim|E|ap)|cue|ime(s)?|o(d|p(to)?|f(surf|line|alar))|urel|e(c(sim|n(sim|eqq|approx)|curlyeq|eq|approx)?)?|E|ap)?|m)|P(s(cr|i)|hi|cy|i|o(incareplane|pf)|fr|lusMinus|artialD|r(ime|o(duct|portion(al)?)|ecedes(SlantEqual|Tilde|Equal)?)?))\n\t\t\t\t\t\t | (q(scr|int|opf|u(ot|est(eq)?|at(int|ernions))|prime|fr)|Q(scr|opf|UOT|fr))\n\t\t\t\t\t\t | (R(s(h|cr)|ho|c(y|edil|aron)|Barr|ight(Ceiling|T(ee(Vector|Arrow)?|riangle(Bar|Equal)?)|Do(ubleBracket|wn(TeeVector|Vector(Bar)?))|Up(TeeVector|DownVector|Vector(Bar)?)|Vector(Bar)?|arrow|Floor|A(ngleBracket|rrow(Bar|LeftArrow)?))|o(undImplies|pf)|uleDelayed|e(verse(UpEquilibrium|E(quilibrium|lement)))?|fr|EG|a(ng|cute|rr(tl)?)|rightarrow)|r(s(h|cr|q(uo(r)?|b)|aquo)|h(o(v)?|ar(d|u(l)?))|nmid|c(y|ub|e(il|dil)|aron)|Barr|t(hree|imes|ri(e|f|ltri)?)|i(singdotseq|ng|ght(squigarrow|harpoon(down|up)|threetimes|left(harpoons|arrows)|arrow(tail)?|rightarrows))|Har|o(times|p(f|lus|ar)|a(ng|rr)|brk)|d(sh|ca|quo(r)?|ldhar)|uluhar|p(polint|ar(gt)?)|e(ct|al(s|ine|part)?|g)|f(isht|loor|r)|l(har|arr|m)|a(ng(d|e|le)?|c(ute|e)|t(io(nals)?|ail)|dic|emptyv|quo|rr(sim|hk|c|tl|pl|fs|w|lp|ap|b(fs)?)?)|rarr|x|moust(ache)?|b(arr|r(k(sl(d|u)|e)|ac(e|k))|brk)|A(tail|arr|rr)))\n\t\t\t\t\t\t | 
(s(s(cr|tarf|etmn|mile)|h(y|c(hcy|y)|ort(parallel|mid)|arp)|c(sim|y|n(sim|E|ap)|cue|irc|polint|e(dil)?|E|a(p|ron))?|t(ar(f)?|r(ns|aight(phi|epsilon)))|i(gma(v|f)?|m(ne|dot|plus|e(q)?|l(E)?|rarr|g(E)?)?)|zlig|o(pf|ftcy|l(b(ar)?)?)|dot(e|b)?|u(ng|cc(sim|n(sim|eqq|approx)|curlyeq|eq|approx)?|p(s(im|u(p|b)|et(neq(q)?|eq(q)?)?)|hs(ol|ub)|1|n(e|E)|2|d(sub|ot)|3|plus|e(dot)?|E|larr|mult)?|m|b(s(im|u(p|b)|et(neq(q)?|eq(q)?)?)|n(e|E)|dot|plus|e(dot)?|E|rarr|mult)?)|pa(des(uit)?|r)|e(swar|ct|tm(n|inus)|ar(hk|r(ow)?)|xt|mi|Arr)|q(su(p(set(eq)?|e)?|b(set(eq)?|e)?)|c(up(s)?|ap(s)?)|u(f|ar(e|f))?)|fr(own)?|w(nwar|ar(hk|r(ow)?)|Arr)|larr|acute|rarr|m(t(e(s)?)?|i(d|le)|eparsl|a(shp|llsetminus))|bquo)|S(scr|hort(RightArrow|DownArrow|UpArrow|LeftArrow)|c(y|irc|edil|aron)?|tar|igma|H(cy|CHcy)|opf|u(c(hThat|ceeds(SlantEqual|Tilde|Equal)?)|p(set|erset(Equal)?)?|m|b(set(Equal)?)?)|OFTcy|q(uare(Su(perset(Equal)?|bset(Equal)?)|Intersection|Union)?|rt)|fr|acute|mallCircle))\n\t\t\t\t\t\t | (t(s(hcy|c(y|r)|trok)|h(i(nsp|ck(sim|approx))|orn|e(ta(sym|v)?|re(4|fore))|k(sim|ap))|c(y|edil|aron)|i(nt|lde|mes(d|b(ar)?)?)|o(sa|p(cir|f(ork)?|bot)?|ea)|dot|prime|elrec|fr|w(ixt|ohead(leftarrow|rightarrow))|a(u|rget)|r(i(sb|time|dot|plus|e|angle(down|q|left(eq)?|right(eq)?)?|minus)|pezium|ade)|brk)|T(s(cr|trok)|RADE|h(i(nSpace|ckSpace)|e(ta|refore))|c(y|edil|aron)|S(cy|Hcy)|ilde(Tilde|Equal|FullEqual)?|HORN|opf|fr|a(u|b)|ripleDot))\n\t\t\t\t\t\t | (u(scr|h(ar(l|r)|blk)|c(y|irc)|t(ilde|dot|ri(f)?)|Har|o(pf|gon)|d(har|arr|blac)|u(arr|ml)|p(si(h|lon)?|harpoon(left|right)|downarrow|uparrows|lus|arrow)|f(isht|r)|wangle|l(c(orn(er)?|rop)|tri)|a(cute|rr)|r(c(orn(er)?|rop)|tri|ing)|grave|m(l|acr)|br(cy|eve)|Arr)|U(scr|n(ion(Plus)?|der(B(ar|rac(e|ket))|Parenthesis))|c(y|irc)|tilde|o(pf|gon)|dblac|uml|p(si(lon)?|downarrow|Tee(Arrow)?|per(RightArrow|LeftArrow)|DownArrow|Equilibrium|arrow|Arrow(Bar|DownArrow)?)|fr|a(cute|rr(ocir)?)|ring|grave|macr|br(cy|eve)))\n\t\t\t\t\t\t | 
(v(s(cr|u(pn(e|E)|bn(e|E)))|nsu(p|b)|cy|Bar(v)?|zigzag|opf|dash|prop|e(e(eq|bar)?|llip|r(t|bar))|Dash|fr|ltri|a(ngrt|r(s(igma|u(psetneq(q)?|bsetneq(q)?))|nothing|t(heta|riangle(left|right))|p(hi|i|ropto)|epsilon|kappa|r(ho)?))|rtri|Arr)|V(scr|cy|opf|dash(l)?|e(e|r(yThinSpace|t(ical(Bar|Separator|Tilde|Line))?|bar))|Dash|vdash|fr|bar))\n\t\t\t\t\t\t | (w(scr|circ|opf|p|e(ierp|d(ge(q)?|bar))|fr|r(eath)?)|W(scr|circ|opf|edge|fr))\n\t\t\t\t\t\t | (X(scr|i|opf|fr)|x(s(cr|qcup)|h(arr|Arr)|nis|c(irc|up|ap)|i|o(time|dot|p(f|lus))|dtri|u(tri|plus)|vee|fr|wedge|l(arr|Arr)|r(arr|Arr)|map))\n\t\t\t\t\t\t | (y(scr|c(y|irc)|icy|opf|u(cy|ml)|en|fr|ac(y|ute))|Y(scr|c(y|irc)|opf|uml|Icy|Ucy|fr|acute|Acy))\n\t\t\t\t\t\t | (z(scr|hcy|c(y|aron)|igrarr|opf|dot|e(ta|etrf)|fr|w(nj|j)|acute)|Z(scr|c(y|aron)|Hcy|opf|dot|e(ta|roWidthSpace)|fr|acute))\n\t\t\t\t\t\t)\n\t\t\t\t\t\t(;)\n\t\t\t\t\t",
+ "name": "constant.character.entity.named.$2.html"
+ },
+ {
+ "captures": {
+ "1": {
+ "name": "punctuation.definition.entity.html"
+ },
+ "3": {
+ "name": "punctuation.definition.entity.html"
+ }
+ },
+ "match": "(&)#[0-9]+(;)",
+ "name": "constant.character.entity.numeric.decimal.html"
+ },
+ {
+ "captures": {
+ "1": {
+ "name": "punctuation.definition.entity.html"
+ },
+ "3": {
+ "name": "punctuation.definition.entity.html"
+ }
+ },
+ "match": "(&)#[xX][0-9a-fA-F]+(;)",
+ "name": "constant.character.entity.numeric.hexadecimal.html"
+ },
+ {
+ "match": "&(?=[a-zA-Z0-9]+;)",
+ "name": "invalid.illegal.ambiguous-ampersand.html"
+ }
+ ]
+ },
+ "heading": {
+ "match": "(?:^|\\G)[ ]*(#{1,6}\\s+(.*?)(\\s+#{1,6})?\\s*)$",
+ "captures": {
+ "1": {
+ "patterns": [
+ {
+ "match": "(#{6})\\s+(.*?)(?:\\s+(#+))?\\s*$",
+ "name": "heading.6.markdown",
+ "captures": {
+ "1": {
+ "name": "punctuation.definition.heading.markdown"
+ },
+ "2": {
+ "name": "entity.name.section.markdown",
+ "patterns": [
+ {
+ "include": "text.html.markdown#inline"
+ },
+ {
+ "include": "text.html.derivative"
+ }
+ ]
+ },
+ "3": {
+ "name": "punctuation.definition.heading.markdown"
+ }
+ }
+ },
+ {
+ "match": "(#{5})\\s+(.*?)(?:\\s+(#+))?\\s*$",
+ "name": "heading.5.markdown",
+ "captures": {
+ "1": {
+ "name": "punctuation.definition.heading.markdown"
+ },
+ "2": {
+ "name": "entity.name.section.markdown",
+ "patterns": [
+ {
+ "include": "text.html.markdown#inline"
+ },
+ {
+ "include": "text.html.derivative"
+ }
+ ]
+ },
+ "3": {
+ "name": "punctuation.definition.heading.markdown"
+ }
+ }
+ },
+ {
+ "match": "(#{4})\\s+(.*?)(?:\\s+(#+))?\\s*$",
+ "name": "heading.4.markdown",
+ "captures": {
+ "1": {
+ "name": "punctuation.definition.heading.markdown"
+ },
+ "2": {
+ "name": "entity.name.section.markdown",
+ "patterns": [
+ {
+ "include": "text.html.markdown#inline"
+ },
+ {
+ "include": "text.html.derivative"
+ }
+ ]
+ },
+ "3": {
+ "name": "punctuation.definition.heading.markdown"
+ }
+ }
+ },
+ {
+ "match": "(#{3})\\s+(.*?)(?:\\s+(#+))?\\s*$",
+ "name": "heading.3.markdown",
+ "captures": {
+ "1": {
+ "name": "punctuation.definition.heading.markdown"
+ },
+ "2": {
+ "name": "entity.name.section.markdown",
+ "patterns": [
+ {
+ "include": "text.html.markdown#inline"
+ },
+ {
+ "include": "text.html.derivative"
+ }
+ ]
+ },
+ "3": {
+ "name": "punctuation.definition.heading.markdown"
+ }
+ }
+ },
+ {
+ "match": "(#{2})\\s+(.*?)(?:\\s+(#+))?\\s*$",
+ "name": "heading.2.markdown",
+ "captures": {
+ "1": {
+ "name": "punctuation.definition.heading.markdown"
+ },
+ "2": {
+ "name": "entity.name.section.markdown",
+ "patterns": [
+ {
+ "include": "text.html.markdown#inline"
+ },
+ {
+ "include": "text.html.derivative"
+ }
+ ]
+ },
+ "3": {
+ "name": "punctuation.definition.heading.markdown"
+ }
+ }
+ },
+ {
+ "match": "(#{1})\\s+(.*?)(?:\\s+(#+))?\\s*$",
+ "name": "heading.1.markdown",
+ "captures": {
+ "1": {
+ "name": "punctuation.definition.heading.markdown"
+ },
+ "2": {
+ "name": "entity.name.section.markdown",
+ "patterns": [
+ {
+ "include": "text.html.markdown#inline"
+ },
+ {
+ "include": "text.html.derivative"
+ }
+ ]
+ },
+ "3": {
+ "name": "punctuation.definition.heading.markdown"
+ }
+ }
+ }
+ ]
+ }
+ },
+ "name": "markup.heading.markdown",
+ "patterns": [
+ {
+ "include": "text.html.markdown#inline"
+ }
+ ]
+ },
+ "heading-setext": {
+ "patterns": [
+ {
+ "match": "^(={3,})(?=[ \\t]*$\\n?)",
+ "name": "markup.heading.setext.1.markdown"
+ },
+ {
+ "match": "^(-{3,})(?=[ \\t]*$\\n?)",
+ "name": "markup.heading.setext.2.markdown"
+ }
+ ]
+ },
+ "lists": {
+ "patterns": [
+ {
+ "begin": "(^|\\G)([ ]*)([*+-])([ \\t])",
+ "beginCaptures": {
+ "3": {
+ "name": "punctuation.definition.list.begin.markdown"
+ }
+ },
+ "comment": "Currently does not support un-indented second lines.",
+ "name": "markup.list.unnumbered.markdown",
+ "patterns": [
+ {
+ "include": "#block"
+ },
+ {
+ "include": "text.html.markdown#list_paragraph"
+ }
+ ],
+ "while": "((^|\\G)([ ]*|\\t))|(^[ \\t]*$)"
+ },
+ {
+ "begin": "(^|\\G)([ ]*)([0-9]+\\.)([ \\t])",
+ "beginCaptures": {
+ "3": {
+ "name": "punctuation.definition.list.begin.markdown"
+ }
+ },
+ "name": "markup.list.numbered.markdown",
+ "patterns": [
+ {
+ "include": "#block"
+ },
+ {
+ "include": "text.html.markdown#list_paragraph"
+ }
+ ],
+ "while": "((^|\\G)([ ]*|\\t))|(^[ \\t]*$)"
+ }
+ ]
+ },
+ "paragraph": {
+ "begin": "(^|\\G)[ ]*(?=\\S)",
+ "name": "meta.paragraph.markdown",
+ "patterns": [
+ {
+ "include": "#inline"
+ },
+ {
+ "include": "text.html.markdown#inline"
+ },
+ {
+ "include": "text.html.derivative"
+ },
+ {
+ "include": "#heading-setext"
+ }
+ ],
+ "while": "(^|\\G)((?=\\s*[-=]{3,}\\s*$)|[ ]{4,}(?=\\S))"
+ },
+ "blockquote": {
+ "begin": "(^|\\G)[ ]*(>) ?",
+ "captures": {
+ "2": {
+ "name": "punctuation.definition.quote.begin.markdown"
+ }
+ },
+ "name": "markup.quote.markdown",
+ "patterns": [
+ {
+ "include": "#block"
+ }
+ ],
+ "while": "(^|\\G)\\s*(>) ?"
+ }
+ }
+}
\ No newline at end of file
diff --git a/docs/transformers/shiki/shiki.ts b/docs/transformers/shiki/shiki.ts
new file mode 100644
index 00000000..c8818457
--- /dev/null
+++ b/docs/transformers/shiki/shiki.ts
@@ -0,0 +1,83 @@
+import { visit } from 'unist-util-visit'
+import type { MarkdownRoot } from '../../types'
+import { defineTransformer } from '../utils'
+import { useShikiHighlighter } from './highlighter'
+import type { MarkdownNode, TokenColorMap } from './types'
+
+export default defineTransformer({
+ name: 'highlight',
+ extensions: ['.md'],
+ transform: async (content, options = {}) => {
+ const shikiHighlighter = useShikiHighlighter(options)
+
+ await Promise.all([
+ highlight(content.body),
+ highlight(content.excerpt),
+ ])
+
+ return content
+
+ /**
+ * Highlight document with code nodes
+ * @param document tree
+ */
+ async function highlight(document: MarkdownRoot) {
+ if (!document)
+ return
+
+ const colorMap: TokenColorMap = {}
+ const codeBlocks: any[] = []
+ const inlineCodes: any = []
+ visit(
+ document,
+ (node: any) => (node?.tag === 'code' && node?.props.code) || (node?.tag === 'code-inline' && (node.props?.lang || node.props?.language)),
+ (node: MarkdownNode) => {
+ if (node?.tag === 'code')
+ codeBlocks.push(node)
+ else if (node?.tag === 'code-inline')
+ inlineCodes.push(node)
+ },
+ )
+
+ await Promise.all(codeBlocks.map((node: MarkdownNode) => highlightBlock(node, colorMap)))
+ await Promise.all(inlineCodes.map((node: MarkdownNode) => highlightInline(node, colorMap)))
+
+ // Inject token colors at the end of the document
+ if (Object.values(colorMap).length) {
+ document?.children.push({
+ type: 'element',
+ tag: 'style',
+ children: [{ type: 'text', value: shikiHighlighter.generateStyles(colorMap) }],
+ })
+ }
+ }
+
+ /**
+ * Highlight inline code
+ */
+ async function highlightInline(node: MarkdownNode, colorMap: TokenColorMap) {
+ const code = node.children![0].value!
+
+ // Fetch highlighted tokens
+ const lines = await shikiHighlighter.getHighlightedAST(code, node.props!.lang || node.props!.language, options.theme, { colorMap })
+
+ // Generate highlighted children
+ node.children = lines[0].children
+ node.props = Object.assign(node.props || {}, { class: 'colored' })
+
+ return node
+ }
+
+ /**
+ * Highlight a code block
+ */
+ async function highlightBlock(node: MarkdownNode, colorMap: TokenColorMap) {
+ const { code, language: lang, highlights = [] } = node.props!
+
+ const innerCodeNode = node.children![0].children![0]
+ innerCodeNode.children = await shikiHighlighter.getHighlightedAST(code, lang, options.theme, { colorMap, highlights })
+
+ return node
+ }
+ },
+})
diff --git a/docs/transformers/shiki/types.d.ts b/docs/transformers/shiki/types.d.ts
new file mode 100644
index 00000000..83414c50
--- /dev/null
+++ b/docs/transformers/shiki/types.d.ts
@@ -0,0 +1,27 @@
+import { Theme as ShikiTheme } from 'shiki-es'
+export type { MarkdownNode } from '../../types'
+
+export type Theme = ShikiTheme | Record<string, ShikiTheme>
+
+export type TokenColorMap = Record<string, { colors: any, className: string }>
+
+export interface HighlightParams {
+ code: string
+ lang: string
+ theme: Theme
+}
+
+export interface HighlighterOptions {
+ colorMap: TokenColorMap
+  highlights: Array<number>
+}
+
+export interface HighlightThemedToken {
+ content: string
+  color?: string | Record<string, string>
+}
+
+export interface HighlightThemedTokenLine {
+ key: string
+ tokens: HighlightThemedToken[]
+}
diff --git a/docs/transformers/utils.ts b/docs/transformers/utils.ts
new file mode 100644
index 00000000..5447e7bc
--- /dev/null
+++ b/docs/transformers/utils.ts
@@ -0,0 +1,15 @@
+import type { ContentTransformer } from '../types'
+
+export const defineTransformer = (transformer: ContentTransformer) => {
+ return transformer
+}
+
+export const createSingleton = <T, Params extends Array<any>>(fn: (...arg: Params) => T) => {
+ let instance: T | undefined
+ return (...args: Params) => {
+ if (!instance)
+ instance = fn(...args)
+
+ return instance
+ }
+}
diff --git a/docs/transformers/yaml.ts b/docs/transformers/yaml.ts
new file mode 100644
index 00000000..1eb2045e
--- /dev/null
+++ b/docs/transformers/yaml.ts
@@ -0,0 +1,24 @@
+import { parseFrontMatter } from 'remark-mdc'
+import type { ParsedContent } from '../types'
+import { defineTransformer } from './utils'
+
+export default defineTransformer({
+ name: 'Yaml',
+ extensions: ['.yml', '.yaml'],
+ parse: async (_id, content) => {
+ const { data } = await parseFrontMatter(`---\n${content}\n---`)
+
+ // Keep array contents under `body` key
+ let parsed = data
+ if (Array.isArray(data)) {
+ console.warn(`YAML array is not supported in ${_id}, moving the array into the \`body\` key`)
+ parsed = { body: data }
+ }
+
+ return {
+ ...parsed,
+ _id,
+ _type: 'yaml',
+ }
+ },
+})
diff --git a/docs/types.d.ts b/docs/types.d.ts
index 3ed83ba4..60b77f72 100644
--- a/docs/types.d.ts
+++ b/docs/types.d.ts
@@ -501,15 +501,3 @@ export interface NavItem {
[key: string]: any
}
-
-// Highlight
-export interface HighlightParams {
- code: string
- lang: string
-  theme: Theme | Record<string, Theme>
-}
-
-export interface HighlightThemedToken {
- content: string
-  color?: string | Record<string, string>
-}
diff --git a/examples/vue3/package.json b/examples/vue3/package.json
index 081dc6f5..6488134c 100644
--- a/examples/vue3/package.json
+++ b/examples/vue3/package.json
@@ -19,7 +19,7 @@
"postcss": "^8.4.18",
"tailwindcss": "^3.2.2",
"typescript": "^4.6.4",
- "vite": "^4.0.3",
+ "vite": "^4.0.4",
"vue-tsc": "^1.0.18"
}
}
\ No newline at end of file
diff --git a/package.json b/package.json
index 1faae66a..e65be27b 100644
--- a/package.json
+++ b/package.json
@@ -32,7 +32,7 @@
"sass": "^1.55.0",
"ts-node": "^10.9.1",
"typescript": "^4.8.4",
- "vite": "^4.0.3",
+ "vite": "^4.0.4",
"vue-tsc": "^1.0.24"
}
}
\ No newline at end of file
diff --git a/packages/vue-final-modal/src/useApi.ts b/packages/vue-final-modal/src/useApi.ts
index 7fe5ac5e..2da611eb 100644
--- a/packages/vue-final-modal/src/useApi.ts
+++ b/packages/vue-final-modal/src/useApi.ts
@@ -57,7 +57,7 @@ function defineModal<
>(_options: UseModalOptions): UseModalReturnType {
const options = reactive({
id: Symbol('useModal'),
- modelValue: !!_options.defaultModelValue,
+ modelValue: !!_options?.defaultModelValue,
...withMarkRaw(_options),
}) as UseModalOptionsPrivate
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 3a124469..b4da4b4e 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -15,7 +15,7 @@ importers:
sass: ^1.55.0
ts-node: ^10.9.1
typescript: ^4.8.4
- vite: ^4.0.3
+ vite: ^4.0.4
vue: ^3.2.45
vue-tsc: ^1.0.24
dependencies:
@@ -12701,7 +12701,7 @@ packages:
pathe: 0.2.0
source-map: 0.6.1
source-map-support: 0.5.21
- vite: 3.2.5
+ vite: 4.0.4
transitivePeerDependencies:
- '@types/node'
- less
@@ -12812,6 +12812,39 @@ packages:
fsevents: 2.3.2
dev: true
+ /vite/4.0.4:
+ resolution: {integrity: sha512-xevPU7M8FU0i/80DMR+YhgrzR5KS2ORy1B4xcX/cXLsvnUWvfHuqMmVU6N0YiJ4JWGRJJsLCgjEzKjG9/GKoSw==}
+ engines: {node: ^14.18.0 || >=16.0.0}
+ hasBin: true
+ peerDependencies:
+ '@types/node': '>= 14'
+ less: '*'
+ sass: '*'
+ stylus: '*'
+ sugarss: '*'
+ terser: ^5.4.0
+ peerDependenciesMeta:
+ '@types/node':
+ optional: true
+ less:
+ optional: true
+ sass:
+ optional: true
+ stylus:
+ optional: true
+ sugarss:
+ optional: true
+ terser:
+ optional: true
+ dependencies:
+ esbuild: 0.16.17
+ postcss: 8.4.21
+ resolve: 1.22.1
+ rollup: 3.10.0
+ optionalDependencies:
+ fsevents: 2.3.2
+ dev: true
+
/vite/4.0.4_@types+node@18.11.18:
resolution: {integrity: sha512-xevPU7M8FU0i/80DMR+YhgrzR5KS2ORy1B4xcX/cXLsvnUWvfHuqMmVU6N0YiJ4JWGRJJsLCgjEzKjG9/GKoSw==}
engines: {node: ^14.18.0 || >=16.0.0}