Use import attributes rather than json assertions #24

Merged
merged 1 commit into from Jul 13, 2024
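This PR switches the generated .json imports from the legacy import assertion syntax to the standardized import attribute syntax. As a rough illustration of the difference (not code from this repository, just the two syntax forms in a standalone .mjs module; data.json is a hypothetical file):

// Before: import assertion syntax (legacy, superseded by import attributes)
// import data from './data.json' assert { type: 'json' }

// After: import attribute syntax, which this PR emits instead
import data from './data.json' with { type: 'json' }

console.log(data)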
28 changes: 14 additions & 14 deletions packages/@contentlayer/core/src/generation/generate-dotpkg.ts
@@ -192,16 +192,16 @@ const writeFilesForCache = ({
T.succeedWith(() => process.versions.node.split('.').map((_) => parseInt(_, 10)) as [number, number, number]),
)

// NOTE Type assert statements for `.json` files are neccessary from Node v16.14 onwards
const needsJsonAssertStatement = nodeVersionMajor > 16 || (nodeVersionMajor === 16 && nodeVersionMinor >= 14)
const assertStatement = needsJsonAssertStatement ? ` assert { type: 'json' }` : ''
// NOTE: An import attribute for `.json` files is necessary from Node v16.14 onwards.
const needsTypeJsonAttribute = nodeVersionMajor > 16 || (nodeVersionMajor === 16 && nodeVersionMinor >= 14)
const importAttributeStatement = needsTypeJsonAttribute ? ` with { type: 'json' }` : ''

const typeNameField = generationOptions.options.fieldOptions.typeFieldName
const dataBarrelFiles = documentDefs.map((docDef) => ({
content: makeDataExportFile({
docDef,
documentIds: allDocuments.filter((_) => _[typeNameField] === docDef.name).map((_) => _._id),
assertStatement,
importAttributeStatement,
}),
filePath: withPrefix('generated', docDef.name, `_index.mjs`),
}))
@@ -255,7 +255,7 @@ const writeFilesForCache = ({
filePath: withPrefix('generated', 'index.mjs'),
content: makeIndexMjs({
schemaDef,
assertStatement,
importAttributeStatement,
bundleFilePath: relativeBundleFilePath,
isDev,
options,
@@ -338,19 +338,19 @@ const writeFileWithWrittenFilesCache =
const makeDataExportFile = ({
docDef,
documentIds,
assertStatement,
importAttributeStatement,
}: {
docDef: DocumentTypeDef
documentIds: string[]
assertStatement: string
importAttributeStatement: string
}): string => {
const dataVariableName = getDataVariableName({ docDef })

if (docDef.isSingleton) {
const documentId = documentIds[0]!
return `\
// ${autogeneratedNote}
export { default as ${dataVariableName} } from './${idToFileName(documentId)}.json'${assertStatement}
export { default as ${dataVariableName} } from './${idToFileName(documentId)}.json'${importAttributeStatement}
`
}

@@ -376,7 +376,7 @@ export { default as ${dataVariableName} } from './${idToFileName(documentId)}.js
)

const docImports = documentIds
.map((_) => `import ${idToVariableNameMap.get(_)} from './${idToFileName(_)}.json'${assertStatement}`)
.map((_) => `import ${idToVariableNameMap.get(_)} from './${idToFileName(_)}.json'${importAttributeStatement}`)
.join('\n')

return `\
@@ -390,13 +390,13 @@ export const ${dataVariableName} = [${Array.from(idToVariableNameMap.values()).j

const makeIndexMjs = ({
schemaDef,
assertStatement,
importAttributeStatement,
bundleFilePath,
options,
isDev,
}: {
schemaDef: SchemaDef
assertStatement: string
importAttributeStatement: string
bundleFilePath: RelativePosixFilePath
options: PluginOptions
isDev: boolean
@@ -413,7 +413,7 @@ const makeIndexMjs = ({
.map(({ documentDefName, dataVariableName }) =>
isDev
? `import { ${dataVariableName} } from './${documentDefName}/_index.mjs'`
: `import ${dataVariableName} from './${documentDefName}/_index.json'${assertStatement}`,
: `import ${dataVariableName} from './${documentDefName}/_index.json'${importAttributeStatement}`,
)
.join('\n')

@@ -434,7 +434,7 @@ export const fetchContent = async (sourceKey) => {
const worker = new Worker(workerFilePath, { workerData: { sourceKey } })

return new Promise((resolve, reject) => {
worker.on('message', (data) => {
if (data.result) {
resolve(data.result)
} else if (data.fatalError) {
@@ -485,7 +485,7 @@ export const makeDataTypes = ({ schemaDef, options }: { schemaDef: SchemaDef; op
if (options.experimental.enableDynamicBuild === false) return ''

return `\
export type FetchContentResult =
| { _tag: 'Error', error: SourceProvideSchemaErrorJSON | SourceFetchDataErrorJSON }
| { _tag: 'Data', data: DataExports }

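For illustration only: assuming a document type named Post with two documents (all file and variable names below are hypothetical, not taken from this PR), the makeDataExportFile template above would now emit a barrel file along these lines:

// generated/Post/_index.mjs — hypothetical output, names invented for illustration
import post__one from './post__one.json' with { type: 'json' }
import post__two from './post__two.json' with { type: 'json' }

export const allPosts = [post__one, post__two]

On Node versions where needsTypeJsonAttribute is false (older than v16.14), importAttributeStatement is the empty string, so the same imports are emitted without the attribute suffix.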