
Commit

feat: multi source collection
farnabaz committed Nov 25, 2024
1 parent c96fd9d commit acfbe96
Showing 8 changed files with 97 additions and 77 deletions.
89 changes: 45 additions & 44 deletions src/module.ts
@@ -274,53 +274,54 @@ async function processCollectionItems(nuxt: Nuxt, collections: ResolvedCollectio
     if (!collection.source) {
       continue
     }
+    for await (const source of collection.source) {
+      if (source.prepare) {
+        await source.prepare(nuxt)
+      }
 
-    if (collection.source.prepare) {
-      await collection.source.prepare(nuxt)
-    }
-
-    const { fixed } = parseSourceBase(collection.source)
-    const cwd = collection.source.cwd
-    const _keys = await fastGlob(collection.source.include, { cwd, ignore: collection.source!.exclude || [], dot: true })
-      .catch(() => [])
-
-    filesCount += _keys.length
-
-    const list: Array<Array<string>> = []
-    for await (const chunk of chunks(_keys, 25)) {
-      await Promise.all(chunk.map(async (key) => {
-        key = key.substring(fixed.length)
-        const keyInCollection = join(collection.name, collection.source?.prefix || '', key)
-
-        const content = await readFile(join(cwd, fixed, key), 'utf8')
-        const checksum = getContentChecksum(configHash + collectionHash + content)
-        const cache = databaseContents[keyInCollection]
-
-        let parsedContent
-        if (cache && cache.checksum === checksum) {
-          cachedFilesCount += 1
-          parsedContent = JSON.parse(cache.parsedContent)
-        }
-        else {
-          parsedFilesCount += 1
-          parsedContent = await parseContent(keyInCollection, content, collection, nuxt)
-          db.insertDevelopmentCache(keyInCollection, checksum, JSON.stringify(parsedContent))
-        }
-
-        list.push([key, generateCollectionInsert(collection, parsedContent)])
-      }))
-    }
-    // Sort by file name to ensure consistent order
-    list.sort((a, b) => String(a[0]).localeCompare(String(b[0])))
-    collectionDump[collection.name]!.push(...list.map(([, sql]) => sql!))
+      const { fixed } = parseSourceBase(source)
+      const cwd = source.cwd
+      const _keys = await fastGlob(source.include, { cwd, ignore: source!.exclude || [], dot: true })
+        .catch(() => [])
+
+      filesCount += _keys.length
+
+      const list: Array<Array<string>> = []
+      for await (const chunk of chunks(_keys, 25)) {
+        await Promise.all(chunk.map(async (key) => {
+          key = key.substring(fixed.length)
+          const keyInCollection = join(collection.name, source?.prefix || '', key)
+
+          const content = await readFile(join(cwd, fixed, key), 'utf8')
+          const checksum = getContentChecksum(configHash + collectionHash + content)
+          const cache = databaseContents[keyInCollection]
+
+          let parsedContent
+          if (cache && cache.checksum === checksum) {
+            cachedFilesCount += 1
+            parsedContent = JSON.parse(cache.parsedContent)
+          }
+          else {
+            parsedFilesCount += 1
+            parsedContent = await parseContent(keyInCollection, content, collection, nuxt)
+            db.insertDevelopmentCache(keyInCollection, checksum, JSON.stringify(parsedContent))
+          }
+
+          list.push([key, generateCollectionInsert(collection, parsedContent)])
+        }))
+      }
+      // Sort by file name to ensure consistent order
+      list.sort((a, b) => String(a[0]).localeCompare(String(b[0])))
+      collectionDump[collection.name]!.push(...list.map(([, sql]) => sql!))
 
-    collectionChecksum[collection.name] = hash(collectionDump[collection.name])
+      collectionChecksum[collection.name] = hash(collectionDump[collection.name])
 
-    collectionDump[collection.name]!.push(
-      generateCollectionTableDefinition(infoCollection, { drop: false }),
-      `DELETE FROM ${infoCollection.tableName} WHERE id = 'checksum_${collection.name}'`,
-      generateCollectionInsert(infoCollection, { id: `checksum_${collection.name}`, version: collectionChecksum[collection.name] }),
-    )
+      collectionDump[collection.name]!.push(
+        generateCollectionTableDefinition(infoCollection, { drop: false }),
+        `DELETE FROM ${infoCollection.tableName} WHERE id = 'checksum_${collection.name}'`,
+        generateCollectionInsert(infoCollection, { id: `checksum_${collection.name}`, version: collectionChecksum[collection.name] }),
+      )
+    }
   }
 
   const sqlDumpList = Object.values(collectionDump).flatMap(a => a)
2 changes: 1 addition & 1 deletion src/runtime/internal/schema.ts
@@ -1,6 +1,6 @@
 import type { ZodRawShape } from 'zod'
 
-export function getOrderedSchemaKeys(shape: ZodRawShape) {
+export function getOrderedSchemaKeys(shape: ZodRawShape | Record<string, unknown>) {
   const keys = new Set([
     shape.id ? 'id' : undefined,
     shape.title ? 'title' : undefined,
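A quick illustration of why the parameter type was widened: callers can now pass a plain record (for example, a schema already parsed from JSON) instead of a Zod shape. This is a minimal sketch, not part of the commit; the import path and sample object are assumptions, and the expected ordering follows the Set construction shown above.

// Sketch only: getOrderedSchemaKeys inspects which keys are present,
// so a plain object works the same way as a ZodRawShape.
import { getOrderedSchemaKeys } from './schema'

const ordered = getOrderedSchemaKeys({ body: {}, title: {}, id: {}, date: {} })
// 'id' and 'title' should come first, followed by the remaining keys.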
10 changes: 5 additions & 5 deletions src/types/collection.ts
@@ -24,21 +24,21 @@ export interface ResolvedCollectionSource extends CollectionSource {

 export interface PageCollection<T extends ZodRawShape = ZodRawShape> {
   type: 'page'
-  source?: string | CollectionSource
+  source?: string | CollectionSource | CollectionSource[]
   schema?: ZodObject<T>
 }

 export interface DataCollection<T extends ZodRawShape = ZodRawShape> {
   type: 'data'
-  source?: string | CollectionSource
+  source?: string | CollectionSource | CollectionSource[]
   schema: ZodObject<T>
 }

export type Collection<T extends ZodRawShape = ZodRawShape> = PageCollection<T> | DataCollection<T>

 export interface DefinedCollection<T extends ZodRawShape = ZodRawShape> {
   type: CollectionType
-  source: ResolvedCollectionSource | undefined
+  source: ResolvedCollectionSource[] | undefined
   schema: ZodObject<T>
   extendedSchema: ZodObject<T>
   jsonFields: string[]
@@ -48,7 +48,7 @@ export interface ResolvedCollection<T extends ZodRawShape = ZodRawShape> {
   name: string
   tableName: string
   type: CollectionType
-  source: ResolvedCollectionSource | undefined
+  source: ResolvedCollectionSource[] | undefined
   schema: ZodObject<T>
   extendedSchema: ZodObject<T>
   jsonFields: string[]
@@ -63,7 +63,7 @@ export interface CollectionInfo {
   name: string
   pascalName: string
   tableName: string
-  source: CollectionSource
+  source: CollectionSource[]
   type: CollectionType
   schema: JsonSchema7Type & {
     $schema?: string
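Taken together, these type changes let a single collection declare several sources. A hypothetical usage sketch follows; the config file name, import path, and exact defineCollection call shape are assumptions, not part of this diff.

// content.config.ts, illustrative only
import { defineCollection } from '@nuxt/content'

export const docs = defineCollection({
  type: 'page',
  // source now also accepts an array of CollectionSource objects
  source: [
    { include: 'docs/**', prefix: 'docs' },
    { include: 'guides/**', prefix: 'guides', exclude: ['guides/drafts/**'] },
  ],
})

Each entry is resolved independently, so one collection can mix several include patterns, prefixes, and exclusions.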
7 changes: 7 additions & 0 deletions src/types/module.ts
@@ -178,3 +178,10 @@ export interface RuntimeConfig {
     localDatabase: SqliteDatabaseConfig
   }
 }
+
+export interface PublicRuntimeConfig {
+  studio: {
+    apiURL?: string
+    iframeMessagingAllowedOrigins?: string
+  }
+}
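The new PublicRuntimeConfig augmentation corresponds to values set under runtimeConfig.public in a project's Nuxt config. A hedged sketch with placeholder values (the URLs are made up):

// nuxt.config.ts, illustrative values only
export default defineNuxtConfig({
  runtimeConfig: {
    public: {
      studio: {
        apiURL: 'https://api.example.com',
        iframeMessagingAllowedOrigins: 'https://nuxt.studio',
      },
    },
  },
})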
22 changes: 13 additions & 9 deletions src/utils/collection.ts
@@ -73,24 +73,28 @@ export function resolveCollections(collections: Record<string, DefinedCollection
 /**
  * Process collection source and return refined source
  */
-function resolveSource(source: string | CollectionSource | undefined): ResolvedCollectionSource | undefined {
+function resolveSource(source: string | CollectionSource | CollectionSource[] | undefined): ResolvedCollectionSource[] | undefined {
   if (!source) {
     return undefined
   }
 
   if (typeof source === 'string') {
-    return defineLocalSource({ include: source })
+    return [defineLocalSource({ include: source })]
   }
 
-  if ((source as ResolvedCollectionSource)._resolved) {
-    return source as ResolvedCollectionSource
-  }
+  const sources: CollectionSource[] = Array.isArray(source) ? source : [source]
 
-  if (source.repository) {
-    return defineGitHubSource(source)
-  }
+  return sources.map((source) => {
+    if ((source as ResolvedCollectionSource)._resolved) {
+      return source as ResolvedCollectionSource
+    }
 
-  return defineLocalSource(source)
+    if (source.repository) {
+      return defineGitHubSource(source)
+    }
+
+    return defineLocalSource(source)
+  })
 }

// Convert collection data to SQL insert statement
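To summarise the reworked resolveSource (a module-internal helper), a few illustrative calls are sketched below; the globs and repository URL are made up.

// Expected behaviour of resolveSource after this change (illustrative, not from the diff)
resolveSource(undefined)              // undefined
resolveSource('docs/**')              // [resolved local source]
resolveSource({ include: 'blog/**' }) // [resolved local source], a single object is wrapped in an array
resolveSource([
  { include: 'docs/**' },
  { include: 'blog/**', repository: 'https://github.com/org/content' },
])                                    // [local source, GitHub source]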
25 changes: 16 additions & 9 deletions src/utils/dev.ts
@@ -26,7 +26,12 @@ export async function watchContents(nuxt: Nuxt, options: ModuleOptions, manifest
   const db = localDatabase(options._localDatabase!.filename)
   const collections = manifest.collections
 
-  const localCollections = collections.filter(c => c.source && !c.source.repository)
+  const sourceMap = collections.flatMap((c) => {
+    return c.source
+      ? c.source.filter(s => !s.repository).map(s => ({ collection: c, source: s }))
+      : []
+  })
+  // const localCollections = collections.filter(c => c.source && !c.source.repository)
 
   const watcher = chokidar.watch('.', { ignoreInitial: true, cwd })

@@ -85,13 +90,14 @@
   }
 
   async function onChange(path: string) {
-    const collection = localCollections.find(({ source }) => micromatch.isMatch(path, source!.include, { ignore: source!.exclude || [], dot: true }))
-    if (collection) {
+    const match = sourceMap.find(({ source }) => micromatch.isMatch(path, source!.include, { ignore: source!.exclude || [], dot: true }))
+    if (match) {
+      const { collection, source } = match
       logger.info(`File \`${path}\` changed on \`${collection.name}\` collection`)
-      const { fixed } = parseSourceBase(collection.source!)
+      const { fixed } = parseSourceBase(source)
 
       const filePath = path.substring(fixed.length)
-      const keyInCollection = join(collection.name, collection.source?.prefix || '', filePath)
+      const keyInCollection = join(collection.name, source?.prefix || '', filePath)
 
       const content = await readFile(join(nuxt.options.rootDir, 'content', path), 'utf8')
       const checksum = getContentChecksum(content)
@@ -113,13 +119,14 @@
   }
 
   async function onRemove(path: string) {
-    const collection = localCollections.find(({ source }) => micromatch.isMatch(path, source!.include, { ignore: source!.exclude || [], dot: true }))
-    if (collection) {
+    const match = sourceMap.find(({ source }) => micromatch.isMatch(path, source!.include, { ignore: source!.exclude || [], dot: true }))
+    if (match) {
+      const { collection, source } = match
       logger.info(`File \`${path}\` removed from \`${collection.name}\` collection`)
-      const { fixed } = parseSourceBase(collection.source!)
+      const { fixed } = parseSourceBase(source)
 
       const filePath = path.substring(fixed.length)
-      const keyInCollection = join(collection.name, collection.source?.prefix || '', filePath)
+      const keyInCollection = join(collection.name, source?.prefix || '', filePath)
 
       await db.deleteDevelopmentCache(keyInCollection)
 
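The watcher now matches a changed path against every (collection, source) pair rather than one source per collection. Below is a small self-contained sketch of that matching step, mirroring the onChange/onRemove logic above; the patterns, path, and collection names are made up.

// Per-source matching sketch (illustrative data only)
import micromatch from 'micromatch'

const sourceMap = [
  { collection: 'docs', source: { include: 'docs/**', exclude: ['docs/drafts/**'] } },
  { collection: 'docs', source: { include: 'guides/**', exclude: [] } },
  { collection: 'blog', source: { include: 'blog/**', exclude: [] } },
]

const changed = 'guides/getting-started.md'
const match = sourceMap.find(({ source }) =>
  micromatch.isMatch(changed, source.include, { ignore: source.exclude, dot: true }),
)
console.log(match?.collection) // 'docs'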
3 changes: 2 additions & 1 deletion test/base.test.ts
@@ -43,7 +43,8 @@ describe('empty', async () => {
     expect(pagesCollection).toBeDefined()
     expect(pagesCollection?.type).toBe('page')
     expect(pagesCollection?.source).toBeDefined()
-    expect(pagesCollection?.source?.include).toBe('**/*')
+    expect(pagesCollection?.source[0]).toBeDefined()
+    expect(pagesCollection?.source[0].include).toBe('**/*')
})
})

16 changes: 8 additions & 8 deletions test/unit/defineCollection.test.ts
@@ -17,11 +17,11 @@ describe('defineCollection', () => {
     })
     expect(collection).toMatchObject({
       type: 'page',
-      source: {
+      source: [{
         _resolved: true,
         include: 'pages/**',
         cwd: '',
-      },
+      }],
     })

expect(collection.schema.shape).not.ownProperty('title')
@@ -61,12 +61,12 @@

     expect(collection).toMatchObject({
       type: 'page',
-      source: {
+      source: [{
         include: 'pages/**',
         prefix: 'blog',
         exclude: ['pages/blog/index.md'],
         cwd: '',
-      },
+      }],
     })

expect(collection.schema.shape).ownProperty('customField')
@@ -86,11 +86,11 @@

     expect(collection).toMatchObject({
       type: 'data',
-      source: {
+      source: [{
         _resolved: true,
         include: 'data/**',
         cwd: '',
-      },
+      }],
     })

expect(collection.schema.shape).toHaveProperty('customField')
@@ -115,12 +115,12 @@

     expect(collection).toMatchObject({
       type: 'data',
-      source: {
+      source: [{
         include: 'data/**',
         cwd: '',
         prefix: 'blog',
         exclude: ['data/blog/index.md'],
-      },
+      }],
     })
})
})
