diff --git a/packages/next/src/server/app-render/postponed-state.test.ts b/packages/next/src/server/app-render/postponed-state.test.ts index dbc03f17760b3..53b3375a4bb7c 100644 --- a/packages/next/src/server/app-render/postponed-state.test.ts +++ b/packages/next/src/server/app-render/postponed-state.test.ts @@ -1,5 +1,8 @@ import { createPrerenderResumeDataCache } from '../resume-data-cache/resume-data-cache' -import { streamFromString } from '../stream-utils/node-web-streams-helper' +import { + streamFromString, + streamToString, +} from '../stream-utils/node-web-streams-helper' import { DynamicState, getDynamicDataPostponedState, @@ -31,9 +34,30 @@ describe('getDynamicHTMLPostponedState', () => { prerenderResumeDataCache ) - expect(state).toMatchInlineSnapshot( - `"169:39[["slug","%%drp:slug:e9615126684e5%%"]]{"%%drp:slug:e9615126684e5%%":"%%drp:slug:e9615126684e5%%","nested":{"%%drp:slug:e9615126684e5%%":"%%drp:slug:e9615126684e5%%"}}{"store":{"fetch":{},"cache":{"1":{"value":"aGVsbG8=","tags":[],"stale":0,"timestamp":0,"expire":0,"revalidate":0}}}}"` - ) + const parsed = parsePostponedState(state, { slug: '123' }) + expect(parsed).toMatchInlineSnapshot(` + { + "data": { + "123": "123", + "nested": { + "123": "123", + }, + }, + "renderResumeDataCache": { + "cache": Map { + "1" => Promise {}, + }, + "fetch": Map {}, + }, + "type": 2, + } + `) + + const value = await parsed.renderResumeDataCache.cache.get('1') + + expect(value).toBeDefined() + + await expect(streamToString(value!.value)).resolves.toEqual('hello') }) it('serializes a HTML postponed state without fallback params', async () => { diff --git a/packages/next/src/server/resume-data-cache/resume-data-cache.test.ts b/packages/next/src/server/resume-data-cache/resume-data-cache.test.ts index be61cb8d3853f..b4bb00beebb83 100644 --- a/packages/next/src/server/resume-data-cache/resume-data-cache.test.ts +++ b/packages/next/src/server/resume-data-cache/resume-data-cache.test.ts @@ -4,6 +4,24 @@ import { } from 
'./resume-data-cache' import { createPrerenderResumeDataCache } from './resume-data-cache' import { streamFromString } from '../stream-utils/node-web-streams-helper' +import { inflateSync } from 'node:zlib' + +function createCacheWithSingleEntry() { + const cache = createPrerenderResumeDataCache() + cache.cache.set( + 'key', + Promise.resolve({ + value: streamFromString('value'), + tags: [], + stale: 0, + timestamp: 0, + expire: 0, + revalidate: 0, + }) + ) + + return cache +} describe('stringifyResumeDataCache', () => { it('serializes an empty cache', async () => { @@ -12,20 +30,17 @@ describe('stringifyResumeDataCache', () => { }) it('serializes a cache with a single entry', async () => { - const cache = createPrerenderResumeDataCache() - cache.cache.set( - 'key', - Promise.resolve({ - value: streamFromString('value'), - tags: [], - stale: 0, - timestamp: 0, - expire: 0, - revalidate: 0, - }) - ) + const cache = createCacheWithSingleEntry() + const compressed = await stringifyResumeDataCache(cache) + + // We have to decompress the output because the compressed string is not + // deterministic. If it fails here it's because the compressed string is + // different. 
+ const decompressed = inflateSync( + Buffer.from(compressed, 'base64') + ).toString('utf-8') - expect(await stringifyResumeDataCache(cache)).toMatchInlineSnapshot( + expect(decompressed).toMatchInlineSnapshot( `"{"store":{"fetch":{},"cache":{"key":{"value":"dmFsdWU=","tags":[],"stale":0,"timestamp":0,"expire":0,"revalidate":0}}}}"` ) }) @@ -37,4 +52,14 @@ describe('parseResumeDataCache', () => { createPrerenderResumeDataCache() ) }) + + it('parses a cache with a single entry', async () => { + const cache = createCacheWithSingleEntry() + const serialized = await stringifyResumeDataCache(cache) + + const parsed = createRenderResumeDataCache(serialized) + + expect(parsed.cache.size).toBe(1) + expect(parsed.fetch.size).toBe(0) + }) }) diff --git a/packages/next/src/server/resume-data-cache/resume-data-cache.ts b/packages/next/src/server/resume-data-cache/resume-data-cache.ts index 87aa3cf68db25..a63998e4fc203 100644 --- a/packages/next/src/server/resume-data-cache/resume-data-cache.ts +++ b/packages/next/src/server/resume-data-cache/resume-data-cache.ts @@ -1,3 +1,4 @@ +import { InvariantError } from '../../shared/lib/invariant-error' import { type UseCacheCacheStore, type FetchCacheStore, @@ -64,22 +65,32 @@ type ResumeStoreSerialized = { export async function stringifyResumeDataCache( resumeDataCache: RenderResumeDataCache | PrerenderResumeDataCache ): Promise { - if (resumeDataCache.fetch.size === 0 && resumeDataCache.cache.size === 0) { - return 'null' - } + if (process.env.NEXT_RUNTIME === 'edge') { + throw new InvariantError( + '`stringifyResumeDataCache` should not be called in edge runtime.' 
+ ) + } else { + if (resumeDataCache.fetch.size === 0 && resumeDataCache.cache.size === 0) { + return 'null' + } - const json: ResumeStoreSerialized = { - store: { - fetch: Object.fromEntries( - stringifyFetchCacheStore(resumeDataCache.fetch.entries()) - ), - cache: Object.fromEntries( - await stringifyUseCacheCacheStore(resumeDataCache.cache.entries()) - ), - }, - } + const json: ResumeStoreSerialized = { + store: { + fetch: Object.fromEntries( + stringifyFetchCacheStore(resumeDataCache.fetch.entries()) + ), + cache: Object.fromEntries( + await stringifyUseCacheCacheStore(resumeDataCache.cache.entries()) + ), + }, + } - return JSON.stringify(json) + // Compress the JSON string using zlib. As the data we want to compress + // is already in memory, we use the synchronous deflateSync function. + const { deflateSync } = require('node:zlib') as typeof import('node:zlib') + + return deflateSync(JSON.stringify(json)).toString('base64') + } } /** @@ -114,24 +125,38 @@ export function createRenderResumeDataCache( export function createRenderResumeDataCache( prerenderResumeDataCacheOrPersistedCache: PrerenderResumeDataCache | string ): RenderResumeDataCache { - if (typeof prerenderResumeDataCacheOrPersistedCache !== 'string') { - // If the cache is already a prerender cache, we can return it directly, - // we're just performing a type change. - return prerenderResumeDataCacheOrPersistedCache - } + if (process.env.NEXT_RUNTIME === 'edge') { + throw new InvariantError( + '`createRenderResumeDataCache` should not be called in edge runtime.' + ) + } else { + if (typeof prerenderResumeDataCacheOrPersistedCache !== 'string') { + // If the cache is already a prerender cache, we can return it directly, + // we're just performing a type change. 
+ return prerenderResumeDataCacheOrPersistedCache + } - if (prerenderResumeDataCacheOrPersistedCache === 'null') { - return { - cache: new Map(), - fetch: new Map(), + if (prerenderResumeDataCacheOrPersistedCache === 'null') { + return { + cache: new Map(), + fetch: new Map(), + } } - } - const json: ResumeStoreSerialized = JSON.parse( - prerenderResumeDataCacheOrPersistedCache - ) - return { - cache: parseUseCacheCacheStore(Object.entries(json.store.cache)), - fetch: parseFetchCacheStore(Object.entries(json.store.fetch)), + // This should be a compressed string. Let's decompress it using zlib. + // As the data we want to decompress is already in memory, we use the + // synchronous inflateSync function. + const { inflateSync } = require('node:zlib') as typeof import('node:zlib') + + const json: ResumeStoreSerialized = JSON.parse( + inflateSync( + Buffer.from(prerenderResumeDataCacheOrPersistedCache, 'base64') + ).toString('utf-8') + ) + + return { + cache: parseUseCacheCacheStore(Object.entries(json.store.cache)), + fetch: parseFetchCacheStore(Object.entries(json.store.fetch)), + } } }