From c0d74e8ac935c6700244acf3449f9abaf4568414 Mon Sep 17 00:00:00 2001 From: Wyatt Johnson Date: Tue, 26 Nov 2024 16:00:17 -0700 Subject: [PATCH] feat: added compression to resume data cache using deflate --- .../resume-data-cache.test.ts | 43 +++++++--- .../resume-data-cache/resume-data-cache.ts | 85 ++++++++++++------- 2 files changed, 84 insertions(+), 44 deletions(-) diff --git a/packages/next/src/server/resume-data-cache/resume-data-cache.test.ts b/packages/next/src/server/resume-data-cache/resume-data-cache.test.ts index be61cb8d3853f6..5029d3dff1afca 100644 --- a/packages/next/src/server/resume-data-cache/resume-data-cache.test.ts +++ b/packages/next/src/server/resume-data-cache/resume-data-cache.test.ts @@ -5,6 +5,23 @@ import { import { createPrerenderResumeDataCache } from './resume-data-cache' import { streamFromString } from '../stream-utils/node-web-streams-helper' +function createCacheWithSingleEntry() { + const cache = createPrerenderResumeDataCache() + cache.cache.set( + 'key', + Promise.resolve({ + value: streamFromString('value'), + tags: [], + stale: 0, + timestamp: 0, + expire: 0, + revalidate: 0, + }) + ) + + return cache +} + describe('stringifyResumeDataCache', () => { it('serializes an empty cache', async () => { const cache = createPrerenderResumeDataCache() @@ -12,21 +29,9 @@ describe('stringifyResumeDataCache', () => { }) it('serializes a cache with a single entry', async () => { - const cache = createPrerenderResumeDataCache() - cache.cache.set( - 'key', - Promise.resolve({ - value: streamFromString('value'), - tags: [], - stale: 0, - timestamp: 0, - expire: 0, - revalidate: 0, - }) - ) - + const cache = createCacheWithSingleEntry() expect(await stringifyResumeDataCache(cache)).toMatchInlineSnapshot( - `"{"store":{"fetch":{},"cache":{"key":{"value":"dmFsdWU=","tags":[],"stale":0,"timestamp":0,"expire":0,"revalidate":0}}}}"` + 
`"eJwlibEKwzAMBf/lzR46C7r2D0qHkEHYSmNq02ApocHo30PS6bi7DrVvE1DHJBZnUPeAyHG+2kf2ExuXVUBI9aHp9bwjwPitoGEMUOMioFuA5SpqXJfL5Lfk9h9NNi45sZ3q7n4AKDonOw=="` ) }) }) @@ -37,4 +42,14 @@ describe('parseResumeDataCache', () => { createPrerenderResumeDataCache() ) }) + + it('parses a cache with a single entry', async () => { + const cache = createCacheWithSingleEntry() + const serialized = await stringifyResumeDataCache(cache) + + const parsed = createRenderResumeDataCache(serialized) + + expect(parsed.cache.size).toBe(1) + expect(parsed.fetch.size).toBe(0) + }) }) diff --git a/packages/next/src/server/resume-data-cache/resume-data-cache.ts b/packages/next/src/server/resume-data-cache/resume-data-cache.ts index 87aa3cf68db256..a63998e4fc2034 100644 --- a/packages/next/src/server/resume-data-cache/resume-data-cache.ts +++ b/packages/next/src/server/resume-data-cache/resume-data-cache.ts @@ -1,3 +1,4 @@ +import { InvariantError } from '../../shared/lib/invariant-error' import { type UseCacheCacheStore, type FetchCacheStore, @@ -64,22 +65,32 @@ type ResumeStoreSerialized = { export async function stringifyResumeDataCache( resumeDataCache: RenderResumeDataCache | PrerenderResumeDataCache ): Promise { - if (resumeDataCache.fetch.size === 0 && resumeDataCache.cache.size === 0) { - return 'null' - } + if (process.env.NEXT_RUNTIME === 'edge') { + throw new InvariantError( + '`stringifyResumeDataCache` should not be called in edge runtime.' 
+ ) + } else { + if (resumeDataCache.fetch.size === 0 && resumeDataCache.cache.size === 0) { + return 'null' + } - const json: ResumeStoreSerialized = { - store: { - fetch: Object.fromEntries( - stringifyFetchCacheStore(resumeDataCache.fetch.entries()) - ), - cache: Object.fromEntries( - await stringifyUseCacheCacheStore(resumeDataCache.cache.entries()) - ), - }, - } + const json: ResumeStoreSerialized = { + store: { + fetch: Object.fromEntries( + stringifyFetchCacheStore(resumeDataCache.fetch.entries()) + ), + cache: Object.fromEntries( + await stringifyUseCacheCacheStore(resumeDataCache.cache.entries()) + ), + }, + } - return JSON.stringify(json) + // Compress the JSON string using zlib. As the data we want to compress + // is already in memory, we use the synchronous deflateSync function. + const { deflateSync } = require('node:zlib') as typeof import('node:zlib') + + return deflateSync(JSON.stringify(json)).toString('base64') + } } /** @@ -114,24 +125,38 @@ export function createRenderResumeDataCache( export function createRenderResumeDataCache( prerenderResumeDataCacheOrPersistedCache: PrerenderResumeDataCache | string ): RenderResumeDataCache { - if (typeof prerenderResumeDataCacheOrPersistedCache !== 'string') { - // If the cache is already a prerender cache, we can return it directly, - // we're just performing a type change. 
+ return prerenderResumeDataCacheOrPersistedCache + } - if (prerenderResumeDataCacheOrPersistedCache === 'null') { - return { - cache: new Map(), - fetch: new Map(), + if (prerenderResumeDataCacheOrPersistedCache === 'null') { + return { + cache: new Map(), + fetch: new Map(), + } } - } - const json: ResumeStoreSerialized = JSON.parse( - prerenderResumeDataCacheOrPersistedCache - ) - return { - cache: parseUseCacheCacheStore(Object.entries(json.store.cache)), - fetch: parseFetchCacheStore(Object.entries(json.store.fetch)), + // This should be a compressed string. Let's decompress it using zlib. + // As the data we want to decompress is already in memory, we use the + // synchronous inflateSync function. + const { inflateSync } = require('node:zlib') as typeof import('node:zlib') + + const json: ResumeStoreSerialized = JSON.parse( + inflateSync( + Buffer.from(prerenderResumeDataCacheOrPersistedCache, 'base64') + ).toString('utf-8') + ) + + return { + cache: parseUseCacheCacheStore(Object.entries(json.store.cache)), + fetch: parseFetchCacheStore(Object.entries(json.store.fetch)), + } } }