From 8818f5516ca909cc941deb953b6359030a8c0301 Mon Sep 17 00:00:00 2001 From: MrBBot Date: Fri, 28 Apr 2023 13:20:16 +0100 Subject: [PATCH] Improve Workers Sites asset sync reliability (#3098) * Improve Workers Sites asset sync reliability - When splitting upload into buckets, just store the file path, not the full content. This means we have to read the contents twice, but avoids buffering all content before uploading, causing OOMs. - Limit in-flight bulk upload requests to 5, avoiding `Too many bulk operations already in progress.` error. - Fix logging of upload progress. Previous, progress was logged per upload bucket, which doesn't really make sense to end users. Now, upload progress is across all files in all buckets. - Only log first 100 changed assets by default. The rest can be shown by setting `WRANGLER_LOG=debug`. This avoids console spam when uploading sites with 1000s of files. A little bit of colour has also been added to the diff. :) Closes #2223 Closes #2245 * fixup! Improve Workers Sites asset sync reliability Move fetching list log * fixup! Improve Workers Sites asset sync reliability Read files for upload in serial * fixup! Improve Workers Sites asset sync reliability Add test for upload failing * fixup! Improve Workers Sites asset sync reliability Ensure publish tests not dependent on bucket upload order --- .changeset/clever-radios-cover.md | 10 + .../wrangler/src/__tests__/publish.test.ts | 794 ++++++++++++++---- packages/wrangler/src/kv/helpers.ts | 11 +- packages/wrangler/src/sites.ts | 216 +++-- 4 files changed, 791 insertions(+), 240 deletions(-) create mode 100644 .changeset/clever-radios-cover.md diff --git a/.changeset/clever-radios-cover.md b/.changeset/clever-radios-cover.md new file mode 100644 index 000000000000..2a5c81f62300 --- /dev/null +++ b/.changeset/clever-radios-cover.md @@ -0,0 +1,10 @@ +--- +"wrangler": minor +--- + +fix: improve Workers Sites asset upload reliability + +- Wrangler no longer buffers all assets into memory before uploading. This should prevent out-of-memory errors when publishing sites with many large files. +- Wrangler now limits the number of in-flight asset upload requests to 5, fixing the `Too many bulk operations already in progress` error. +- Wrangler now correctly logs upload progress. Previously, the reported percentage was per upload request group, not across all assets. +- Wrangler no longer logs all assets to the console by default. Instead, it will just log the first 100. The rest can be shown by setting the `WRANGLER_LOG=debug` environment variable. A splash of colour has also been added. diff --git a/packages/wrangler/src/__tests__/publish.test.ts b/packages/wrangler/src/__tests__/publish.test.ts index df3abef4fee8..5886ce7771d2 100644 --- a/packages/wrangler/src/__tests__/publish.test.ts +++ b/packages/wrangler/src/__tests__/publish.test.ts @@ -10,6 +10,7 @@ import { printBundleSize, printOffendingDependencies, } from "../bundle-reporter"; +import { logger } from "../logger"; import { writeAuthConfigFile } from "../user"; import { mockAccountId, mockApiToken } from "./helpers/mock-account-id"; import { mockAuthDomain } from "./helpers/mock-auth-domain"; @@ -68,6 +69,7 @@ describe("publish", () => { setIsTTY(true); mockLastDeploymentRequest(); mockDeploymentsListRequest(); + logger.loggerLevel = "log"; }); afterEach(() => { @@ -1551,12 +1553,13 @@ addEventListener('fetch', event => {});` Object { "debug": "", "err": "", - "info": "", - "out": "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... 
- Reading file-2.txt... - Uploading as file-2.5938485188.txt... - ↗️ Done syncing assets + "info": "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-1.2ca234f380.txt (uploading new version of file-1.txt) + + file-2.5938485188.txt (uploading new version of file-2.txt) + Uploading 2 new assets... + Uploaded 100% [2 out of 2]", + "out": "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -1604,12 +1607,16 @@ addEventListener('fetch', event => {});` mockUploadAssetsToKVRequest(kvNamespace.id, assets); await runWrangler("publish --config ./my-site/wrangler.toml"); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-1.2ca234f380.txt (uploading new version of file-1.txt) + + file-2.5938485188.txt (uploading new version of file-2.txt) + Uploading 2 new assets... + Uploaded 100% [2 out of 2]" + `); expect(std.out).toMatchInlineSnapshot(` - "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... - Reading file-2.txt... - Uploading as file-2.5938485188.txt... - ↗️ Done syncing assets + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -1695,12 +1702,13 @@ addEventListener('fetch', event => {});` Object { "debug": "", "err": "", - "info": "", - "out": "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... - Reading file-2.txt... - Uploading as file-2.5938485188.txt... - ↗️ Done syncing assets + "info": "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-1.2ca234f380.txt (uploading new version of file-1.txt) + + file-2.5938485188.txt (uploading new version of file-2.txt) + Uploading 2 new assets... + Uploaded 100% [2 out of 2]", + "out": "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -1744,12 +1752,16 @@ addEventListener('fetch', event => {});` mockUploadAssetsToKVRequest(kvNamespace.id, assets); await runWrangler("publish"); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-1.2ca234f380.txt (uploading new version of file-1.txt) + + file-2.5938485188.txt (uploading new version of file-2.txt) + Uploading 2 new assets... + Uploaded 100% [2 out of 2]" + `); expect(std.out).toMatchInlineSnapshot(` - "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... - Reading file-2.txt... - Uploading as file-2.5938485188.txt... - ↗️ Done syncing assets + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -1786,12 +1798,13 @@ addEventListener('fetch', event => {});` Object { "debug": "", "err": "", - "info": "", - "out": "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... - Reading file-2.txt... - Uploading as file-2.5938485188.txt... - ↗️ Done syncing assets + "info": "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-1.2ca234f380.txt (uploading new version of file-1.txt) + + file-2.5938485188.txt (uploading new version of file-2.txt) + Uploading 2 new assets... 
+ Uploaded 100% [2 out of 2]", + "out": "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -1977,12 +1990,13 @@ addEventListener('fetch', event => {});` Object { "debug": "", "err": "", - "info": "", - "out": "Reading subdir/file-1.txt... - Uploading as subdir/file-1.2ca234f380.txt... - Reading subdir/file-2.txt... - Uploading as subdir/file-2.5938485188.txt... - ↗️ Done syncing assets + "info": "Fetching list of already uploaded assets... + Building list of assets to upload... + + subdir/file-1.2ca234f380.txt (uploading new version of subdir/file-1.txt) + + subdir/file-2.5938485188.txt (uploading new version of subdir/file-2.txt) + Uploading 2 new assets... + Uploaded 100% [2 out of 2]", + "out": "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -2025,12 +2039,13 @@ addEventListener('fetch', event => {});` Object { "debug": "", "err": "", - "info": "", - "out": "Reading subdir/file-1.txt... - Uploading as subdir/file-1.2ca234f380.txt... - Reading subdir/file-2.txt... - Uploading as subdir/file-2.5938485188.txt... - ↗️ Done syncing assets + "info": "Fetching list of already uploaded assets... + Building list of assets to upload... + + subdir/file-1.2ca234f380.txt (uploading new version of subdir/file-1.txt) + + subdir/file-2.5938485188.txt (uploading new version of subdir/file-2.txt) + Uploading 2 new assets... + Uploaded 100% [2 out of 2]", + "out": "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -2082,12 +2097,16 @@ addEventListener('fetch', event => {});` await runWrangler("publish"); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + + subdir/file-1.2ca234f380.txt (uploading new version of subdir/file-1.txt) + + subdir/file-2.5938485188.txt (uploading new version of subdir/file-2.txt) + Uploading 2 new assets... + Uploaded 100% [2 out of 2]" + `); expect(std.out).toMatchInlineSnapshot(` - "Reading subdir/file-1.txt... - Uploading as subdir/file-1.2ca234f380.txt... - Reading subdir/file-2.txt... - Uploading as subdir/file-2.5938485188.txt... - ↗️ Done syncing assets + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -2140,12 +2159,16 @@ addEventListener('fetch', event => {});` await runWrangler("publish"); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-1.2ca234f380.txt (uploading new version of file-1.txt) + + file-2.5938485188.txt (uploading new version of file-2.txt) + Uploading 2 new assets... + Uploaded 100% [2 out of 2]" + `); expect(std.out).toMatchInlineSnapshot(` - "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... - Reading file-2.txt... - Uploading as file-2.5938485188.txt... - ↗️ Done syncing assets + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -2192,12 +2215,16 @@ addEventListener('fetch', event => {});` await runWrangler("publish"); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-1.2ca234f380.txt (uploading new version of file-1.txt) + + file-2.5938485188.txt (uploading new version of file-2.txt) + Uploading 2 new assets... 
+ Uploaded 100% [2 out of 2]" + `); expect(std.out).toMatchInlineSnapshot(` - "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... - Reading file-2.txt... - Uploading as file-2.5938485188.txt... - ↗️ Done syncing assets + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -2242,12 +2269,16 @@ addEventListener('fetch', event => {});` mockUploadAssetsToKVRequest(kvNamespace.id, assets); await runWrangler("publish --env some-env --legacy-env false"); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-1.2ca234f380.txt (uploading new version of file-1.txt) + + file-2.5938485188.txt (uploading new version of file-2.txt) + Uploading 2 new assets... + Uploaded 100% [2 out of 2]" + `); expect(std.out).toMatchInlineSnapshot(` - "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... - Reading file-2.txt... - Uploading as file-2.5938485188.txt... - ↗️ Done syncing assets + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (some-env) (TIMINGS) Published test-name (some-env) (TIMINGS) @@ -2293,12 +2324,16 @@ addEventListener('fetch', event => {});` mockUploadAssetsToKVRequest(kvNamespace.id, assets); await runWrangler("publish --env some-env --legacy-env true"); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-1.2ca234f380.txt (uploading new version of file-1.txt) + + file-2.5938485188.txt (uploading new version of file-2.txt) + Uploading 2 new assets... + Uploaded 100% [2 out of 2]" + `); expect(std.out).toMatchInlineSnapshot(` - "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... - Reading file-2.txt... - Uploading as file-2.5938485188.txt... - ↗️ Done syncing assets + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name-some-env (TIMINGS) Published test-name-some-env (TIMINGS) @@ -2338,11 +2373,7 @@ addEventListener('fetch', event => {});` await runWrangler("publish"); expect(std.out).toMatchInlineSnapshot(` - "Reading file-1.txt... - Skipping - already uploaded. - Reading file-2.txt... - Uploading as file-2.5938485188.txt... - ↗️ Done syncing assets + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -2380,10 +2411,15 @@ addEventListener('fetch', event => {});` ); await runWrangler("publish --site-include file-1.txt"); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-1.2ca234f380.txt (uploading new version of file-1.txt) + Uploading 1 new asset... + Uploaded 100% [1 out of 1]" + `); expect(std.out).toMatchInlineSnapshot(` - "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... - ↗️ Done syncing assets + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -2421,10 +2457,15 @@ addEventListener('fetch', event => {});` ); await runWrangler("publish --site-exclude file-2.txt"); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-1.2ca234f380.txt (uploading new version of file-1.txt) + Uploading 1 new asset... + Uploaded 100% [1 out of 1]" + `); expect(std.out).toMatchInlineSnapshot(` - "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... 
- ↗️ Done syncing assets + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -2463,10 +2504,15 @@ addEventListener('fetch', event => {});` ); await runWrangler("publish"); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-1.2ca234f380.txt (uploading new version of file-1.txt) + Uploading 1 new asset... + Uploaded 100% [1 out of 1]" + `); expect(std.out).toMatchInlineSnapshot(` - "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... - ↗️ Done syncing assets + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -2505,10 +2551,15 @@ addEventListener('fetch', event => {});` ); await runWrangler("publish"); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-1.2ca234f380.txt (uploading new version of file-1.txt) + Uploading 1 new asset... + Uploaded 100% [1 out of 1]" + `); expect(std.out).toMatchInlineSnapshot(` - "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... - ↗️ Done syncing assets + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -2547,10 +2598,15 @@ addEventListener('fetch', event => {});` ); await runWrangler("publish --site-include file-1.txt"); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-1.2ca234f380.txt (uploading new version of file-1.txt) + Uploading 1 new asset... + Uploaded 100% [1 out of 1]" + `); expect(std.out).toMatchInlineSnapshot(` - "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... - ↗️ Done syncing assets + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -2589,10 +2645,15 @@ addEventListener('fetch', event => {});` ); await runWrangler("publish --site-exclude file-2.txt"); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-1.2ca234f380.txt (uploading new version of file-1.txt) + Uploading 1 new asset... + Uploaded 100% [1 out of 1]" + `); expect(std.out).toMatchInlineSnapshot(` - "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... - ↗️ Done syncing assets + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -2633,10 +2694,15 @@ addEventListener('fetch', event => {});` mockUploadAssetsToKVRequest(kvNamespace.id, assets.slice(0, 1)); await runWrangler("publish"); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + + directory-1/file-1.2ca234f380.txt (uploading new version of directory-1/file-1.txt) + Uploading 1 new asset... + Uploaded 100% [1 out of 1]" + `); expect(std.out).toMatchInlineSnapshot(` - "Reading directory-1/file-1.txt... - Uploading as directory-1/file-1.2ca234f380.txt... 
- ↗️ Done syncing assets + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -2681,10 +2747,15 @@ addEventListener('fetch', event => {});` mockUploadAssetsToKVRequest(kvNamespace.id, assets.slice(2)); await runWrangler("publish"); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + + .well-known/file-2.5938485188.txt (uploading new version of .well-known/file-2.txt) + Uploading 1 new asset... + Uploaded 100% [1 out of 1]" + `); expect(std.out).toMatchInlineSnapshot(` - "Reading .well-known/file-2.txt... - Uploading as .well-known/file-2.5938485188.txt... - ↗️ Done syncing assets + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -2730,13 +2801,15 @@ addEventListener('fetch', event => {});` `"File too-large-file.txt is too big, it should be under 25 MiB. See https://developers.cloudflare.com/workers/platform/limits#kv-limits"` ); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + + large-file.0ea0637a45.txt (uploading new version of large-file.txt)" + `); expect(std.out).toMatchInlineSnapshot(` - "Reading large-file.txt... - Uploading as large-file.0ea0637a45.txt... - Reading too-large-file.txt... - - If you think this is a bug then please create an issue at https://github.com/cloudflare/workers-sdk/issues/new/choose" - `); + " + If you think this is a bug then please create an issue at https://github.com/cloudflare/workers-sdk/issues/new/choose" + `); expect(std.err).toMatchInlineSnapshot(` "X [ERROR] File too-large-file.txt is too big, it should be under 25 MiB. See https://developers.cloudflare.com/workers/platform/limits#kv-limits @@ -2772,7 +2845,9 @@ addEventListener('fetch', event => {});` await runWrangler("publish"); // We expect this to be uploaded in 4 batches - + expect(requests.length).toEqual(4); + // Buckets may be uploaded in any order, so sort them before we assert + requests.sort((a, b) => a.uploads[0].key.localeCompare(b.uploads[0].key)); // The first batch has 11 files expect(requests[0].uploads.length).toEqual(11); // The next batch has 5 files @@ -2790,60 +2865,49 @@ addEventListener('fetch', event => {});` } } - expect(std).toMatchInlineSnapshot(` - Object { - "debug": "", - "err": "", - "info": "", - "out": "Reading file-00.txt... - Uploading as file-00.be5be5dd26.txt... - Reading file-01.txt... - Uploading as file-01.4842d35994.txt... - Reading file-02.txt... - Uploading as file-02.990572ec63.txt... - Reading file-03.txt... - Uploading as file-03.9d7dda9045.txt... - Reading file-04.txt... - Uploading as file-04.2b6fac6382.txt... - Reading file-05.txt... - Uploading as file-05.55762dc758.txt... - Reading file-06.txt... - Uploading as file-06.f408a6b020.txt... - Reading file-07.txt... - Uploading as file-07.64c051715b.txt... - Reading file-08.txt... - Uploading as file-08.d286789adb.txt... - Reading file-09.txt... - Uploading as file-09.6838c183a8.txt... - Reading file-10.txt... - Uploading as file-10.6e03221d2a.txt... - Reading file-11.txt... - Uploading as file-11.37d3fb2eff.txt... - Reading file-12.txt... - Uploading as file-12.b3556942f8.txt... - Reading file-13.txt... - Uploading as file-13.680caf51b1.txt... - Reading file-14.txt... - Uploading as file-14.51e88468f0.txt... - Reading file-15.txt... - Uploading as file-15.8e3fedb394.txt... 
- Reading file-16.txt... - Uploading as file-16.c81c5e426f.txt... - Reading file-17.txt... - Uploading as file-17.4b2ae3c47b.txt... - Reading file-18.txt... - Uploading as file-18.07f245e02b.txt... - Reading file-19.txt... - Uploading as file-19.f0d69f705d.txt... - ↗️ Done syncing assets + expect(std.debug).toMatchInlineSnapshot(`""`); + expect(std.out).toMatchInlineSnapshot(` + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) https://test-name.test-sub-domain.workers.dev - Current Deployment ID: Galaxy-Class", - "warn": "", - } + Current Deployment ID: Galaxy-Class" + `); + // Mask all but last upload progress message as upload order unknown + // (regexp replaces all single/double-digit percentages, i.e. not 100%) + expect(std.info.replace(/Uploaded \d\d?% \[\d+/g, "Uploaded X% [X")) + .toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-00.be5be5dd26.txt (uploading new version of file-00.txt) + + file-01.4842d35994.txt (uploading new version of file-01.txt) + + file-02.990572ec63.txt (uploading new version of file-02.txt) + + file-03.9d7dda9045.txt (uploading new version of file-03.txt) + + file-04.2b6fac6382.txt (uploading new version of file-04.txt) + + file-05.55762dc758.txt (uploading new version of file-05.txt) + + file-06.f408a6b020.txt (uploading new version of file-06.txt) + + file-07.64c051715b.txt (uploading new version of file-07.txt) + + file-08.d286789adb.txt (uploading new version of file-08.txt) + + file-09.6838c183a8.txt (uploading new version of file-09.txt) + + file-10.6e03221d2a.txt (uploading new version of file-10.txt) + + file-11.37d3fb2eff.txt (uploading new version of file-11.txt) + + file-12.b3556942f8.txt (uploading new version of file-12.txt) + + file-13.680caf51b1.txt (uploading new version of file-13.txt) + + file-14.51e88468f0.txt (uploading new version of file-14.txt) + + file-15.8e3fedb394.txt (uploading new version of file-15.txt) + + file-16.c81c5e426f.txt (uploading new version of file-16.txt) + + file-17.4b2ae3c47b.txt (uploading new version of file-17.txt) + + file-18.07f245e02b.txt (uploading new version of file-18.txt) + + file-19.f0d69f705d.txt (uploading new version of file-19.txt) + Uploading 20 new assets... + Uploaded X% [X out of 20] + Uploaded X% [X out of 20] + Uploaded X% [X out of 20] + Uploaded 100% [20 out of 20]" `); + expect(std.warn).toMatchInlineSnapshot(`""`); + expect(std.err).toMatchInlineSnapshot(`""`); }); it("should error if the asset key is over 512 characters", async () => { @@ -2874,11 +2938,14 @@ addEventListener('fetch', event => {});` `"The asset path key \\"folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/file.3da0d0cd12.txt\\" exceeds the maximum key size limit of 512. 
See https://developers.cloudflare.com/workers/platform/limits#kv-limits\\","` ); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload..." + `); expect(std.out).toMatchInlineSnapshot(` - "Reading folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/file.txt... - - If you think this is a bug then please create an issue at https://github.com/cloudflare/workers-sdk/issues/new/choose" - `); + " + If you think this is a bug then please create an issue at https://github.com/cloudflare/workers-sdk/issues/new/choose" + `); expect(std.err).toMatchInlineSnapshot(` "X [ERROR] The asset path key \\"folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/folder/file.3da0d0cd12.txt\\" exceeds the maximum key size limit of 512. See https://developers.cloudflare.com/workers/platform/limits#kv-limits\\", @@ -2928,14 +2995,20 @@ addEventListener('fetch', event => {});` await runWrangler("publish"); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + = file-1.2ca234f380.txt (already uploaded file-1.txt) + + file-2.5938485188.txt (uploading new version of file-2.txt) + - file-3.somehash.txt (removing as stale) + - file-4.anotherhash.txt (removing as stale) + Uploading 1 new asset... + Skipped uploading 1 existing asset. + Uploaded 100% [1 out of 1] + Removing 2 stale assets..." + `); expect(std.out).toMatchInlineSnapshot(` - "Reading file-1.txt... - Skipping - already uploaded. - Reading file-2.txt... - Uploading as file-2.5938485188.txt... - Deleting file-3.somehash.txt from the asset store... - Deleting file-4.anotherhash.txt from the asset store... - ↗️ Done syncing assets + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -2985,12 +3058,16 @@ addEventListener('fetch', event => {});` await runWrangler("publish"); process.chdir("../"); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... 
+ + file-1.2ca234f380.txt (uploading new version of file-1.txt) + + file-2.5938485188.txt (uploading new version of file-2.txt) + Uploading 2 new assets... + Uploaded 100% [2 out of 2]" + `); expect(std.out).toMatchInlineSnapshot(` - "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... - Reading file-2.txt... - Uploading as file-2.5938485188.txt... - ↗️ Done syncing assets + "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -3028,12 +3105,13 @@ addEventListener('fetch', event => {});` Object { "debug": "", "err": "", - "info": "", - "out": "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... - Reading file-2.txt... - Uploading as file-2.5938485188.txt... - ↗️ Done syncing assets + "info": "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-1.2ca234f380.txt (uploading new version of file-1.txt) + + file-2.5938485188.txt (uploading new version of file-2.txt) + Uploading 2 new assets... + Uploaded 100% [2 out of 2]", + "out": "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -3072,12 +3150,13 @@ addEventListener('fetch', event => {});` Object { "debug": "", "err": "", - "info": "", - "out": "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... - Reading file-2.txt... - Uploading as file-2.5938485188.txt... - ↗️ Done syncing assets + "info": "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-1.2ca234f380.txt (uploading new version of file-1.txt) + + file-2.5938485188.txt (uploading new version of file-2.txt) + Uploading 2 new assets... + Uploaded 100% [2 out of 2]", + "out": "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) @@ -3089,6 +3168,352 @@ addEventListener('fetch', event => {});` } `); }); + + it("should abort other bucket uploads if one bucket upload fails", async () => { + // Write 9 20MiB files, should end up with 3 buckets + const content = "X".repeat(20 * 1024 * 1024); + const assets = Array.from({ length: 9 }, (_, index) => ({ + filePath: `file-${index}.txt`, + content, + })); + + const kvNamespace = { + title: "__test-name-workers_sites_assets", + id: "__test-name-workers_sites_assets-id", + }; + writeWranglerToml({ + main: "./index.js", + site: { + bucket: "assets", + }, + }); + writeWorkerSource(); + writeAssets(assets); + mockUploadWorkerRequest(); + mockSubDomainRequest(); + mockListKVNamespacesRequest(kvNamespace); + mockKeyListRequest(kvNamespace.id, []); + + let requestCount = 0; + const bulkUrl = + "*/accounts/:accountId/storage/kv/namespaces/:namespaceId/bulk"; + msw.use( + rest.put(bulkUrl, async (req, res, ctx) => { + expect(req.params.accountId).toEqual("some-account-id"); + expect(req.params.namespaceId).toEqual(kvNamespace.id); + requestCount++; + return res( + ctx.status(500), + ctx.json( + createFetchResult([], false, [ + { code: 1000, message: "Whoops! Something went wrong!" }, + ]) + ) + ); + }) + ); + + await expect( + runWrangler("publish") + ).rejects.toThrowErrorMatchingInlineSnapshot( + `"A request to the Cloudflare API (/accounts/some-account-id/storage/kv/namespaces/__test-name-workers_sites_assets-id/bulk) failed."` + ); + + expect(requestCount).toBeLessThan(3); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... 
+ + file-0.f0d69f705d.txt (uploading new version of file-0.txt) + + file-1.f0d69f705d.txt (uploading new version of file-1.txt) + + file-2.f0d69f705d.txt (uploading new version of file-2.txt) + + file-3.f0d69f705d.txt (uploading new version of file-3.txt) + + file-4.f0d69f705d.txt (uploading new version of file-4.txt) + + file-5.f0d69f705d.txt (uploading new version of file-5.txt) + + file-6.f0d69f705d.txt (uploading new version of file-6.txt) + + file-7.f0d69f705d.txt (uploading new version of file-7.txt) + + file-8.f0d69f705d.txt (uploading new version of file-8.txt) + Uploading 9 new assets... + Upload failed, aborting..." + `); + }); + + describe("should truncate diff with over 100 assets unless debug log level set", () => { + beforeEach(() => { + const assets = Array.from({ length: 110 }, (_, index) => ({ + filePath: `file-${`${index}`.padStart(3, "0")}.txt`, + content: "X", + })); + + const kvNamespace = { + title: "__test-name-workers_sites_assets", + id: "__test-name-workers_sites_assets-id", + }; + writeWranglerToml({ + main: "./index.js", + site: { + bucket: "assets", + }, + }); + writeWorkerSource(); + writeAssets(assets); + mockUploadWorkerRequest(); + mockSubDomainRequest(); + mockListKVNamespacesRequest(kvNamespace); + mockKeyListRequest(kvNamespace.id, []); + mockUploadAssetsToKVRequest(kvNamespace.id); + }); + + it("default log level", async () => { + await runWrangler("publish"); + expect(std).toMatchInlineSnapshot(` + Object { + "debug": "", + "err": "", + "info": "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-000.010257e8bb.txt (uploading new version of file-000.txt) + + file-001.010257e8bb.txt (uploading new version of file-001.txt) + + file-002.010257e8bb.txt (uploading new version of file-002.txt) + + file-003.010257e8bb.txt (uploading new version of file-003.txt) + + file-004.010257e8bb.txt (uploading new version of file-004.txt) + + file-005.010257e8bb.txt (uploading new version of file-005.txt) + + file-006.010257e8bb.txt (uploading new version of file-006.txt) + + file-007.010257e8bb.txt (uploading new version of file-007.txt) + + file-008.010257e8bb.txt (uploading new version of file-008.txt) + + file-009.010257e8bb.txt (uploading new version of file-009.txt) + + file-010.010257e8bb.txt (uploading new version of file-010.txt) + + file-011.010257e8bb.txt (uploading new version of file-011.txt) + + file-012.010257e8bb.txt (uploading new version of file-012.txt) + + file-013.010257e8bb.txt (uploading new version of file-013.txt) + + file-014.010257e8bb.txt (uploading new version of file-014.txt) + + file-015.010257e8bb.txt (uploading new version of file-015.txt) + + file-016.010257e8bb.txt (uploading new version of file-016.txt) + + file-017.010257e8bb.txt (uploading new version of file-017.txt) + + file-018.010257e8bb.txt (uploading new version of file-018.txt) + + file-019.010257e8bb.txt (uploading new version of file-019.txt) + + file-020.010257e8bb.txt (uploading new version of file-020.txt) + + file-021.010257e8bb.txt (uploading new version of file-021.txt) + + file-022.010257e8bb.txt (uploading new version of file-022.txt) + + file-023.010257e8bb.txt (uploading new version of file-023.txt) + + file-024.010257e8bb.txt (uploading new version of file-024.txt) + + file-025.010257e8bb.txt (uploading new version of file-025.txt) + + file-026.010257e8bb.txt (uploading new version of file-026.txt) + + file-027.010257e8bb.txt (uploading new version of file-027.txt) + + file-028.010257e8bb.txt (uploading new version of 
file-028.txt) + + file-029.010257e8bb.txt (uploading new version of file-029.txt) + + file-030.010257e8bb.txt (uploading new version of file-030.txt) + + file-031.010257e8bb.txt (uploading new version of file-031.txt) + + file-032.010257e8bb.txt (uploading new version of file-032.txt) + + file-033.010257e8bb.txt (uploading new version of file-033.txt) + + file-034.010257e8bb.txt (uploading new version of file-034.txt) + + file-035.010257e8bb.txt (uploading new version of file-035.txt) + + file-036.010257e8bb.txt (uploading new version of file-036.txt) + + file-037.010257e8bb.txt (uploading new version of file-037.txt) + + file-038.010257e8bb.txt (uploading new version of file-038.txt) + + file-039.010257e8bb.txt (uploading new version of file-039.txt) + + file-040.010257e8bb.txt (uploading new version of file-040.txt) + + file-041.010257e8bb.txt (uploading new version of file-041.txt) + + file-042.010257e8bb.txt (uploading new version of file-042.txt) + + file-043.010257e8bb.txt (uploading new version of file-043.txt) + + file-044.010257e8bb.txt (uploading new version of file-044.txt) + + file-045.010257e8bb.txt (uploading new version of file-045.txt) + + file-046.010257e8bb.txt (uploading new version of file-046.txt) + + file-047.010257e8bb.txt (uploading new version of file-047.txt) + + file-048.010257e8bb.txt (uploading new version of file-048.txt) + + file-049.010257e8bb.txt (uploading new version of file-049.txt) + + file-050.010257e8bb.txt (uploading new version of file-050.txt) + + file-051.010257e8bb.txt (uploading new version of file-051.txt) + + file-052.010257e8bb.txt (uploading new version of file-052.txt) + + file-053.010257e8bb.txt (uploading new version of file-053.txt) + + file-054.010257e8bb.txt (uploading new version of file-054.txt) + + file-055.010257e8bb.txt (uploading new version of file-055.txt) + + file-056.010257e8bb.txt (uploading new version of file-056.txt) + + file-057.010257e8bb.txt (uploading new version of file-057.txt) + + file-058.010257e8bb.txt (uploading new version of file-058.txt) + + file-059.010257e8bb.txt (uploading new version of file-059.txt) + + file-060.010257e8bb.txt (uploading new version of file-060.txt) + + file-061.010257e8bb.txt (uploading new version of file-061.txt) + + file-062.010257e8bb.txt (uploading new version of file-062.txt) + + file-063.010257e8bb.txt (uploading new version of file-063.txt) + + file-064.010257e8bb.txt (uploading new version of file-064.txt) + + file-065.010257e8bb.txt (uploading new version of file-065.txt) + + file-066.010257e8bb.txt (uploading new version of file-066.txt) + + file-067.010257e8bb.txt (uploading new version of file-067.txt) + + file-068.010257e8bb.txt (uploading new version of file-068.txt) + + file-069.010257e8bb.txt (uploading new version of file-069.txt) + + file-070.010257e8bb.txt (uploading new version of file-070.txt) + + file-071.010257e8bb.txt (uploading new version of file-071.txt) + + file-072.010257e8bb.txt (uploading new version of file-072.txt) + + file-073.010257e8bb.txt (uploading new version of file-073.txt) + + file-074.010257e8bb.txt (uploading new version of file-074.txt) + + file-075.010257e8bb.txt (uploading new version of file-075.txt) + + file-076.010257e8bb.txt (uploading new version of file-076.txt) + + file-077.010257e8bb.txt (uploading new version of file-077.txt) + + file-078.010257e8bb.txt (uploading new version of file-078.txt) + + file-079.010257e8bb.txt (uploading new version of file-079.txt) + + file-080.010257e8bb.txt (uploading new version of file-080.txt) + + 
file-081.010257e8bb.txt (uploading new version of file-081.txt) + + file-082.010257e8bb.txt (uploading new version of file-082.txt) + + file-083.010257e8bb.txt (uploading new version of file-083.txt) + + file-084.010257e8bb.txt (uploading new version of file-084.txt) + + file-085.010257e8bb.txt (uploading new version of file-085.txt) + + file-086.010257e8bb.txt (uploading new version of file-086.txt) + + file-087.010257e8bb.txt (uploading new version of file-087.txt) + + file-088.010257e8bb.txt (uploading new version of file-088.txt) + + file-089.010257e8bb.txt (uploading new version of file-089.txt) + + file-090.010257e8bb.txt (uploading new version of file-090.txt) + + file-091.010257e8bb.txt (uploading new version of file-091.txt) + + file-092.010257e8bb.txt (uploading new version of file-092.txt) + + file-093.010257e8bb.txt (uploading new version of file-093.txt) + + file-094.010257e8bb.txt (uploading new version of file-094.txt) + + file-095.010257e8bb.txt (uploading new version of file-095.txt) + + file-096.010257e8bb.txt (uploading new version of file-096.txt) + + file-097.010257e8bb.txt (uploading new version of file-097.txt) + + file-098.010257e8bb.txt (uploading new version of file-098.txt) + + file-099.010257e8bb.txt (uploading new version of file-099.txt) + (truncating changed assets log, set \`WRANGLER_LOG=debug\` environment variable to see full diff) + Uploading 110 new assets... + Uploaded 100% [110 out of 110]", + "out": "↗️ Done syncing assets + Total Upload: xx KiB / gzip: xx KiB + Uploaded test-name (TIMINGS) + Published test-name (TIMINGS) + https://test-name.test-sub-domain.workers.dev + Current Deployment ID: Galaxy-Class", + "warn": "", + } + `); + }); + + it("debug log level", async () => { + logger.loggerLevel = "debug"; + await runWrangler("publish"); + + const diffRegexp = /^ [+=-]/; + const diff = std.debug + .split("\n") + .filter((line) => diffRegexp.test(line)) + .join("\n"); + expect(diff).toMatchInlineSnapshot(` + " + file-000.010257e8bb.txt (uploading new version of file-000.txt) + + file-001.010257e8bb.txt (uploading new version of file-001.txt) + + file-002.010257e8bb.txt (uploading new version of file-002.txt) + + file-003.010257e8bb.txt (uploading new version of file-003.txt) + + file-004.010257e8bb.txt (uploading new version of file-004.txt) + + file-005.010257e8bb.txt (uploading new version of file-005.txt) + + file-006.010257e8bb.txt (uploading new version of file-006.txt) + + file-007.010257e8bb.txt (uploading new version of file-007.txt) + + file-008.010257e8bb.txt (uploading new version of file-008.txt) + + file-009.010257e8bb.txt (uploading new version of file-009.txt) + + file-010.010257e8bb.txt (uploading new version of file-010.txt) + + file-011.010257e8bb.txt (uploading new version of file-011.txt) + + file-012.010257e8bb.txt (uploading new version of file-012.txt) + + file-013.010257e8bb.txt (uploading new version of file-013.txt) + + file-014.010257e8bb.txt (uploading new version of file-014.txt) + + file-015.010257e8bb.txt (uploading new version of file-015.txt) + + file-016.010257e8bb.txt (uploading new version of file-016.txt) + + file-017.010257e8bb.txt (uploading new version of file-017.txt) + + file-018.010257e8bb.txt (uploading new version of file-018.txt) + + file-019.010257e8bb.txt (uploading new version of file-019.txt) + + file-020.010257e8bb.txt (uploading new version of file-020.txt) + + file-021.010257e8bb.txt (uploading new version of file-021.txt) + + file-022.010257e8bb.txt (uploading new version of file-022.txt) + + 
file-023.010257e8bb.txt (uploading new version of file-023.txt) + + file-024.010257e8bb.txt (uploading new version of file-024.txt) + + file-025.010257e8bb.txt (uploading new version of file-025.txt) + + file-026.010257e8bb.txt (uploading new version of file-026.txt) + + file-027.010257e8bb.txt (uploading new version of file-027.txt) + + file-028.010257e8bb.txt (uploading new version of file-028.txt) + + file-029.010257e8bb.txt (uploading new version of file-029.txt) + + file-030.010257e8bb.txt (uploading new version of file-030.txt) + + file-031.010257e8bb.txt (uploading new version of file-031.txt) + + file-032.010257e8bb.txt (uploading new version of file-032.txt) + + file-033.010257e8bb.txt (uploading new version of file-033.txt) + + file-034.010257e8bb.txt (uploading new version of file-034.txt) + + file-035.010257e8bb.txt (uploading new version of file-035.txt) + + file-036.010257e8bb.txt (uploading new version of file-036.txt) + + file-037.010257e8bb.txt (uploading new version of file-037.txt) + + file-038.010257e8bb.txt (uploading new version of file-038.txt) + + file-039.010257e8bb.txt (uploading new version of file-039.txt) + + file-040.010257e8bb.txt (uploading new version of file-040.txt) + + file-041.010257e8bb.txt (uploading new version of file-041.txt) + + file-042.010257e8bb.txt (uploading new version of file-042.txt) + + file-043.010257e8bb.txt (uploading new version of file-043.txt) + + file-044.010257e8bb.txt (uploading new version of file-044.txt) + + file-045.010257e8bb.txt (uploading new version of file-045.txt) + + file-046.010257e8bb.txt (uploading new version of file-046.txt) + + file-047.010257e8bb.txt (uploading new version of file-047.txt) + + file-048.010257e8bb.txt (uploading new version of file-048.txt) + + file-049.010257e8bb.txt (uploading new version of file-049.txt) + + file-050.010257e8bb.txt (uploading new version of file-050.txt) + + file-051.010257e8bb.txt (uploading new version of file-051.txt) + + file-052.010257e8bb.txt (uploading new version of file-052.txt) + + file-053.010257e8bb.txt (uploading new version of file-053.txt) + + file-054.010257e8bb.txt (uploading new version of file-054.txt) + + file-055.010257e8bb.txt (uploading new version of file-055.txt) + + file-056.010257e8bb.txt (uploading new version of file-056.txt) + + file-057.010257e8bb.txt (uploading new version of file-057.txt) + + file-058.010257e8bb.txt (uploading new version of file-058.txt) + + file-059.010257e8bb.txt (uploading new version of file-059.txt) + + file-060.010257e8bb.txt (uploading new version of file-060.txt) + + file-061.010257e8bb.txt (uploading new version of file-061.txt) + + file-062.010257e8bb.txt (uploading new version of file-062.txt) + + file-063.010257e8bb.txt (uploading new version of file-063.txt) + + file-064.010257e8bb.txt (uploading new version of file-064.txt) + + file-065.010257e8bb.txt (uploading new version of file-065.txt) + + file-066.010257e8bb.txt (uploading new version of file-066.txt) + + file-067.010257e8bb.txt (uploading new version of file-067.txt) + + file-068.010257e8bb.txt (uploading new version of file-068.txt) + + file-069.010257e8bb.txt (uploading new version of file-069.txt) + + file-070.010257e8bb.txt (uploading new version of file-070.txt) + + file-071.010257e8bb.txt (uploading new version of file-071.txt) + + file-072.010257e8bb.txt (uploading new version of file-072.txt) + + file-073.010257e8bb.txt (uploading new version of file-073.txt) + + file-074.010257e8bb.txt (uploading new version of file-074.txt) + + 
file-075.010257e8bb.txt (uploading new version of file-075.txt) + + file-076.010257e8bb.txt (uploading new version of file-076.txt) + + file-077.010257e8bb.txt (uploading new version of file-077.txt) + + file-078.010257e8bb.txt (uploading new version of file-078.txt) + + file-079.010257e8bb.txt (uploading new version of file-079.txt) + + file-080.010257e8bb.txt (uploading new version of file-080.txt) + + file-081.010257e8bb.txt (uploading new version of file-081.txt) + + file-082.010257e8bb.txt (uploading new version of file-082.txt) + + file-083.010257e8bb.txt (uploading new version of file-083.txt) + + file-084.010257e8bb.txt (uploading new version of file-084.txt) + + file-085.010257e8bb.txt (uploading new version of file-085.txt) + + file-086.010257e8bb.txt (uploading new version of file-086.txt) + + file-087.010257e8bb.txt (uploading new version of file-087.txt) + + file-088.010257e8bb.txt (uploading new version of file-088.txt) + + file-089.010257e8bb.txt (uploading new version of file-089.txt) + + file-090.010257e8bb.txt (uploading new version of file-090.txt) + + file-091.010257e8bb.txt (uploading new version of file-091.txt) + + file-092.010257e8bb.txt (uploading new version of file-092.txt) + + file-093.010257e8bb.txt (uploading new version of file-093.txt) + + file-094.010257e8bb.txt (uploading new version of file-094.txt) + + file-095.010257e8bb.txt (uploading new version of file-095.txt) + + file-096.010257e8bb.txt (uploading new version of file-096.txt) + + file-097.010257e8bb.txt (uploading new version of file-097.txt) + + file-098.010257e8bb.txt (uploading new version of file-098.txt) + + file-099.010257e8bb.txt (uploading new version of file-099.txt) + + file-100.010257e8bb.txt (uploading new version of file-100.txt) + + file-101.010257e8bb.txt (uploading new version of file-101.txt) + + file-102.010257e8bb.txt (uploading new version of file-102.txt) + + file-103.010257e8bb.txt (uploading new version of file-103.txt) + + file-104.010257e8bb.txt (uploading new version of file-104.txt) + + file-105.010257e8bb.txt (uploading new version of file-105.txt) + + file-106.010257e8bb.txt (uploading new version of file-106.txt) + + file-107.010257e8bb.txt (uploading new version of file-107.txt) + + file-108.010257e8bb.txt (uploading new version of file-108.txt) + + file-109.010257e8bb.txt (uploading new version of file-109.txt)" + `); + expect(std.info).toMatchInlineSnapshot(` + "Fetching list of already uploaded assets... + Building list of assets to upload... + Uploading 110 new assets... + Uploaded 100% [110 out of 110]" + `); + }); + }); }); describe("workers_dev setting", () => { @@ -6656,12 +7081,13 @@ addEventListener('fetch', event => {});` Object { "debug": "", "err": "", - "info": "", - "out": "Reading file-1.txt... - Uploading as file-1.2ca234f380.txt... - Reading file-2.txt... - Uploading as file-2.5938485188.txt... - ↗️ Done syncing assets + "info": "Fetching list of already uploaded assets... + Building list of assets to upload... + + file-1.2ca234f380.txt (uploading new version of file-1.txt) + + file-2.5938485188.txt (uploading new version of file-2.txt) + Uploading 2 new assets... 
+ Uploaded 100% [2 out of 2]", + "out": "↗️ Done syncing assets Total Upload: xx KiB / gzip: xx KiB Uploaded test-name (TIMINGS) Published test-name (TIMINGS) diff --git a/packages/wrangler/src/kv/helpers.ts b/packages/wrangler/src/kv/helpers.ts index 0f46fb126853..a1edb5a7b955 100644 --- a/packages/wrangler/src/kv/helpers.ts +++ b/packages/wrangler/src/kv/helpers.ts @@ -8,7 +8,7 @@ import type { Config } from "../config"; const API_MAX = 10000; // The const below are halved from the API's true capacity to help avoid // hammering it with large requests. -const BATCH_KEY_MAX = API_MAX / 2; +export const BATCH_KEY_MAX = API_MAX / 2; type KvArgs = { binding?: string; @@ -256,7 +256,7 @@ export async function deleteKVKeyValue( /** * Formatter for converting e.g. 5328 --> 5,328 */ -const formatNumber = new Intl.NumberFormat("en-US", { +export const formatNumber = new Intl.NumberFormat("en-US", { notation: "standard", }).format; @@ -279,7 +279,8 @@ export async function putKVBulkKeyValue( accountId: string, namespaceId: string, keyValues: KeyValue[], - quiet = false + quiet = false, + abortSignal?: AbortSignal ) { for (let index = 0; index < keyValues.length; index += BATCH_KEY_MAX) { if (!quiet && keyValues.length > BATCH_KEY_MAX) { @@ -292,7 +293,9 @@ export async function putKVBulkKeyValue( method: "PUT", body: JSON.stringify(keyValues.slice(index, index + BATCH_KEY_MAX)), headers: { "Content-Type": "application/json" }, - } + }, + undefined, + abortSignal ); } diff --git a/packages/wrangler/src/sites.ts b/packages/wrangler/src/sites.ts index 0ec3b2fb0804..d6170a873631 100644 --- a/packages/wrangler/src/sites.ts +++ b/packages/wrangler/src/sites.ts @@ -1,6 +1,7 @@ import assert from "node:assert"; import { readdir, readFile, stat } from "node:fs/promises"; import * as path from "node:path"; +import chalk from "chalk"; import ignore from "ignore"; import xxhash from "xxhash-wasm"; import { @@ -9,8 +10,10 @@ import { listKVNamespaces, putKVBulkKeyValue, deleteKVBulkKeyValue, + BATCH_KEY_MAX, + formatNumber, } from "./kv/helpers"; -import { logger } from "./logger"; +import { logger, LOGGER_LEVELS } from "./logger"; import type { Config } from "./config"; import type { KeyValue } from "./kv/helpers"; import type { XXHashAPI } from "xxhash-wasm"; @@ -92,6 +95,15 @@ async function createKVNamespaceIfNotAlreadyExisting( }; } +const MAX_DIFF_LINES = 100; +const MAX_BUCKET_SIZE = 98 * 1000 * 1000; +const MAX_BUCKET_KEYS = BATCH_KEY_MAX; +const MAX_BATCH_OPERATIONS = 5; + +function pluralise(count: number) { + return count === 1 ? "" : "s"; +} + /** * Upload the assets found within the `dirPath` directory to the sites assets KV namespace for * the worker given by `scriptName`. @@ -116,13 +128,13 @@ export async function syncAssets( if (siteAssets === undefined) { return { manifest: undefined, namespace: undefined }; } - if (dryRun) { logger.log("(Note: doing a dry run, not uploading or deleting anything.)"); return { manifest: undefined, namespace: undefined }; } assert(accountId, "Missing accountId"); + // Create assets namespace if it doesn't exist const title = `__${scriptName}-workers_sites_assets${ preview ? 
"_preview" : "" }`; @@ -131,55 +143,81 @@ export async function syncAssets( title, accountId ); - - // let's get all the keys in this namespace + // Get all existing keys in asset namespace + logger.info("Fetching list of already uploaded assets..."); const namespaceKeysResponse = await listKVNamespaceKeys(accountId, namespace); const namespaceKeys = new Set(namespaceKeysResponse.map((x) => x.name)); - const manifest: Record = {}; - - // A batch of uploads where each bucket has to be less than 98mb - const uploadBuckets: KeyValue[][] = []; - // The "live" bucket that we'll keep filling until it's just below 98mb - let uploadBucket: KeyValue[] = []; - // A size counter for the live bucket - let uploadBucketSize = 0; - - const include = createPatternMatcher(siteAssets.includePatterns, false); - const exclude = createPatternMatcher(siteAssets.excludePatterns, true); - const hasher = await xxhash(); - const assetDirectory = path.join( siteAssets.baseDirectory, siteAssets.assetDirectory ); + const include = createPatternMatcher(siteAssets.includePatterns, false); + const exclude = createPatternMatcher(siteAssets.excludePatterns, true); + const hasher = await xxhash(); + + // Find and validate all assets before we make any changes (can't store base64 + // contents in memory for upload as users may have *lots* of files, and we + // don't want to OOM: https://github.com/cloudflare/workers-sdk/issues/2223) + + const manifest: Record = {}; + type PathKey = [path: string, key: string]; + // A batch of uploads where each bucket has to be less than 100 MiB and + // contain less than 10,000 keys (although we limit to 98 MB and 5000 keys) + const uploadBuckets: PathKey[][] = []; + // The "live" bucket we'll keep filling until it's just below the size limit + let uploadBucket: PathKey[] = []; + // Current size of the live bucket in bytes (just base64 encoded values) + let uploadBucketSize = 0; + + let uploadCount = 0; + let skipCount = 0; + + // Always log the first MAX_DIFF_LINES lines, then require the debug log level + let diffCount = 0; + function logDiff(line: string) { + const level = logger.loggerLevel; + if (LOGGER_LEVELS[level] >= LOGGER_LEVELS.debug) { + // If we're logging as debug level, we want *all* diff lines to be logged + // at debug level, not just the first MAX_DIFF_LINES + logger.debug(line); + } else if (diffCount < MAX_DIFF_LINES) { + // Otherwise, log the first MAX_DIFF_LINES diffs at info level... 
+ logger.info(line); + } else if (diffCount === MAX_DIFF_LINES) { + // ...and warn when we start to truncate it + const msg = + " (truncating changed assets log, set `WRANGLER_LOG=debug` environment variable to see full diff)"; + logger.info(chalk.dim(msg)); + } + diffCount++; + } + + logger.info("Building list of assets to upload..."); for await (const absAssetFile of getFilesInFolder(assetDirectory)) { const assetFile = path.relative(assetDirectory, absAssetFile); - if (!include(assetFile)) { - continue; - } - if (exclude(assetFile)) { - continue; - } + if (!include(assetFile) || exclude(assetFile)) continue; - logger.log(`Reading ${assetFile}...`); const content = await readFile(absAssetFile, "base64"); - await validateAssetSize(absAssetFile, assetFile); - // while KV accepts files that are 25 MiB **before** b64 encoding + // While KV accepts files that are 25 MiB **before** b64 encoding // the overall bucket size must be below 100 MB **after** b64 encoding - const assetSize = Buffer.from(content).length; + const assetSize = Buffer.byteLength(content); + await validateAssetSize(absAssetFile, assetFile); const assetKey = hashAsset(hasher, assetFile, content); validateAssetKey(assetKey); - // now put each of the files into kv if (!namespaceKeys.has(assetKey)) { - logger.log(`Uploading as ${assetKey}...`); - - // Check if adding this asset to the bucket would - // push it over the 98 MiB limit KV bulk API limit - if (uploadBucketSize + assetSize > 98 * 1000 * 1000) { - // If so, move the current bucket into the batch, - // and reset the counter/bucket + logDiff( + chalk.green(` + ${assetKey} (uploading new version of ${assetFile})`) + ); + + // Check if adding this asset to the bucket would push it over the KV + // bulk API limits + if ( + uploadBucketSize + assetSize > MAX_BUCKET_SIZE || + uploadBucket.length + 1 > MAX_BUCKET_KEYS + ) { + // If so, record the current bucket and reset it uploadBuckets.push(uploadBucket); uploadBucketSize = 0; uploadBucket = []; @@ -187,13 +225,11 @@ export async function syncAssets( // Update the bucket and the size counter uploadBucketSize += assetSize; - uploadBucket.push({ - key: assetKey, - value: content, - base64: true, - }); + uploadBucket.push([absAssetFile, assetKey]); + uploadCount++; } else { - logger.log(`Skipping - already uploaded.`); + logDiff(chalk.dim(` = ${assetKey} (already uploaded ${assetFile})`)); + skipCount++; } // Remove the key from the set so we know what we've already uploaded @@ -203,23 +239,99 @@ export async function syncAssets( const manifestKey = urlSafe(path.relative(assetDirectory, absAssetFile)); manifest[manifestKey] = assetKey; } + // Add the last (potentially only or empty) bucket to the batch + if (uploadBucket.length > 0) uploadBuckets.push(uploadBucket); - // Add the last (potentially only) bucket to the batch - uploadBuckets.push(uploadBucket); - - // keys now contains all the files we're deleting for (const key of namespaceKeys) { - logger.log(`Deleting ${key} from the asset store...`); + logDiff(chalk.red(` - ${key} (removing as stale)`)); + } + + // Upload new assets, with 5 concurrent uploaders + if (uploadCount > 0) { + const s = pluralise(uploadCount); + logger.info(`Uploading ${formatNumber(uploadCount)} new asset${s}...`); + } + if (skipCount > 0) { + const s = pluralise(skipCount); + logger.info( + `Skipped uploading ${formatNumber(skipCount)} existing asset${s}.` + ); } + let uploadedCount = 0; + const controller = new AbortController(); + const uploaders = 
Array.from(Array(MAX_BATCH_OPERATIONS)).map(async () => { + while (!controller.signal.aborted) { + // Get the next bucket to upload. If there is none, stop this uploader. + // JavaScript is single(ish)-threaded, so we don't need to worry about + // parallel access here. + const nextBucket = uploadBuckets.shift(); + if (nextBucket === undefined) break; + + // Read all files in the bucket as base64 + // TODO(perf): consider streaming the bulk upload body, rather than + // buffering all base64 contents then JSON-stringifying. This probably + // doesn't matter *too* much: we know buckets will be about 100MB, so + // with 5 uploaders, we could load about 500MB into memory (+ extra + // object keys/tags/copies/etc). + const bucket: KeyValue[] = []; + for (const [absAssetFile, assetKey] of nextBucket) { + bucket.push({ + key: assetKey, + value: await readFile(absAssetFile, "base64"), + base64: true, + }); + if (controller.signal.aborted) break; + } - // upload each bucket in parallel - const bucketsToPut = []; - for (const bucket of uploadBuckets) { - bucketsToPut.push(putKVBulkKeyValue(accountId, namespace, bucket)); + // Upload the bucket to the KV namespace, suppressing logs, we do our own + try { + await putKVBulkKeyValue( + accountId, + namespace, + bucket, + /* quiet */ true, + controller.signal + ); + } catch (e) { + // https://developer.mozilla.org/en-US/docs/Web/API/DOMException#error_names + // https://github.com/nodejs/undici/blob/a3efc9814447001a43a976f1c64adc41995df7e3/lib/core/errors.js#L89 + if ( + typeof e === "object" && + e !== null && + "name" in e && + // @ts-expect-error `e.name` should be typed `unknown`, fixed in + // TypeScript 4.9 + e.name === "AbortError" + ) { + break; + } + throw e; + } + uploadedCount += nextBucket.length; + const percent = Math.floor((100 * uploadedCount) / uploadCount); + logger.info( + `Uploaded ${percent}% [${formatNumber( + uploadedCount + )} out of ${formatNumber(uploadCount)}]` + ); + } + }); + try { + // Wait for all uploaders to complete, or one to fail + await Promise.all(uploaders); + } catch (e) { + // If any uploader fails, abort the others + logger.info(`Upload failed, aborting...`); + controller.abort(); + throw e; } - await Promise.all(bucketsToPut); - // then delete all the assets that aren't used anymore + // Delete stale assets + const deleteCount = namespaceKeys.size; + if (deleteCount > 0) { + const s = pluralise(deleteCount); + logger.info(`Removing ${formatNumber(deleteCount)} stale asset${s}...`); + } await deleteKVBulkKeyValue(accountId, namespace, Array.from(namespaceKeys)); logger.log("↗️ Done syncing assets");