diff --git a/.storybook/preview.js b/.storybook/preview.js
index e302a9846..550211f71 100644
--- a/.storybook/preview.js
+++ b/.storybook/preview.js
@@ -17,7 +17,7 @@ ansiHTML.setColors({
chalk.enabled = true;
chalk.level = 3;
-const style = {
+const codeStyle = {
display: 'inline-block',
margin: 0,
padding: '1rem',
@@ -28,6 +28,14 @@ const style = {
color: '#c0c4cd',
};
+const htmlStyle = {
+ fontFamily: "'Nunito Sans', sans-serif",
+ fontSize: 14,
+ lineHeight: '1',
+ color: '#5C6870',
+ padding: 20,
+};
+
export const parameters = {
layout: 'fullscreen',
};
@@ -36,7 +44,11 @@ export const decorators = [
(storyFn, { kind }) => {
if (kind.startsWith('CLI/')) {
document.body.style.backgroundColor = '#16242c';
-      return <pre style={style} dangerouslySetInnerHTML={{ __html: ansiHTML(storyFn()) }} />;
+      return <pre style={codeStyle} dangerouslySetInnerHTML={{ __html: ansiHTML(storyFn()) }} />;
+ }
+ if (kind.startsWith('HTML/')) {
+ document.body.style.backgroundColor = '#F6F9FC';
+      return <div style={htmlStyle} dangerouslySetInnerHTML={{ __html: storyFn() }} />;
}
document.body.style.backgroundColor = 'paleturquoise';
return storyFn();
diff --git a/action-src/main.ts b/action-src/main.ts
index 96b6a7229..c4f6a9efc 100755
--- a/action-src/main.ts
+++ b/action-src/main.ts
@@ -39,7 +39,6 @@ const getBuildInfo = (event: typeof context) => {
}
case 'workflow_run': {
const { repository } = event.payload;
- // eslint-disable-next-line @typescript-eslint/naming-convention
const { head_sha, head_branch } = event.payload.workflow_run;
return {
@@ -76,22 +75,6 @@ const getBuildInfo = (event: typeof context) => {
}
};
-interface Output {
- code: number;
- url: string;
- buildUrl: string;
- storybookUrl: string;
- specCount: number;
- componentCount: number;
- testCount: number;
- changeCount: number;
- errorCount: number;
- interactionTestFailuresCount: number;
- actualTestCount: number;
- actualCaptureCount: number;
- inheritedCaptureCount: number;
-}
-
async function run() {
const { sha, branch, slug, mergeCommit } = getBuildInfo(context) || {};
if (!sha || !branch || !slug) return;
@@ -123,6 +106,7 @@ async function run() {
const storybookConfigDir = getInput('storybookConfigDir');
const traceChanged = getInput('traceChanged');
const untraced = getInput('untraced');
+ const uploadMetadata = getInput('uploadMetadata');
const workingDir = getInput('workingDir') || getInput('workingDirectory');
const zip = getInput('zip');
const junitReport = getInput('junitReport');
@@ -165,6 +149,7 @@ async function run() {
storybookConfigDir: maybe(storybookConfigDir),
traceChanged: maybe(traceChanged),
untraced: maybe(untraced),
+ uploadMetadata: maybe(uploadMetadata, false),
zip: maybe(zip, false),
junitReport: maybe(junitReport, false),
},
diff --git a/action.yml b/action.yml
index 1f4e2bc7a..ea50b29d0 100755
--- a/action.yml
+++ b/action.yml
@@ -87,6 +87,9 @@ inputs:
untraced:
description: 'Disregard these files and their dependencies when tracing dependent stories for TurboSnap'
required: false
+ uploadMetadata:
+ description: 'Upload Chromatic metadata files as part of the published Storybook'
+ required: false
workingDir:
description: 'Working directory for the package.json file'
required: false
diff --git a/bin-src/trim-stats-file.ts b/bin-src/trim-stats-file.ts
index 7798cb405..9bcd4ae7f 100644
--- a/bin-src/trim-stats-file.ts
+++ b/bin-src/trim-stats-file.ts
@@ -45,6 +45,7 @@ export async function main([statsFile = './storybook-static/preview-stats.json']
);
console.log(`Wrote ${targetFile}`);
+ return targetFile;
} catch (err) {
console.error(err);
}
diff --git a/node-src/index.test.ts b/node-src/index.test.ts
index bc73a7a0e..cc70cc303 100644
--- a/node-src/index.test.ts
+++ b/node-src/index.test.ts
@@ -12,7 +12,7 @@ import { DNSResolveAgent } from './io/getDNSResolveAgent';
import getEnv from './lib/getEnv';
import parseArgs from './lib/parseArgs';
import TestLogger from './lib/testLogger';
-import uploadFiles from './lib/uploadFiles';
+import { uploadFiles } from './lib/uploadFiles';
import { writeChromaticDiagnostics } from './lib/writeChromaticDiagnostics';
import { Context } from './types';
@@ -443,14 +443,16 @@ it('calls out to npm build script passed and uploads files', async () => {
{
contentLength: 42,
contentType: 'text/html',
- path: expect.stringMatching(/\/iframe\.html$/),
- url: 'https://cdn.example.com/iframe.html',
+ localPath: expect.stringMatching(/\/iframe\.html$/),
+ targetPath: 'iframe.html',
+ targetUrl: 'https://cdn.example.com/iframe.html',
},
{
contentLength: 42,
contentType: 'text/html',
- path: expect.stringMatching(/\/index\.html$/),
- url: 'https://cdn.example.com/index.html',
+ localPath: expect.stringMatching(/\/index\.html$/),
+ targetPath: 'index.html',
+ targetUrl: 'https://cdn.example.com/index.html',
},
],
expect.any(Function)
@@ -468,14 +470,16 @@ it('skips building and uploads directly with storybook-build-dir', async () => {
{
contentLength: 42,
contentType: 'text/html',
- path: expect.stringMatching(/\/iframe\.html$/),
- url: 'https://cdn.example.com/iframe.html',
+ localPath: expect.stringMatching(/\/iframe\.html$/),
+ targetPath: 'iframe.html',
+ targetUrl: 'https://cdn.example.com/iframe.html',
},
{
contentLength: 42,
contentType: 'text/html',
- path: expect.stringMatching(/\/index\.html$/),
- url: 'https://cdn.example.com/index.html',
+ localPath: expect.stringMatching(/\/index\.html$/),
+ targetPath: 'index.html',
+ targetUrl: 'https://cdn.example.com/index.html',
},
],
expect.any(Function)
@@ -646,7 +650,7 @@ it('should write context to chromatic-diagnostics.json if --diagnostics is passe
diagnostics: true,
}),
options: expect.objectContaining({
- projectToken: 'asdf1234',
+ projectToken: undefined, // redacted
}),
}),
{ spaces: 2 }
diff --git a/node-src/index.ts b/node-src/index.ts
index 09e2d0b3c..01c291c42 100644
--- a/node-src/index.ts
+++ b/node-src/index.ts
@@ -16,6 +16,7 @@ import getOptions from './lib/getOptions';
import { createLogger } from './lib/log';
import parseArgs from './lib/parseArgs';
import { exitCodes, setExitCode } from './lib/setExitCode';
+import { uploadMetadataFiles } from './lib/uploadMetadataFiles';
import { rewriteErrorMessage } from './lib/utils';
import { writeChromaticDiagnostics } from './lib/writeChromaticDiagnostics';
import getTasks from './tasks';
@@ -175,9 +176,13 @@ export async function runAll(ctx: InitialContext) {
await checkPackageJson(ctx);
}
- if (ctx.options.diagnostics) {
+ if (ctx.flags?.diagnostics || ctx.extraOptions?.diagnostics) {
await writeChromaticDiagnostics(ctx);
}
+
+ if (ctx.flags?.uploadMetadata || ctx.extraOptions?.uploadMetadata) {
+ await uploadMetadataFiles(ctx);
+ }
}
async function runBuild(ctx: Context) {
diff --git a/node-src/lib/LoggingRenderer.ts b/node-src/lib/LoggingRenderer.ts
new file mode 100644
index 000000000..d430cee58
--- /dev/null
+++ b/node-src/lib/LoggingRenderer.ts
@@ -0,0 +1,35 @@
+import UpdateRenderer from 'listr-update-renderer';
+
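+// Listr renderer that delegates to the default update renderer while also mirroring
+// task titles and output to the log file via the logger's `file` method.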
+export default class LoggingRenderer {
+ tasks;
+ options;
+ updateRenderer;
+
+ constructor(tasks: any, options: any) {
+ this.tasks = tasks;
+ this.options = options;
+ this.updateRenderer = new UpdateRenderer(tasks, options);
+ }
+
+ static get nonTTY() {
+ return false;
+ }
+
+ render() {
+ this.updateRenderer.render();
+ for (const task of this.tasks) {
+ let lastData;
+ task.subscribe((event) => {
+ if (event.type === 'TITLE') this.options.log.file(`${task.title}`);
+ if (event.type === 'DATA' && lastData !== event.data) {
+ lastData = event.data;
+ this.options.log.file(` → ${event.data}`);
+ }
+ });
+ }
+ }
+
+ end() {
+ this.updateRenderer.end();
+ }
+}
diff --git a/node-src/lib/NonTTYRenderer.ts b/node-src/lib/NonTTYRenderer.ts
index 7c5ee4add..f371c1d90 100644
--- a/node-src/lib/NonTTYRenderer.ts
+++ b/node-src/lib/NonTTYRenderer.ts
@@ -1,6 +1,5 @@
export default class NonTTYRenderer {
tasks;
-
options;
constructor(tasks: any, options: any) {
diff --git a/node-src/lib/compress.test.ts b/node-src/lib/compress.test.ts
index ca0869c52..7172733f2 100644
--- a/node-src/lib/compress.test.ts
+++ b/node-src/lib/compress.test.ts
@@ -13,11 +13,8 @@ afterEach(() => {
mockFs.restore();
});
-const testContext = {
- sourceDir: '/chromatic-tmp',
- fileInfo: { paths: ['file1'] },
- log: new TestLogger(),
-} as any;
+const testContext = { sourceDir: '/chromatic-tmp', log: new TestLogger() } as any;
+const files = [{ localPath: '/chromatic-tmp/file1', targetPath: 'file1', contentLength: 1 }];
describe('makeZipFile', () => {
it('adds files to an archive', async () => {
@@ -27,14 +24,14 @@ describe('makeZipFile', () => {
},
});
- const result = await makeZipFile(testContext);
+ const result = await makeZipFile(testContext, files);
expect(existsSync(result.path)).toBeTruthy();
expect(result.size).toBeGreaterThan(0);
});
it('rejects on error signals', () => {
- return expect(makeZipFile(testContext)).rejects.toThrow(
+ return expect(makeZipFile(testContext, files)).rejects.toThrow(
`ENOENT: no such file or directory, open '/chromatic-tmp/file1'`
);
});
diff --git a/node-src/lib/compress.ts b/node-src/lib/compress.ts
index e34abe1bc..b19d00d5c 100644
--- a/node-src/lib/compress.ts
+++ b/node-src/lib/compress.ts
@@ -1,14 +1,13 @@
import archiver from 'archiver';
import { createReadStream, createWriteStream } from 'fs';
-import { join } from 'path';
import { file as tempFile } from 'tmp-promise';
-import { Context } from '../types';
-export default async function makeZipFile(ctx: Context) {
+import { Context, FileDesc } from '../types';
+
+export default async function makeZipFile(ctx: Context, files: FileDesc[]) {
const archive = archiver('zip', { zlib: { level: 9 } });
const tmp = await tempFile({ postfix: '.zip' });
const sink = createWriteStream(null, { fd: tmp.fd });
- const { paths } = ctx.fileInfo;
return new Promise<{ path: string; size: number }>((resolve, reject) => {
sink.on('close', () => {
@@ -24,10 +23,9 @@ export default async function makeZipFile(ctx: Context) {
});
archive.pipe(sink);
- paths.forEach((path) => {
- const fullPath = join(ctx.sourceDir, path);
- ctx.log.debug({ fullPath }, 'Adding file to zip archive');
- archive.append(createReadStream(fullPath), { name: path });
+ files.forEach(({ localPath, targetPath: name }) => {
+ ctx.log.debug({ name }, 'Adding file to zip archive');
+ archive.append(createReadStream(localPath), { name });
});
ctx.log.debug('Finalizing zip archive');
diff --git a/node-src/lib/getConfiguration.ts b/node-src/lib/getConfiguration.ts
index f8239f591..02af513bf 100644
--- a/node-src/lib/getConfiguration.ts
+++ b/node-src/lib/getConfiguration.ts
@@ -29,6 +29,7 @@ const configurationSchema = z
storybookBuildDir: z.string(),
storybookBaseDir: z.string(),
storybookConfigDir: z.string(),
+ uploadMetadata: z.boolean(),
})
.partial()
.strict();
diff --git a/node-src/lib/getOptions.ts b/node-src/lib/getOptions.ts
index 658a3370d..2e25e3230 100644
--- a/node-src/lib/getOptions.ts
+++ b/node-src/lib/getOptions.ts
@@ -77,6 +77,7 @@ export default function getOptions({
branchName: undefined,
patchHeadRef: undefined,
patchBaseRef: undefined,
+ uploadMetadata: undefined,
};
const [patchHeadRef, patchBaseRef] = (flags.patchBuild || '').split('...').filter(Boolean);
@@ -123,6 +124,7 @@ export default function getOptions({
branchName,
patchHeadRef,
patchBaseRef,
+ uploadMetadata: flags.uploadMetadata,
});
const options: Options = {
diff --git a/node-src/lib/getStorybookMetadata.ts b/node-src/lib/getStorybookMetadata.ts
index bd74e814f..d2eae25ca 100644
--- a/node-src/lib/getStorybookMetadata.ts
+++ b/node-src/lib/getStorybookMetadata.ts
@@ -195,6 +195,13 @@ export const findBuilder = async (mainConfig, v7) => {
]);
};
+export const findStorybookConfigFile = async (ctx: Context, pattern: RegExp) => {
+ const configDir = ctx.options.storybookConfigDir ?? '.storybook';
+ const files = await readdir(configDir);
+ const configFile = files.find((file) => pattern.test(file));
+ return configFile && join(configDir, configFile);
+};
+
export const getStorybookMetadata = async (ctx: Context) => {
const configDir = ctx.options.storybookConfigDir ?? '.storybook';
const r = typeof __non_webpack_require__ !== 'undefined' ? __non_webpack_require__ : require;
@@ -205,10 +212,7 @@ export const getStorybookMetadata = async (ctx: Context) => {
mainConfig = await r(path.resolve(configDir, 'main'));
} catch (storybookV6error) {
try {
- const files = await readdir(configDir);
- const mainConfigFileName = files.find((file) => file.startsWith('main')) || null;
- const mainConfigFilePath = join(configDir, mainConfigFileName);
- mainConfig = await readConfig(mainConfigFilePath);
+ mainConfig = await readConfig(await findStorybookConfigFile(ctx, /^main\.[jt]sx?$/));
v7 = true;
} catch (storybookV7error) {
mainConfig = null;
diff --git a/node-src/lib/log.ts b/node-src/lib/log.ts
index d9c6d62eb..978b6a769 100644
--- a/node-src/lib/log.ts
+++ b/node-src/lib/log.ts
@@ -1,4 +1,5 @@
import debug from 'debug';
+import { createWriteStream, unlink } from 'fs';
import stripAnsi from 'strip-ansi';
import { format } from 'util';
@@ -8,8 +9,10 @@ const { DISABLE_LOGGING, LOG_LEVEL = '' } = process.env;
const LOG_LEVELS = { silent: 0, error: 1, warn: 2, info: 3, debug: 4 };
const DEFAULT_LEVEL = 'info';
+export const CHROMATIC_LOG_FILE = 'chromatic.log';
+
// Top-level promise rejection handler to deal with initialization errors
-const handleRejection = (reason) => console.error('Unhandled promise rejection:', reason);
+const handleRejection = (reason: string) => console.error('Unhandled promise rejection:', reason);
process.on('unhandledRejection', handleRejection);
// Omits any JSON metadata, returning only the message string
@@ -30,6 +33,7 @@ export interface Logger {
warn: LogFn;
info: LogFn;
log: LogFn;
+ file: LogFn;
debug: LogFn;
queue: () => void;
flush: () => void;
@@ -40,18 +44,25 @@ export interface Logger {
export const createLogger = () => {
let level = (LOG_LEVEL.toLowerCase() as keyof typeof LOG_LEVELS) || DEFAULT_LEVEL;
if (DISABLE_LOGGING === 'true') level = 'silent';
+ if (level !== 'silent') unlink(CHROMATIC_LOG_FILE, () => {});
let interactive = !process.argv.slice(2).includes('--no-interactive');
let enqueue = false;
const queue = [];
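+  // Messages at or above the current log level are also appended to chromatic.log (unless logging is silenced)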
+ const stream = level !== 'silent' ? createWriteStream(CHROMATIC_LOG_FILE, { flags: 'a' }) : null;
+ const appendToLogFile = (...messages: string[]) => stream?.write(messages.join(' ') + '\n');
+
const log =
- (type: LogType) =>
- (...args) => {
+ (type: LogType, logFileOnly?: boolean) =>
+ (...args: any[]) => {
if (LOG_LEVELS[level] < LOG_LEVELS[type]) return;
- // Convert the messages to an appropriate format
- const messages = interactive ? logInteractive(args) : logVerbose(type, args);
+ const logs = logVerbose(type, args);
+ appendToLogFile(...logs);
+ if (logFileOnly) return;
+
+ const messages = interactive ? logInteractive(args) : logs;
if (!messages.length) return;
// Queue up the logs or print them right away
@@ -71,6 +82,7 @@ export const createLogger = () => {
warn: log('warn'),
info: log('info'),
log: log('info'),
+ file: log('info', true),
debug: log('debug'),
queue: () => {
enqueue = true;
@@ -85,7 +97,7 @@ export const createLogger = () => {
},
};
- debug.log = (...args) => logger.debug(format(...args));
+ debug.log = (...args: any[]) => logger.debug(format(...args));
// Redirect unhandled promise rejections
process.off('unhandledRejection', handleRejection);
diff --git a/node-src/lib/parseArgs.ts b/node-src/lib/parseArgs.ts
index 6514ce92d..ae3525e47 100644
--- a/node-src/lib/parseArgs.ts
+++ b/node-src/lib/parseArgs.ts
@@ -49,6 +49,7 @@ export default function parseArgs(argv: string[]) {
--list List available stories. This requires running a full build.
--no-interactive Don't ask interactive questions about your setup and don't overwrite output. Always true in non-TTY environments.
--trace-changed [mode] Print dependency trace for changed files to affected story files. Set to "expanded" to list individual modules. Requires --only-changed.
+ --upload-metadata Upload Chromatic metadata files as part of the published Storybook. Includes chromatic-diagnostics.json, chromatic.log, and storybook-build.log, among others.
Deprecated options
--app-code Renamed to --project-token.
@@ -98,6 +99,7 @@ export default function parseArgs(argv: string[]) {
list: { type: 'boolean' },
interactive: { type: 'boolean', default: true },
traceChanged: { type: 'string' },
+ uploadMetadata: { type: 'boolean' },
// Deprecated options (for JSDOM and tunneled builds, among others)
allowConsoleErrors: { type: 'boolean' },
diff --git a/node-src/lib/upload.ts b/node-src/lib/upload.ts
new file mode 100644
index 000000000..c7e679e61
--- /dev/null
+++ b/node-src/lib/upload.ts
@@ -0,0 +1,112 @@
+import makeZipFile from './compress';
+import { Context, FileDesc, TargetedFile } from '../types';
+import { uploadZip, waitForUnpack } from './uploadZip';
+import { uploadFiles } from './uploadFiles';
+
+const GetUploadUrlsMutation = `
+ mutation GetUploadUrlsMutation($buildId: ObjID, $paths: [String!]!) {
+ getUploadUrls(buildId: $buildId, paths: $paths) {
+ domain
+ urls {
+ path
+ url
+ contentType
+ }
+ }
+ }
+`;
+interface GetUploadUrlsMutationResult {
+ getUploadUrls: {
+ domain: string;
+ urls: {
+ path: string;
+ url: string;
+ contentType: string;
+ }[];
+ };
+}
+
+const GetZipUploadUrlMutation = `
+ mutation GetZipUploadUrlMutation($buildId: ObjID) {
+ getZipUploadUrl(buildId: $buildId) {
+ domain
+ url
+ sentinelUrl
+ }
+ }
+`;
+interface GetZipUploadUrlMutationResult {
+ getZipUploadUrl: {
+ domain: string;
+ url: string;
+ sentinelUrl: string;
+ };
+}
+
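+// Requests a presigned upload URL for each file and uploads them individually, reporting progress through the callbacks.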
+export async function uploadAsIndividualFiles(
+ ctx: Context,
+ files: FileDesc[],
+ options: {
+ onStart?: () => void;
+ onProgress?: (progress: number, total: number) => void;
+ onComplete?: (uploadedBytes: number, domain?: string) => void;
+ onError?: (error: Error, path?: string) => void;
+ } = {}
+) {
+  const { getUploadUrls } = await ctx.client.runQuery<GetUploadUrlsMutationResult>(
+ GetUploadUrlsMutation,
+ { buildId: ctx.announcedBuild.id, paths: files.map(({ targetPath }) => targetPath) }
+ );
+ const { domain, urls } = getUploadUrls;
+ const targets = urls.map(({ path, url, contentType }) => {
+ const file = files.find((f) => f.targetPath === path);
+ return { ...file, contentType, targetUrl: url };
+ });
+ const total = targets.reduce((acc, { contentLength }) => acc + contentLength, 0);
+
+ options.onStart?.();
+
+ try {
+ await uploadFiles(ctx, targets, (progress) => options.onProgress?.(progress, total));
+ } catch (e) {
+ return options.onError?.(e, files.some((f) => f.localPath === e.message) && e.message);
+ }
+
+ options.onComplete?.(total, domain);
+}
+
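+// Zips all files into a single archive, uploads it, and waits for the server to unpack it before completing.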
+export async function uploadAsZipFile(
+ ctx: Context,
+ files: FileDesc[],
+ options: {
+ onStart?: () => void;
+ onProgress?: (progress: number, total: number) => void;
+ onComplete?: (uploadedBytes: number, domain?: string) => void;
+ onError?: (error: Error, path?: string) => void;
+ } = {}
+) {
+ const originalSize = files.reduce((acc, { contentLength }) => acc + contentLength, 0);
+ const zipped = await makeZipFile(ctx, files);
+ const { path, size } = zipped;
+
+ if (size > originalSize) throw new Error('Zip file is larger than individual files');
+ ctx.log.debug(`Compression reduced upload size by ${originalSize - size} bytes`);
+
+  const { getZipUploadUrl } = await ctx.client.runQuery<GetZipUploadUrlMutationResult>(
+ GetZipUploadUrlMutation,
+ { buildId: ctx.announcedBuild.id }
+ );
+ const { domain, url, sentinelUrl } = getZipUploadUrl;
+
+ options.onStart?.();
+
+ try {
+ await uploadZip(ctx, path, url, size, (progress) => options.onProgress?.(progress, size));
+ } catch (e) {
+ return options.onError?.(e, path);
+ }
+
+ await waitForUnpack(ctx, sentinelUrl);
+
+ options.onComplete?.(size, domain);
+}
diff --git a/node-src/lib/uploadFiles.ts b/node-src/lib/uploadFiles.ts
index f79576382..24dc4ddd8 100644
--- a/node-src/lib/uploadFiles.ts
+++ b/node-src/lib/uploadFiles.ts
@@ -2,18 +2,11 @@ import retry from 'async-retry';
import { createReadStream } from 'fs';
import pLimit from 'p-limit';
import progress from 'progress-stream';
-import { Context } from '../types';
+import { Context, TargetedFile } from '../types';
-interface File {
- path: string;
- url: string;
- contentType: string;
- contentLength: number;
-}
-
-export default async function uploadFiles(
+export async function uploadFiles(
ctx: Context,
- files: File[],
+ files: TargetedFile[],
onProgress: (progress: number) => void
) {
const { experimental_abortSignal: signal } = ctx.options;
@@ -21,10 +14,12 @@ export default async function uploadFiles(
let totalProgress = 0;
await Promise.all(
- files.map(({ path, url, contentType, contentLength }) => {
+ files.map(({ localPath, targetUrl, contentType, contentLength }) => {
      let fileProgress = 0; // The bytes uploaded for this particular file
- ctx.log.debug(`Uploading ${contentLength} bytes of ${contentType} for '${path}' to '${url}'`);
+ ctx.log.debug(
+ `Uploading ${contentLength} bytes of ${contentType} for '${localPath}' to '${targetUrl}'`
+ );
return limitConcurrency(() =>
retry(
@@ -42,10 +37,10 @@ export default async function uploadFiles(
});
const res = await ctx.http.fetch(
- url,
+ targetUrl,
{
method: 'PUT',
- body: createReadStream(path).pipe(progressStream),
+ body: createReadStream(localPath).pipe(progressStream),
headers: {
'content-type': contentType,
'content-length': contentLength.toString(),
@@ -57,17 +52,17 @@ export default async function uploadFiles(
);
if (!res.ok) {
- ctx.log.debug(`Uploading '${path}' failed: %O`, res);
- throw new Error(path);
+ ctx.log.debug(`Uploading '${localPath}' failed: %O`, res);
+ throw new Error(localPath);
}
- ctx.log.debug(`Uploaded '${path}'.`);
+ ctx.log.debug(`Uploaded '${localPath}'.`);
},
{
retries: ctx.env.CHROMATIC_RETRIES,
onRetry: (err: Error) => {
totalProgress -= fileProgress;
fileProgress = 0;
- ctx.log.debug('Retrying upload %s, %O', url, err);
+ ctx.log.debug('Retrying upload %s, %O', targetUrl, err);
onProgress(totalProgress);
},
}
diff --git a/node-src/lib/uploadMetadataFiles.ts b/node-src/lib/uploadMetadataFiles.ts
new file mode 100644
index 000000000..3fb9c4c20
--- /dev/null
+++ b/node-src/lib/uploadMetadataFiles.ts
@@ -0,0 +1,65 @@
+import { stat, writeFileSync } from 'fs';
+import { basename } from 'path';
+import { withFile } from 'tmp-promise';
+
+import { main as trimStatsFile } from '../../bin-src/trim-stats-file';
+import { STORYBOOK_BUILD_LOG_FILE } from '../tasks/build';
+import { Context, FileDesc } from '../types';
+import metadataHtml from '../ui/html/metadata.html';
+import uploadingMetadata from '../ui/messages/info/uploadingMetadata';
+import { findStorybookConfigFile } from './getStorybookMetadata';
+import { CHROMATIC_LOG_FILE } from './log';
+import { uploadAsIndividualFiles } from './upload';
+import { baseStorybookUrl } from './utils';
+import { CHROMATIC_DIAGNOSTICS_FILE } from './writeChromaticDiagnostics';
+
+const fileSize = (path: string): Promise<number> =>
+ new Promise((resolve) => stat(path, (err, stats) => resolve(err ? 0 : stats.size)));
+
+export async function uploadMetadataFiles(ctx: Context) {
+ if (!ctx.announcedBuild) {
+ ctx.log.warn('No build announced, skipping metadata upload.');
+ return;
+ }
+
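+  // Candidate metadata files; entries that are missing or empty on disk are filtered out below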
+ const metadataFiles = [
+ CHROMATIC_DIAGNOSTICS_FILE,
+ CHROMATIC_LOG_FILE,
+ STORYBOOK_BUILD_LOG_FILE,
+ await findStorybookConfigFile(ctx, /^main\.[jt]sx?$/).catch(() => null),
+ await findStorybookConfigFile(ctx, /^preview\.[jt]sx?$/).catch(() => null),
+ ctx.fileInfo?.statsPath && (await trimStatsFile([ctx.fileInfo.statsPath])),
+ ].filter(Boolean);
+
+ const files = await Promise.all(
+ metadataFiles.map(async (localPath) => {
+ const targetPath = `.chromatic/${basename(localPath)}`;
+ const contentLength = await fileSize(localPath);
+ return contentLength && { localPath, targetPath, contentLength };
+ })
+ ).then((files) =>
+ files
+ .filter(Boolean)
+ .sort((a, b) => a.targetPath.localeCompare(b.targetPath, 'en', { numeric: true }))
+ );
+
+ if (!files.length) {
+ ctx.log.warn('No metadata files found, skipping metadata upload.');
+ return;
+ }
+
+ await withFile(async ({ path }) => {
+ const html = metadataHtml(ctx, files);
+ writeFileSync(path, html);
+ files.push({
+ localPath: path,
+ targetPath: '.chromatic/index.html',
+ contentLength: html.length,
+ });
+
+ const directoryUrl = `${baseStorybookUrl(ctx.isolatorUrl)}/.chromatic/`;
+ ctx.log.info(uploadingMetadata(directoryUrl, files));
+
+ await uploadAsIndividualFiles(ctx, files);
+ });
+}
diff --git a/node-src/lib/utils.ts b/node-src/lib/utils.ts
index 31717c2bc..eb1b7b50b 100644
--- a/node-src/lib/utils.ts
+++ b/node-src/lib/utils.ts
@@ -34,7 +34,7 @@ export const activityBar = (n = 0, size = 20) => {
return `[${track.join('')}]`;
};
-export const baseStorybookUrl = (url: string) => url.replace(/\/iframe\.html$/, '');
+export const baseStorybookUrl = (url: string) => url?.replace(/\/iframe\.html$/, '');
export const rewriteErrorMessage = (err: Error, message: string) => {
try {
diff --git a/node-src/lib/writeChromaticDiagnostics.test.ts b/node-src/lib/writeChromaticDiagnostics.test.ts
new file mode 100644
index 000000000..c7c0a88c3
--- /dev/null
+++ b/node-src/lib/writeChromaticDiagnostics.test.ts
@@ -0,0 +1,25 @@
+import { describe, it, expect } from 'vitest';
+import { getDiagnostics } from './writeChromaticDiagnostics';
+
+describe('getDiagnostics', () => {
+ it('returns context object', () => {
+ const ctx = { build: { number: 1 } };
+ expect(getDiagnostics(ctx as any)).toEqual(ctx);
+ });
+
+ it('omits certain fields', () => {
+ const ctx = { argv: [], client: {}, env: {}, log: {}, pkg: {}, title: {} };
+ expect(getDiagnostics(ctx as any)).toEqual({});
+ });
+
+ it('redacts sensitive fields', () => {
+ const ctx = {
+ build: { number: 1, reportToken: 'foo' },
+ flags: { projectToken: 'bar' },
+ };
+ expect(getDiagnostics(ctx as any)).toEqual({
+ build: { number: 1, reportToken: undefined },
+ flags: { projectToken: undefined },
+ });
+ });
+});
diff --git a/node-src/lib/writeChromaticDiagnostics.ts b/node-src/lib/writeChromaticDiagnostics.ts
index cfd38c668..2816044a0 100644
--- a/node-src/lib/writeChromaticDiagnostics.ts
+++ b/node-src/lib/writeChromaticDiagnostics.ts
@@ -5,19 +5,32 @@ import wroteReport from '../ui/messages/info/wroteReport';
const { writeFile } = jsonfile;
+export const CHROMATIC_DIAGNOSTICS_FILE = 'chromatic-diagnostics.json';
+
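+// Recursively replaces the given fields with undefined so tokens don't leak into the diagnostics output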
+const redact = <T>(value: T, ...fields: string[]): T => {
+ if (value === null || typeof value !== 'object') return value;
+ if (Array.isArray(value)) return value.map((item) => redact(item, ...fields)) as T;
+ const obj = { ...value };
+ for (const key in obj) obj[key] = fields.includes(key) ? undefined : redact(obj[key], ...fields);
+ return obj;
+};
+
export function getDiagnostics(ctx: Context) {
+ // Drop some fields that are not useful to have and redact sensitive fields
const { argv, client, env, help, http, log, pkg, title, ...rest } = ctx;
- return Object.keys(rest)
+ const data = redact(rest, 'projectToken', 'reportToken');
+
+ // Sort top-level fields alphabetically
+ return Object.keys(data)
.sort((a, b) => a.localeCompare(b))
- .reduce((acc, key) => ({ ...acc, [key]: rest[key] }), {});
+ .reduce((acc, key) => ({ ...acc, [key]: data[key] }), {});
}
// Extract important information from ctx, sort it and output into a json file
export async function writeChromaticDiagnostics(ctx: Context) {
try {
- const chromaticDiagnosticsPath = 'chromatic-diagnostics.json';
- await writeFile(chromaticDiagnosticsPath, getDiagnostics(ctx), { spaces: 2 });
- ctx.log.info(wroteReport(chromaticDiagnosticsPath, 'Chromatic diagnostics'));
+ await writeFile(CHROMATIC_DIAGNOSTICS_FILE, getDiagnostics(ctx), { spaces: 2 });
+ ctx.log.info(wroteReport(CHROMATIC_DIAGNOSTICS_FILE, 'Chromatic diagnostics'));
} catch (error) {
ctx.log.error(error);
}
diff --git a/node-src/runBuild.ts b/node-src/runBuild.ts
new file mode 100644
index 000000000..b406348ad
--- /dev/null
+++ b/node-src/runBuild.ts
@@ -0,0 +1,111 @@
+import Listr from 'listr';
+
+import GraphQLClient from './io/GraphQLClient';
+import { getConfiguration } from './lib/getConfiguration';
+import getOptions from './lib/getOptions';
+import LoggingRenderer from './lib/LoggingRenderer';
+import NonTTYRenderer from './lib/NonTTYRenderer';
+import { exitCodes, setExitCode } from './lib/setExitCode';
+import { rewriteErrorMessage } from './lib/utils';
+import getTasks from './tasks';
+import { Context } from './types';
+import buildCanceled from './ui/messages/errors/buildCanceled';
+import fatalError from './ui/messages/errors/fatalError';
+import fetchError from './ui/messages/errors/fetchError';
+import graphqlError from './ui/messages/errors/graphqlError';
+import missingStories from './ui/messages/errors/missingStories';
+import runtimeError from './ui/messages/errors/runtimeError';
+import taskError from './ui/messages/errors/taskError';
+import intro from './ui/messages/info/intro';
+import { endActivity } from './ui/components/activity';
+
+export async function runBuild(ctx: Context) {
+ ctx.log.info('');
+ ctx.log.info(intro(ctx));
+
+ try {
+ ctx.configuration = await getConfiguration(
+ ctx.extraOptions?.configFile || ctx.flags.configFile
+ );
+ ctx.options = await getOptions(ctx);
+ } catch (e) {
+ ctx.log.info('');
+ ctx.log.error(fatalError(ctx, [e]));
+ (ctx.options || ctx.extraOptions)?.experimental_onTaskError?.(ctx, {
+ formattedError: fatalError(ctx, [e]),
+ originalError: e,
+ });
+ setExitCode(ctx, exitCodes.INVALID_OPTIONS, true);
+ return;
+ }
+
+ try {
+ ctx.client = new GraphQLClient(ctx, `${ctx.env.CHROMATIC_INDEX_URL}/graphql`, {
+ headers: {
+ 'x-chromatic-session-id': ctx.sessionId,
+ 'x-chromatic-cli-version': ctx.pkg.version,
+ },
+ retries: 3,
+ });
+
+ try {
+ ctx.log.info('');
+ const options = {
+ log: ctx.log,
+ renderer: NonTTYRenderer,
+ };
+ if (ctx.options.interactive) {
+ // Use an enhanced version of Listr's default renderer, which also logs to a file
+ options.renderer = LoggingRenderer;
+ // Queue up any non-Listr log messages while Listr is running
+ ctx.log.queue();
+ }
+ await new Listr(getTasks(ctx.options), options).run(ctx);
+ } catch (err) {
+ endActivity(ctx);
+ if (err.code === 'ECONNREFUSED' || err.name === 'StatusCodeError') {
+ setExitCode(ctx, exitCodes.FETCH_ERROR);
+ throw rewriteErrorMessage(err, fetchError(ctx, err));
+ }
+ if (err.name === 'GraphQLError') {
+ setExitCode(ctx, exitCodes.GRAPHQL_ERROR);
+ throw rewriteErrorMessage(err, graphqlError(ctx, err));
+ }
+ if (err.message.startsWith('Cannot run a build with no stories')) {
+ setExitCode(ctx, exitCodes.BUILD_NO_STORIES);
+ throw rewriteErrorMessage(err, missingStories(ctx));
+ }
+ if (ctx.options.experimental_abortSignal?.aborted) {
+ setExitCode(ctx, exitCodes.BUILD_WAS_CANCELED, true);
+ throw rewriteErrorMessage(err, buildCanceled());
+ }
+ throw rewriteErrorMessage(err, taskError(ctx, err));
+ } finally {
+ // Handle potential runtime errors from JSDOM
+ const { runtimeErrors, runtimeWarnings } = ctx;
+ if ((runtimeErrors && runtimeErrors.length) || (runtimeWarnings && runtimeWarnings.length)) {
+ ctx.log.info('');
+ ctx.log.error(runtimeError(ctx));
+ }
+
+ ctx.log.flush();
+ }
+ } catch (error) {
+ const errors = [].concat(error); // GraphQLClient might throw an array of errors
+ const formattedError = fatalError(ctx, errors);
+
+ ctx.options.experimental_onTaskError?.(ctx, {
+ formattedError,
+ originalError: errors[0],
+ });
+
+ if (!ctx.userError) {
+ ctx.log.info('');
+ ctx.log.error(formattedError);
+ }
+
+ if (!ctx.exitCode) {
+ setExitCode(ctx, exitCodes.UNKNOWN_ERROR);
+ }
+ }
+}
diff --git a/node-src/tasks/build.ts b/node-src/tasks/build.ts
index 835177cc2..ea69a88ab 100644
--- a/node-src/tasks/build.ts
+++ b/node-src/tasks/build.ts
@@ -12,6 +12,8 @@ import buildFailed from '../ui/messages/errors/buildFailed';
import { failed, initial, pending, skipped, success } from '../ui/tasks/build';
import { getPackageManagerRunCommand } from '../lib/getPackageManager';
+export const STORYBOOK_BUILD_LOG_FILE = 'build-storybook.log';
+
export const setSourceDir = async (ctx: Context) => {
if (ctx.options.outputDir) {
ctx.sourceDir = ctx.options.outputDir;
@@ -49,7 +51,7 @@ const timeoutAfter = (ms) =>
new Promise((resolve, reject) => setTimeout(reject, ms, new Error(`Operation timed out`)));
export const buildStorybook = async (ctx: Context) => {
- ctx.buildLogFile = path.resolve('./build-storybook.log');
+ ctx.buildLogFile = path.resolve(STORYBOOK_BUILD_LOG_FILE);
const logFile = createWriteStream(ctx.buildLogFile);
await new Promise((resolve, reject) => {
logFile.on('open', resolve);
diff --git a/node-src/tasks/upload.test.ts b/node-src/tasks/upload.test.ts
index 0a6af577c..038b1d2d6 100644
--- a/node-src/tasks/upload.test.ts
+++ b/node-src/tasks/upload.test.ts
@@ -2,6 +2,7 @@ import { createReadStream, readdirSync, readFileSync, statSync } from 'fs';
import progressStream from 'progress-stream';
import { beforeEach, describe, expect, it, vi } from 'vitest';
+import { default as compress } from '../lib/compress';
import { getDependentStoryFiles as getDepStoryFiles } from '../lib/getDependentStoryFiles';
import { findChangedDependencies as findChangedDep } from '../lib/findChangedDependencies';
import { findChangedPackageFiles as findChangedPkg } from '../lib/findChangedPackageFiles';
@@ -9,11 +10,13 @@ import { validateFiles, traceChangedFiles, uploadStorybook } from './upload';
vi.mock('fs');
vi.mock('progress-stream');
+vi.mock('../lib/compress');
vi.mock('../lib/getDependentStoryFiles');
vi.mock('../lib/findChangedDependencies');
vi.mock('../lib/findChangedPackageFiles');
vi.mock('./read-stats-file');
+const makeZipFile = vi.mocked(compress);
const findChangedDependencies = vi.mocked(findChangedDep);
const findChangedPackageFiles = vi.mocked(findChangedPkg);
const getDependentStoryFiles = vi.mocked(getDepStoryFiles);
@@ -372,4 +375,59 @@ describe('uploadStorybook', () => {
unit: 'bytes',
});
});
+
+ describe('with zip', () => {
+ it('retrieves the upload location, adds the files to an archive and uploads it', async () => {
+ const client = { runQuery: vi.fn() };
+ client.runQuery.mockReturnValue({
+ getZipUploadUrl: {
+ domain: 'https://asdqwe.chromatic.com',
+ url: 'https://asdqwe.chromatic.com/storybook.zip',
+ sentinelUrl: 'https://asdqwe.chromatic.com/upload.txt',
+ },
+ });
+
+ makeZipFile.mockReturnValue(Promise.resolve({ path: 'storybook.zip', size: 80 }));
+ createReadStreamMock.mockReturnValue({ pipe: vi.fn() } as any);
+ http.fetch.mockReturnValue({ ok: true, text: () => Promise.resolve('OK') });
+ progress.mockReturnValue({ on: vi.fn() } as any);
+
+ const fileInfo = {
+ lengths: [
+ { knownAs: 'iframe.html', contentLength: 42 },
+ { knownAs: 'index.html', contentLength: 42 },
+ ],
+ paths: ['iframe.html', 'index.html'],
+ total: 84,
+ };
+ const ctx = {
+ client,
+ env,
+ log,
+ http,
+ sourceDir: '/static/',
+ options: { zip: true },
+ fileInfo,
+ announcedBuild: { id: '1' },
+ } as any;
+ await uploadStorybook(ctx, {} as any);
+
+ expect(client.runQuery).toHaveBeenCalledWith(
+ expect.stringMatching(/GetZipUploadUrlMutation/),
+ { buildId: '1' }
+ );
+ expect(http.fetch).toHaveBeenCalledWith(
+ 'https://asdqwe.chromatic.com/storybook.zip',
+ expect.objectContaining({
+ method: 'PUT',
+ headers: {
+ 'content-type': 'application/zip',
+ 'content-length': '80',
+ },
+ }),
+ expect.objectContaining({ retries: 0 })
+ );
+ expect(ctx.uploadedBytes).toBe(80);
+ });
+ });
});
diff --git a/node-src/tasks/upload.ts b/node-src/tasks/upload.ts
index 33eb9ebca..3175f9ed3 100644
--- a/node-src/tasks/upload.ts
+++ b/node-src/tasks/upload.ts
@@ -1,14 +1,10 @@
import { readdirSync, readFileSync, statSync } from 'fs';
import { join } from 'path';
import slash from 'slash';
-import { URL } from 'url';
import { getDependentStoryFiles } from '../lib/getDependentStoryFiles';
import { createTask, transitionTo } from '../lib/tasks';
-import makeZipFile from '../lib/compress';
-import uploadFiles from '../lib/uploadFiles';
import { matchesFile, rewriteErrorMessage, throttle } from '../lib/utils';
-import { uploadZip, waitForUnpack } from '../lib/uploadZip';
import deviatingOutputDir from '../ui/messages/warnings/deviatingOutputDir';
import missingStatsFile from '../ui/messages/warnings/missingStatsFile';
import {
@@ -30,46 +26,7 @@ import { readStatsFile } from './read-stats-file';
import bailFile from '../ui/messages/warnings/bailFile';
import { findChangedPackageFiles } from '../lib/findChangedPackageFiles';
import { findChangedDependencies } from '../lib/findChangedDependencies';
-
-const GetUploadUrlsMutation = `
- mutation GetUploadUrlsMutation($buildId: ObjID, $paths: [String!]!) {
- getUploadUrls(buildId: $buildId, paths: $paths) {
- domain
- urls {
- path
- url
- contentType
- }
- }
- }
-`;
-interface GetUploadUrlsMutationResult {
- getUploadUrls: {
- domain: string;
- urls: {
- path: string;
- url: string;
- contentType: string;
- }[];
- };
-}
-
-const GetZipUploadUrlMutation = `
- mutation GetZipUploadUrlMutation($buildId: ObjID) {
- getZipUploadUrl(buildId: $buildId) {
- domain
- url
- sentinelUrl
- }
- }
-`;
-interface GetZipUploadUrlMutationResult {
- getZipUploadUrl: {
- domain: string;
- url: string;
- sentinelUrl: string;
- };
-}
+import { uploadAsIndividualFiles, uploadAsZipFile } from '../lib/upload';
interface PathSpec {
pathname: string;
@@ -153,6 +110,8 @@ export const traceChangedFiles = async (ctx: Context, task: Task) => {
transitionTo(tracing)(ctx, task);
const statsPath = join(ctx.sourceDir, ctx.fileInfo.statsPath);
+ ctx.fileInfo.statsPath = statsPath;
+
const { changedFiles, packageManifestChanges } = ctx.git;
try {
const changedDependencyNames = await findChangedDependencies(ctx).catch((err) => {
@@ -223,93 +182,49 @@ export const traceChangedFiles = async (ctx: Context, task: Task) => {
}
};
-async function uploadAsIndividualFiles(
- ctx: Context,
- task: Task,
- updateProgress: (progress: number, total: number) => void
-) {
- const { lengths, paths, total } = ctx.fileInfo;
-  const { getUploadUrls } = await ctx.client.runQuery<GetUploadUrlsMutationResult>(
- GetUploadUrlsMutation,
- { buildId: ctx.announcedBuild.id, paths }
- );
- const { domain, urls } = getUploadUrls;
- const files = urls.map(({ path, url, contentType }) => ({
- path: join(ctx.sourceDir, path),
- url,
- contentType,
- contentLength: lengths.find(({ knownAs }) => knownAs === path).contentLength,
- }));
-
- task.output = starting().output;
-
- try {
- await uploadFiles(ctx, files, (progress) => updateProgress(progress, total));
- } catch (e) {
- if (files.find(({ path }) => path === e.message)) {
- throw new Error(failed({ path: e.message }).output);
- }
- throw e;
- }
-
- ctx.uploadedBytes = total;
- ctx.isolatorUrl = new URL('/iframe.html', domain).toString();
-}
-
-async function uploadAsZipFile(
- ctx: Context,
- task: Task,
- updateProgress: (progress: number, total: number) => void
-) {
- const zipped = await makeZipFile(ctx);
- const { path, size: total } = zipped;
-  const { getZipUploadUrl } = await ctx.client.runQuery<GetZipUploadUrlMutationResult>(
- GetZipUploadUrlMutation,
- { buildId: ctx.announcedBuild.id }
- );
- const { domain, url, sentinelUrl } = getZipUploadUrl;
-
- task.output = starting().output;
-
- try {
- await uploadZip(ctx, path, url, total, (progress) => updateProgress(progress, total));
- } catch (e) {
- if (path === e.message) {
- throw new Error(failed({ path }).output);
- }
- throw e;
- }
-
- ctx.uploadedBytes = total;
- ctx.isolatorUrl = new URL('/iframe.html', domain).toString();
-
- return waitForUnpack(ctx, sentinelUrl);
-}
-
export const uploadStorybook = async (ctx: Context, task: Task) => {
if (ctx.skip) return;
transitionTo(preparing)(ctx, task);
- const updateProgress = throttle(
- (progress, total) => {
- const percentage = Math.round((progress / total) * 100);
- task.output = uploading({ percentage }).output;
-
- ctx.options.experimental_onTaskProgress?.({ ...ctx }, { progress, total, unit: 'bytes' });
+ const options = {
+ onStart: () => (task.output = starting().output),
+ onProgress: throttle(
+ (progress, total) => {
+ const percentage = Math.round((progress / total) * 100);
+ task.output = uploading({ percentage }).output;
+
+ ctx.options.experimental_onTaskProgress?.({ ...ctx }, { progress, total, unit: 'bytes' });
+ },
+ // Avoid spamming the logs with progress updates in non-interactive mode
+ ctx.options.interactive ? 100 : ctx.env.CHROMATIC_OUTPUT_INTERVAL
+ ),
+ onComplete: (uploadedBytes: number, domain: string) => {
+ ctx.uploadedBytes = uploadedBytes;
+ ctx.isolatorUrl = new URL('/iframe.html', domain).toString();
},
- // Avoid spamming the logs with progress updates in non-interactive mode
- ctx.options.interactive ? 100 : ctx.env.CHROMATIC_OUTPUT_INTERVAL
- );
+ onError: (error: Error, path?: string) => {
+ throw path === error.message ? new Error(failed({ path }).output) : error;
+ },
+ };
+
+ const files = ctx.fileInfo.paths.map((path) => ({
+ localPath: join(ctx.sourceDir, path),
+ targetPath: path,
+ contentLength: ctx.fileInfo.lengths.find(({ knownAs }) => knownAs === path).contentLength,
+ }));
if (ctx.options.zip) {
try {
- await uploadAsZipFile(ctx, task, updateProgress);
+ await uploadAsZipFile(ctx, files, options);
} catch (err) {
- ctx.log.debug({ err }, 'Error uploading zip file');
- await uploadAsIndividualFiles(ctx, task, updateProgress);
+ ctx.log.debug(
+ { err },
+ 'Error uploading zip file, falling back to uploading individual files'
+ );
+ await uploadAsIndividualFiles(ctx, files, options);
}
} else {
- await uploadAsIndividualFiles(ctx, task, updateProgress);
+ await uploadAsIndividualFiles(ctx, files, options);
}
};
diff --git a/node-src/types.ts b/node-src/types.ts
index 008f979bc..6f6873602 100644
--- a/node-src/types.ts
+++ b/node-src/types.ts
@@ -43,6 +43,7 @@ export interface Flags {
list?: boolean;
interactive?: boolean;
traceChanged?: string;
+ uploadMetadata?: boolean;
// Deprecated options (for JSDOM and tunneled builds, among others)
allowConsoleErrors?: boolean;
@@ -70,6 +71,7 @@ export interface Options {
diagnostics: boolean;
interactive: boolean;
junitReport: boolean | string;
+ uploadMetadata?: Flags['uploadMetadata'];
zip: Flags['zip'];
autoAcceptChanges: boolean | string;
@@ -211,6 +213,7 @@ export interface Context {
packageName?: string;
packageVersion?: string;
};
+ mainConfigFilePath?: string;
};
isolatorUrl: string;
cachedUrl: string;
@@ -340,3 +343,14 @@ export interface Module {
export interface Stats {
modules: Module[];
}
+
+export interface FileDesc {
+ contentLength: number;
+ localPath: string;
+ targetPath: string;
+}
+
+export interface TargetedFile extends FileDesc {
+ contentType: string;
+ targetUrl: string;
+}
diff --git a/node-src/ui/html/metadata.html.stories.ts b/node-src/ui/html/metadata.html.stories.ts
new file mode 100644
index 000000000..e197743ee
--- /dev/null
+++ b/node-src/ui/html/metadata.html.stories.ts
@@ -0,0 +1,52 @@
+import metadataHtml from './metadata.html';
+
+export default {
+ title: 'HTML/Metadata index',
+ includeStories: /^[A-Z]/,
+};
+
+export const files = [
+ {
+ contentLength: 833,
+ localPath: 'build-storybook.log',
+ targetPath: '.chromatic/build-storybook.log',
+ },
+ {
+ contentLength: 674,
+ localPath: 'chromatic.log',
+ targetPath: '.chromatic/chromatic.log',
+ },
+ {
+ contentLength: 3645,
+ localPath: 'chromatic-diagnostics.json',
+ targetPath: '.chromatic/chromatic-diagnostics.json',
+ },
+ {
+ contentLength: 423,
+ localPath: 'main.ts',
+ targetPath: '.chromatic/main.ts',
+ },
+ {
+ contentLength: 5635,
+ localPath: 'preview.tsx',
+ targetPath: '.chromatic/preview.tsx',
+ },
+ {
+ contentLength: 5635,
+ localPath: 'preview-stats.json',
+ targetPath: '.chromatic/preview-stats.json',
+ },
+];
+
+const announced: any = { announcedBuild: { number: 7805 } };
+
+const build: any = {
+ ...announced,
+ build: { webUrl: 'https://www.chromatic.com/build?appId=5d67dc0374b2e300209c41e7&number=7805' },
+};
+
+const date = new Date('2023-10-12T12:05:23.706Z');
+
+export const Default = () => metadataHtml(announced, files, date);
+
+export const WithBuildLink = () => metadataHtml(build, files, date);
diff --git a/node-src/ui/html/metadata.html.ts b/node-src/ui/html/metadata.html.ts
new file mode 100644
index 000000000..1e2ef012b
--- /dev/null
+++ b/node-src/ui/html/metadata.html.ts
@@ -0,0 +1,78 @@
+import { filesize } from 'filesize';
+import { Context, FileDesc } from '../../types';
+
+const linkIcon =
+ '';
+
+export default (
+ { announcedBuild, build }: Context,
+ files: FileDesc[],
+ creationDate: Date = new Date()
+) => `
+<!doctype html>
+<html lang="en">
+  <head>
+    <meta charset="utf-8" />
+    <title>Build ${announcedBuild.number} metadata files</title>
+    <!-- inline styles omitted -->
+  </head>
+  <body>
+    <h1>
+      Build ${announcedBuild.number} ${
+        build ? `<a href="${build.webUrl}" target="_blank">${linkIcon}</a>` : ''
+      }
+    </h1>
+    <h2>Metadata files</h2>
+    <ul>
+      ${files
+        .map(({ targetPath, contentLength }) => {
+          const path = targetPath.replace(/^\.chromatic\//, '');
+          const size = filesize(contentLength);
+          return `<li><a href="${path}">${path}</a> (${size})</li>`;
+        })
+        .join('')}
+    </ul>
+    <footer>
+      Generated on ${creationDate.toLocaleString('en', {
+        timeStyle: 'medium',
+        dateStyle: 'full',
+        hourCycle: 'h24',
+        timeZone: 'UTC',
+      })} UTC
+    </footer>
+  </body>
+</html>
+`;
diff --git a/node-src/ui/messages/info/uploadingMetadata.stories.ts b/node-src/ui/messages/info/uploadingMetadata.stories.ts
new file mode 100644
index 000000000..7a62e11b2
--- /dev/null
+++ b/node-src/ui/messages/info/uploadingMetadata.stories.ts
@@ -0,0 +1,10 @@
+import { files } from '../../html/metadata.html.stories';
+import uploadingMetadata from './uploadingMetadata';
+
+export default {
+ title: 'CLI/Messages/Info',
+};
+
+const directoryUrl = 'https://5d67dc0374b2e300209c41e7-dlmmxasauj.chromatic.com/.chromatic/';
+
+export const UploadingMetadata = () => uploadingMetadata(directoryUrl, files);
diff --git a/node-src/ui/messages/info/uploadingMetadata.ts b/node-src/ui/messages/info/uploadingMetadata.ts
new file mode 100644
index 000000000..d22300ab3
--- /dev/null
+++ b/node-src/ui/messages/info/uploadingMetadata.ts
@@ -0,0 +1,12 @@
+import chalk from 'chalk';
+import pluralize from 'pluralize';
+
+import { info } from '../../components/icons';
+import { FileDesc } from '../../../types';
+import link from '../../components/link';
+
+export default (directoryUrl: string, files: FileDesc[]) => {
+ const count = pluralize('metadata file', files.length, true);
+ const list = `- ${files.map((f) => f.targetPath.replace(/^\.chromatic\//, '')).join('\n- ')}`;
+ return chalk`${info} Uploading {bold ${count}} to ${link(directoryUrl)}\n${list}`;
+};
diff --git a/package.json b/package.json
index 7ca9419e2..b2610fc99 100644
--- a/package.json
+++ b/package.json
@@ -155,6 +155,7 @@
"esm": "^3.2.25",
"execa": "^7.2.0",
"fake-tag": "^2.0.0",
+ "filesize": "^10.1.0",
"fs-extra": "^10.0.0",
"https-proxy-agent": "^7.0.2",
"husky": "^7.0.0",
diff --git a/yarn.lock b/yarn.lock
index da2f4e41d..6a00b7d7a 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -8079,6 +8079,11 @@ file-uri-to-path@1.0.0:
resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd"
integrity sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==
+filesize@^10.1.0:
+ version "10.1.0"
+ resolved "https://registry.yarnpkg.com/filesize/-/filesize-10.1.0.tgz#846f5cd8d16e073c5d6767651a8264f6149183cd"
+ integrity sha512-GTLKYyBSDz3nPhlLVPjPWZCnhkd9TrrRArNcy8Z+J2cqScB7h2McAzR6NBX6nYOoWafql0roY8hrocxnZBv9CQ==
+
fill-range@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7"