) {
+ const uploadPromise = new Promise(async (resolve, reject) => {
const uploadStream = cloudinary.v2.uploader.upload_stream(
{
folder: "remix",
@@ -16,12 +16,15 @@ async function uploadImage(fileStream: Stream) {
(error, result) => {
if (error) {
reject(error);
+ return;
}
resolve(result);
}
);
- fileStream.pipe(uploadStream);
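+      // Pipe the incoming file data (an async iterable of Uint8Array chunks) into Cloudinary's upload stream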
+ await writeAsyncIterableToWritable(data, uploadStream);
});
+
+ return uploadPromise;
}
console.log("configs", cloudinary.v2.config());
diff --git a/integration/action-test.ts b/integration/action-test.ts
index 80b3078de2d..9e92dc1b681 100644
--- a/integration/action-test.ts
+++ b/integration/action-test.ts
@@ -1,5 +1,4 @@
import { test, expect } from "@playwright/test";
-import path from "path";
import { createFixture, createAppFixture, js } from "./helpers/create-fixture";
import type { Fixture, AppFixture } from "./helpers/create-fixture";
@@ -11,12 +10,9 @@ test.describe("actions", () => {
let FIELD_NAME = "message";
let WAITING_VALUE = "Waiting...";
- let ACTION_DATA_VALUE = "heyooo, data from the action:";
let SUBMITTED_VALUE = "Submission";
let THROWS_REDIRECT = "redirect-throw";
let REDIRECT_TARGET = "page";
- let HAS_FILE_ACTIONS = "file-actions";
- let MAX_FILE_UPLOAD_SIZE = 1234;
let PAGE_TEXT = "PAGE_TEXT";
test.beforeAll(async () => {
@@ -69,94 +65,6 @@ test.describe("actions", () => {
return ${PAGE_TEXT}
}
`,
-
- [`app/routes/${HAS_FILE_ACTIONS}.jsx`]: js`
- import {
- json,
- unstable_parseMultipartFormData as parseMultipartFormData,
- unstable_createFileUploadHandler as createFileUploadHandler,
- } from "@remix-run/node";
- import { Form, useActionData } from "@remix-run/react";
-
- export async function action({ request }) {
- const uploadHandler = createFileUploadHandler({
- directory: ".tmp/uploads",
- maxFileSize: ${MAX_FILE_UPLOAD_SIZE},
- // You probably do *not* want to do this in prod.
-                // We passthrough the name and allow conflicts for test fixtures.
- avoidFileConflicts: false,
- file: ({ filename }) => filename,
- });
-
- let files = [];
- let formData = await parseMultipartFormData(request, uploadHandler);
-
- let file = formData.get("file");
- if (file && typeof file !== "string") {
- files.push({ name: file.name, size: file.size });
- }
-
- return json(
- {
- files,
- message: "${ACTION_DATA_VALUE} " + formData.get("field1"),
- },
- {
- headers: {
- "x-test": "works",
- },
- }
- );
- };
-
- export function headers({ actionHeaders }) {
- return {
- "x-test": actionHeaders.get("x-test"),
- };
- };
-
- export function ErrorBoundary({ error }) {
- return (
-            <div id="actions-error-boundary">
-              <h1>Actions Error Boundary</h1>
-              <p id="actions-error-text">{error.message}</p>
-            </div>
- );
- }
-
- export default function Actions() {
- let { files, message } = useActionData() || {};
-
- return (
-
- );
- }
- `,
},
});
@@ -229,116 +137,4 @@ test.describe("actions", () => {
expect(new URL(page.url()).pathname).toBe(`/${REDIRECT_TARGET}`);
expect(await app.getHtml()).toMatch(PAGE_TEXT);
});
-
- test("can upload file with JavaScript", async ({ page }) => {
- let app = new PlaywrightFixture(appFixture, page);
- await app.goto(`/${HAS_FILE_ACTIONS}`);
-
- let html = await app.getHtml("#action-text");
- expect(html).toMatch(WAITING_VALUE);
-
- await app.uploadFile(
- "#file",
- path.resolve(__dirname, "assets/toupload.txt")
- );
-
- await page.click("button[type=submit]");
- await page.waitForSelector("#action-data");
-
- html = await app.getHtml("#action-text");
- expect(html).toMatch(ACTION_DATA_VALUE + " stuff");
- });
-
- // TODO: figure out what the heck is wrong with this test...
- // For some reason the error message is "Unexpected Server Error" in the test
- // but if you try the app in the browser it works as expected.
- test.skip("rejects too big of an upload with JavaScript", async ({
- page,
- }) => {
- let app = new PlaywrightFixture(appFixture, page);
- await app.goto(`/${HAS_FILE_ACTIONS}`);
-
- let html = await app.getHtml("#action-text");
- expect(html).toMatch(WAITING_VALUE);
-
- await app.uploadFile(
- "#file",
- path.resolve(__dirname, "assets/touploadtoobig.txt")
- );
-
- await page.click("button[type=submit]");
- await page.waitForSelector("#actions-error-boundary");
-
- let text = await app.getHtml("#actions-error-text");
- expect(text).toMatch(
- `Field "file" exceeded upload size of ${MAX_FILE_UPLOAD_SIZE} bytes`
- );
-
- let logs: string[] = [];
- page.on("console", (msg) => {
- logs.push(msg.text());
- });
- expect(logs).toHaveLength(1);
- expect(logs[0]).toMatch(/exceeded upload size/i);
- });
-
- test.describe("without JavaScript", () => {
- test.use({ javaScriptEnabled: false });
-
- test("can upload file", async ({ page }) => {
- let app = new PlaywrightFixture(appFixture, page);
- await app.goto(`/${HAS_FILE_ACTIONS}`);
-
- let html = await app.getHtml("#action-text");
- expect(html).toMatch(WAITING_VALUE);
-
- await app.uploadFile(
- "#file",
- path.resolve(__dirname, "assets/toupload.txt")
- );
-
- let [response] = await Promise.all([
- page.waitForNavigation(),
- page.click("#submit"),
- ]);
-
- expect(response!.status()).toBe(200);
- expect(response!.headers()["x-test"]).toBe("works");
-
- html = await app.getHtml("#action-text");
- expect(html).toMatch(ACTION_DATA_VALUE + " stuff");
- });
-
- // TODO: figure out what the heck is wrong with this test...
- // "Failed to load resource: the server responded with a status of 500 (Internal Server Error)"
- test.skip("rejects too big of an upload", async ({ page }) => {
- let app = new PlaywrightFixture(appFixture, page);
- let logs: string[] = [];
- page.on("console", (msg) => {
- logs.push(msg.text());
- });
-
- await app.goto(`/${HAS_FILE_ACTIONS}`);
-
- let html = await app.getHtml("#action-text");
- expect(html).toMatch(WAITING_VALUE);
-
- await app.uploadFile(
- "#file",
- path.resolve(__dirname, "assets/touploadtoobig.txt")
- );
-
- let [response] = await Promise.all([
- page.waitForNavigation(),
- page.click("#submit"),
- ]);
- expect(response!.status()).toBe(500);
- let text = await app.getHtml("#actions-error-text");
- let errorMessage = `Field "file" exceeded upload size of ${MAX_FILE_UPLOAD_SIZE} bytes`;
- expect(text).toMatch(errorMessage);
-
- expect(logs).toHaveLength(1);
- expect(logs[0]).toMatch(/error running.*action.*routes\/file-actions/i);
- });
- });
});
diff --git a/integration/assets/toupload.txt b/integration/assets/toupload.txt
index 8ab686eafeb..b45ef6fec89 100644
--- a/integration/assets/toupload.txt
+++ b/integration/assets/toupload.txt
@@ -1 +1 @@
-Hello, World!
+Hello, World!
\ No newline at end of file
diff --git a/integration/file-uploads-test.ts b/integration/file-uploads-test.ts
index 99af235cfc2..189bf24308c 100644
--- a/integration/file-uploads-test.ts
+++ b/integration/file-uploads-test.ts
@@ -15,17 +15,24 @@ test.describe("file-uploads", () => {
files: {
"app/fileUploadHandler.js": js`
import * as path from "path";
- import { unstable_createFileUploadHandler as createFileUploadHandler } from "@remix-run/node";
-
- export let uploadHandler = createFileUploadHandler({
- directory: path.resolve(__dirname, "..", "uploads"),
- maxFileSize: 10_000, // 10kb
- // you probably want to avoid conflicts in production
- // do not set to false or passthrough filename in real
- // applications.
- avoidFileConflicts: false,
- file: ({ filename }) => filename
- });
+ import {
+ unstable_composeUploadHandlers as composeUploadHandlers,
+ unstable_createFileUploadHandler as createFileUploadHandler,
+ unstable_createMemoryUploadHandler as createMemoryUploadHandler,
+ } from "@remix-run/node";
+
+ export let uploadHandler = composeUploadHandlers(
+ createFileUploadHandler({
+ directory: path.resolve(__dirname, "..", "uploads"),
+ maxPartSize: 10_000, // 10kb
+ // you probably want to avoid conflicts in production
+ // do not set to false or passthrough filename in real
+ // applications.
+ avoidFileConflicts: false,
+ file: ({ filename }) => filename
+ }),
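+        // Fall back to an in-memory handler for regular form fields (like the hidden "test" input)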
+ createMemoryUploadHandler(),
+ );
`,
"app/routes/file-upload.jsx": js`
import {
@@ -38,10 +45,13 @@ test.describe("file-uploads", () => {
try {
let formData = await parseMultipartFormData(request, uploadHandler);
- let file = formData.get("file");
+ if (formData.get("test") !== "hidden") {
+ return { errorMessage: "hidden field not in form data" };
+ }
+ let file = formData.get("file");
if (typeof file === "string" || !file) {
- throw new Error("invalid file type");
+ return { errorMessage: "invalid file type" };
}
return { name: file.name, size: file.size };
@@ -56,6 +66,7 @@ test.describe("file-uploads", () => {
{JSON.stringify(useActionData(), null, 2)}
diff --git a/integration/upload-test.ts b/integration/upload-test.ts
new file mode 100644
index 00000000000..b3b88909a7d
--- /dev/null
+++ b/integration/upload-test.ts
@@ -0,0 +1,314 @@
+import * as path from "path";
+import { test, expect } from "@playwright/test";
+
+import { PlaywrightFixture } from "./helpers/playwright-fixture";
+import type { Fixture, AppFixture } from "./helpers/create-fixture";
+import { createAppFixture, createFixture, js } from "./helpers/create-fixture";
+
+let fixture: Fixture;
+let appFixture: AppFixture;
+
+test.beforeAll(async () => {
+ fixture = await createFixture({
+ files: {
+ "app/routes/file-upload-handler.jsx": js`
+ import {
+ json,
+ unstable_composeUploadHandlers as composeUploadHandlers,
+ unstable_createFileUploadHandler as createFileUploadHandler,
+ unstable_createMemoryUploadHandler as createMemoryUploadHandler,
+ unstable_parseMultipartFormData as parseMultipartFormData,
+ MaxPartSizeExceededError,
+ } from "@remix-run/node";
+ import { Form, useActionData } from "@remix-run/react";
+
+ export let action = async ({ request }) => {
+ let uploadHandler = composeUploadHandlers(
+ createFileUploadHandler({
+ directory: "./uploads",
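+              // 13 bytes: the exact size of assets/toupload.txt, so larger files trigger MaxPartSizeExceededError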
+ maxPartSize: 13,
+ avoidFileConflicts: false,
+ file: ({ filename }) => filename,
+ }),
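+            // Fall back to an in-memory handler for non-file fields (like the hidden "test" input)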
+ createMemoryUploadHandler(),
+ );
+
+ try {
+ let formData = await parseMultipartFormData(request, uploadHandler);
+
+ if (formData.get("test") !== "hidden") {
+ return { message: "hidden field not in form data" };
+ }
+
+ let file = formData.get("file");
+ let size = typeof file !== "string" && file ? file.size : 0;
+
+ return json({ message: "SUCCESS", size });
+ } catch (error) {
+ if (error instanceof MaxPartSizeExceededError) {
+ return json(
+ { message: "FILE_TOO_LARGE", size: error.maxBytes },
+ { status: 413, headers: { "Connection": "close" } }
+ );
+ }
+ return json({ message: "ERROR" }, 500);
+ }
+ };
+
+ export default function FileUpload() {
+ let { message, size } = useActionData() || {};
+ return (
+
+
+
+ );
+ }
+ `,
+
+ "app/routes/memory-upload-handler.jsx": js`
+ import {
+ json,
+ unstable_createMemoryUploadHandler as createMemoryUploadHandler,
+ unstable_parseMultipartFormData as parseMultipartFormData,
+ MaxPartSizeExceededError,
+ } from "@remix-run/node";
+ import { Form, useActionData } from "@remix-run/react";
+
+ export let action = async ({ request }) => {
+ let uploadHandler = createMemoryUploadHandler({
+ maxPartSize: 13,
+ });
+
+ try {
+ let formData = await parseMultipartFormData(request, uploadHandler);
+
+ if (formData.get("test") !== "hidden") {
+ return { message: "hidden field not in form data" };
+ }
+
+ let file = formData.get("file");
+ let size = typeof file !== "string" && file ? file.size : 0;
+
+ return json({ message: "SUCCESS", size });
+ } catch (error) {
+ if (error instanceof MaxPartSizeExceededError) {
+ return json(
+ { message: "FILE_TOO_LARGE", size: error.maxBytes },
+ { status: 413, headers: { "Connection": "close" } }
+ );
+ }
+ return json({ message: "ERROR" }, 500);
+ }
+ };
+
+ export default function MemoryUpload() {
+ let { message, size } = useActionData() || {};
+ return (
+
+
+
+ );
+ }
+ `,
+
+ "app/routes/passthrough-upload-handler.jsx": js`
+ import {
+ json,
+ unstable_parseMultipartFormData as parseMultipartFormData,
+ } from "@remix-run/node";
+ import { Form, useActionData } from "@remix-run/react";
+
+ export let action = async ({ request }) => {
+ try {
+ let formData = await parseMultipartFormData(request, () => undefined);
+
+ return json(
+ { message: "SUCCESS", size: 0 },
+ );
+ } catch (error) {
+ return json(
+ { message: "ERROR" },
+ { status: 500, headers: { "Connection": "close" } }
+ );
+ }
+ };
+
+ export default function PassthroughUpload() {
+ let { message, size } = useActionData() || {};
+ return (
+
+
+
+ );
+ }
+ `,
+ },
+ });
+
+ appFixture = await createAppFixture(fixture);
+});
+
+test.afterAll(async () => appFixture.close());
+
+test("can upload a file with createFileUploadHandler", async ({ page }) => {
+ let app = new PlaywrightFixture(appFixture, page);
+ await app.goto("/file-upload-handler");
+ await app.uploadFile("#file", path.resolve(__dirname, "assets/toupload.txt"));
+ await app.clickSubmitButton("/file-upload-handler");
+
+ expect(await app.getHtml("#message")).toMatch(">SUCCESS<");
+ expect(await app.getHtml("#size")).toMatch(">13<");
+});
+
+test("can catch MaxPartSizeExceededError when file is too big with createFileUploadHandler", async ({
+ page,
+}) => {
+ let app = new PlaywrightFixture(appFixture, page);
+ await app.goto("/file-upload-handler");
+ await app.uploadFile(
+ "#file",
+ path.resolve(__dirname, "assets/touploadtoobig.txt")
+ );
+ await app.clickSubmitButton("/file-upload-handler");
+
+ expect(await app.getHtml("#message")).toMatch(">FILE_TOO_LARGE<");
+ expect(await app.getHtml("#size")).toMatch(">13<");
+});
+
+test("can upload a file with createMemoryUploadHandler", async ({ page }) => {
+ let app = new PlaywrightFixture(appFixture, page);
+ await app.goto("/memory-upload-handler");
+ await app.uploadFile("#file", path.resolve(__dirname, "assets/toupload.txt"));
+ await app.clickSubmitButton("/memory-upload-handler");
+
+ expect(await app.getHtml("#message")).toMatch(">SUCCESS<");
+ expect(await app.getHtml("#size")).toMatch(">13<");
+});
+
+test("can upload a file with a passthrough handler", async ({ page }) => {
+ let app = new PlaywrightFixture(appFixture, page);
+ await app.goto("/passthrough-upload-handler");
+ await app.uploadFile("#file", path.resolve(__dirname, "assets/toupload.txt"));
+ await app.clickSubmitButton("/passthrough-upload-handler");
+
+ expect(await app.getHtml("#message")).toMatch(">SUCCESS<");
+});
+
+test("can catch MaxPartSizeExceededError when file is too big with createMemoryUploadHandler", async ({
+ page,
+}) => {
+ let app = new PlaywrightFixture(appFixture, page);
+ await app.goto("/memory-upload-handler");
+ await app.uploadFile(
+ "#file",
+ path.resolve(__dirname, "assets/touploadtoobig.txt")
+ );
+ await app.clickSubmitButton("/memory-upload-handler");
+
+ expect(await app.getHtml("#message")).toMatch(">FILE_TOO_LARGE<");
+ expect(await app.getHtml("#size")).toMatch(">13<");
+});
+
+test.describe("without javascript", () => {
+ test.use({ javaScriptEnabled: false });
+
+ test("can upload a file with createFileUploadHandler", async ({ page }) => {
+ let app = new PlaywrightFixture(appFixture, page);
+ await app.goto("/file-upload-handler");
+ await app.uploadFile(
+ "#file",
+ path.resolve(__dirname, "assets/toupload.txt")
+ );
+
+ await Promise.all([page.click("#submit"), page.waitForNavigation()]);
+
+ expect(await app.getHtml("#message")).toMatch(">SUCCESS<");
+ expect(await app.getHtml("#size")).toMatch(">13<");
+ });
+
+ test("can catch MaxPartSizeExceededError when file is too big with createFileUploadHandler", async ({
+ page,
+ }) => {
+ let app = new PlaywrightFixture(appFixture, page);
+ await app.goto("/file-upload-handler");
+ await app.uploadFile(
+ "#file",
+ path.resolve(__dirname, "assets/touploadtoobig.txt")
+ );
+
+ await Promise.all([page.click("#submit"), page.waitForNavigation()]);
+
+ expect(await app.getHtml("#message")).toMatch(">FILE_TOO_LARGE<");
+ expect(await app.getHtml("#size")).toMatch(">13<");
+ });
+
+ test("can upload a file with createMemoryUploadHandler", async ({ page }) => {
+ let app = new PlaywrightFixture(appFixture, page);
+ await app.goto("/memory-upload-handler");
+ await app.uploadFile(
+ "#file",
+ path.resolve(__dirname, "assets/toupload.txt")
+ );
+
+ await Promise.all([page.click("#submit"), page.waitForNavigation()]);
+
+ expect(await app.getHtml("#message")).toMatch(">SUCCESS<");
+ expect(await app.getHtml("#size")).toMatch(">13<");
+ });
+
+ test("can upload a file with passthrough handler", async ({ page }) => {
+ let app = new PlaywrightFixture(appFixture, page);
+ await app.goto("/passthrough-upload-handler");
+ await app.uploadFile(
+ "#file",
+ path.resolve(__dirname, "assets/toupload.txt")
+ );
+
+ await Promise.all([page.click("#submit"), page.waitForNavigation()]);
+
+ expect(await app.getHtml("#message")).toMatch(">SUCCESS<");
+ });
+
+ test("can catch MaxPartSizeExceededError when file is too big with createMemoryUploadHandler", async ({
+ page,
+ }) => {
+ let app = new PlaywrightFixture(appFixture, page);
+ await app.goto("/memory-upload-handler");
+ await app.uploadFile(
+ "#file",
+ path.resolve(__dirname, "assets/touploadtoobig.txt")
+ );
+
+ await Promise.all([page.click("#submit"), page.waitForNavigation()]);
+
+ expect(await app.getHtml("#message")).toMatch(">FILE_TOO_LARGE<");
+ expect(await app.getHtml("#size")).toMatch(">13<");
+ });
+});
diff --git a/packages/remix-architect/__tests__/server-test.ts b/packages/remix-architect/__tests__/server-test.ts
index 09b98ad0762..0dc6bca7a42 100644
--- a/packages/remix-architect/__tests__/server-test.ts
+++ b/packages/remix-architect/__tests__/server-test.ts
@@ -160,7 +160,8 @@ describe("architect createRemixHeaders", () => {
it("handles empty headers", () => {
expect(createRemixHeaders({}, undefined)).toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {},
+ Symbol(query): Array [],
+ Symbol(context): null,
}
`);
});
@@ -169,11 +170,11 @@ describe("architect createRemixHeaders", () => {
expect(createRemixHeaders({ "x-foo": "bar" }, undefined))
.toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "x-foo": Array [
- "bar",
- ],
- },
+ Symbol(query): Array [
+ "x-foo",
+ "bar",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -182,14 +183,13 @@ describe("architect createRemixHeaders", () => {
expect(createRemixHeaders({ "x-foo": "bar", "x-bar": "baz" }, undefined))
.toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "x-bar": Array [
- "baz",
- ],
- "x-foo": Array [
- "bar",
- ],
- },
+ Symbol(query): Array [
+ "x-foo",
+ "bar",
+ "x-bar",
+ "baz",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -198,11 +198,11 @@ describe("architect createRemixHeaders", () => {
expect(createRemixHeaders({ "x-foo": "bar, baz" }, undefined))
.toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "x-foo": Array [
- "bar, baz",
- ],
- },
+ Symbol(query): Array [
+ "x-foo",
+ "bar, baz",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -212,14 +212,13 @@ describe("architect createRemixHeaders", () => {
createRemixHeaders({ "x-foo": "bar, baz", "x-bar": "baz" }, undefined)
).toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "x-bar": Array [
- "baz",
- ],
- "x-foo": Array [
- "bar, baz",
- ],
- },
+ Symbol(query): Array [
+ "x-foo",
+ "bar, baz",
+ "x-bar",
+ "baz",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -232,14 +231,13 @@ describe("architect createRemixHeaders", () => {
])
).toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "Cookie": Array [
- "__session=some_value; __other=some_other_value",
- ],
- "x-something-else": Array [
- "true",
- ],
- },
+ Symbol(query): Array [
+ "x-something-else",
+ "true",
+ "cookie",
+ "__session=some_value; __other=some_other_value",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -256,61 +254,45 @@ describe("architect createRemixRequest", () => {
)
).toMatchInlineSnapshot(`
NodeRequest {
- "abortController": undefined,
"agent": undefined,
"compress": true,
"counter": 0,
"follow": 20,
+ "highWaterMark": 16384,
+ "insecureHTTPParser": false,
"size": 0,
- "timeout": 0,
Symbol(Body internals): Object {
"body": null,
+ "boundary": null,
"disturbed": false,
"error": null,
+ "size": 0,
+ "type": null,
},
Symbol(Request internals): Object {
"headers": Headers {
- Symbol(map): Object {
- "Cookie": Array [
- "__session=value",
- ],
- "accept": Array [
- "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
- ],
- "accept-encoding": Array [
- "gzip, deflate",
- ],
- "accept-language": Array [
- "en-US,en;q=0.9",
- ],
- "host": Array [
- "localhost:3333",
- ],
- "upgrade-insecure-requests": Array [
- "1",
- ],
- "user-agent": Array [
- "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/15.0 Safari/605.1.15",
- ],
- },
+ Symbol(query): Array [
+ "accept",
+ "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
+ "accept-encoding",
+ "gzip, deflate",
+ "accept-language",
+ "en-US,en;q=0.9",
+ "cookie",
+ "__session=value",
+ "host",
+ "localhost:3333",
+ "upgrade-insecure-requests",
+ "1",
+ "user-agent",
+ "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/15.0 Safari/605.1.15",
+ ],
+ Symbol(context): null,
},
"method": "GET",
- "parsedURL": Url {
- "auth": null,
- "hash": null,
- "host": "localhost:3333",
- "hostname": "localhost",
- "href": "https://localhost:3333/",
- "path": "/",
- "pathname": "/",
- "port": "3333",
- "protocol": "https:",
- "query": null,
- "search": null,
- "slashes": true,
- },
+ "parsedURL": "https://localhost:3333/",
"redirect": "follow",
- "signal": undefined,
+ "signal": null,
},
}
`);
diff --git a/packages/remix-architect/server.ts b/packages/remix-architect/server.ts
index 066a396f44b..08cc52f59b0 100644
--- a/packages/remix-architect/server.ts
+++ b/packages/remix-architect/server.ts
@@ -1,9 +1,13 @@
+import type {
+ AppLoadContext,
+ ServerBuild,
+ Response as NodeResponse,
+} from "@remix-run/node";
import {
- // This has been added as a global in node 15+
- AbortController,
Headers as NodeHeaders,
Request as NodeRequest,
createRequestHandler as createRemixRequestHandler,
+ readableStreamToString,
} from "@remix-run/node";
import type {
APIGatewayProxyEventHeaders,
@@ -11,11 +15,6 @@ import type {
APIGatewayProxyHandlerV2,
APIGatewayProxyStructuredResultV2,
} from "aws-lambda";
-import type {
- AppLoadContext,
- ServerBuild,
- Response as NodeResponse,
-} from "@remix-run/node";
import { isBinaryType } from "./binaryTypes";
@@ -48,24 +47,17 @@ export function createRequestHandler({
let handleRequest = createRemixRequestHandler(build, mode);
return async (event /*, context*/) => {
- let abortController = new AbortController();
- let request = createRemixRequest(event, abortController);
+ let request = createRemixRequest(event);
let loadContext =
typeof getLoadContext === "function" ? getLoadContext(event) : undefined;
- let response = (await handleRequest(
- request as unknown as Request,
- loadContext
- )) as unknown as NodeResponse;
+ let response = (await handleRequest(request, loadContext)) as NodeResponse;
- return sendRemixResponse(response, abortController);
+ return sendRemixResponse(response);
};
}
-export function createRemixRequest(
- event: APIGatewayProxyEventV2,
- abortController?: AbortController
-): NodeRequest {
+export function createRemixRequest(event: APIGatewayProxyEventV2): NodeRequest {
let host = event.headers["x-forwarded-host"] || event.headers.host;
let search = event.rawQueryString.length ? `?${event.rawQueryString}` : "";
let scheme = process.env.ARC_SANDBOX ? "http" : "https";
@@ -83,8 +75,6 @@ export function createRemixRequest(
? Buffer.from(event.body, "base64")
: Buffer.from(event.body, "base64").toString()
: event.body,
- abortController,
- signal: abortController?.signal,
});
}
@@ -108,8 +98,7 @@ export function createRemixHeaders(
}
export async function sendRemixResponse(
- nodeResponse: NodeResponse,
- abortController: AbortController
+ nodeResponse: NodeResponse
 ): Promise<APIGatewayProxyStructuredResultV2> {
let cookies: string[] = [];
@@ -126,26 +115,21 @@ export async function sendRemixResponse(
nodeResponse.headers.delete("Set-Cookie");
}
- if (abortController.signal.aborted) {
- nodeResponse.headers.set("Connection", "close");
- }
-
let contentType = nodeResponse.headers.get("Content-Type");
- let isBinary = isBinaryType(contentType);
- let body;
- let isBase64Encoded = false;
-
- if (isBinary) {
- let blob = await nodeResponse.arrayBuffer();
- body = Buffer.from(blob).toString("base64");
- isBase64Encoded = true;
- } else {
- body = await nodeResponse.text();
+ let isBase64Encoded = isBinaryType(contentType);
+ let body: string | undefined;
+
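+  // API Gateway expects binary payloads to be returned as base64-encoded strings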
+ if (nodeResponse.body) {
+ if (isBase64Encoded) {
+ body = await readableStreamToString(nodeResponse.body, "base64");
+ } else {
+ body = await nodeResponse.text();
+ }
}
return {
statusCode: nodeResponse.status,
- headers: Object.fromEntries(nodeResponse.headers),
+ headers: Object.fromEntries(nodeResponse.headers.entries()),
cookies,
body,
isBase64Encoded,
diff --git a/packages/remix-cloudflare/index.ts b/packages/remix-cloudflare/index.ts
index 05f62f1236e..51429f1ff44 100644
--- a/packages/remix-cloudflare/index.ts
+++ b/packages/remix-cloudflare/index.ts
@@ -16,6 +16,10 @@ export {
isSession,
json,
redirect,
+ unstable_composeUploadHandlers,
+ unstable_parseMultipartFormData,
+ unstable_createMemoryUploadHandler,
+ MaxPartSizeExceededError
} from "@remix-run/server-runtime";
export type {
@@ -51,4 +55,8 @@ export type {
SessionData,
SessionIdStorageStrategy,
SessionStorage,
+ UploadHandlerPart,
+ UploadHandler,
+ MemoryUploadHandlerOptions,
+ MemoryUploadHandlerFilterArgs,
} from "@remix-run/server-runtime";
diff --git a/packages/remix-eslint-config/rules/packageExports.js b/packages/remix-eslint-config/rules/packageExports.js
index 11b82a42f65..5054e93b331 100644
--- a/packages/remix-eslint-config/rules/packageExports.js
+++ b/packages/remix-eslint-config/rules/packageExports.js
@@ -15,6 +15,9 @@ const defaultRuntimeExports = {
"isSession",
"json",
"redirect",
+ "unstable_composeUploadHandlers",
+ "unstable_createMemoryUploadHandler",
+ "unstable_parseMultipartFormData",
],
type: [
"ActionFunction",
@@ -83,8 +86,6 @@ const nodeSpecificExports = {
"Request",
"Response",
"unstable_createFileUploadHandler",
- "unstable_createMemoryUploadHandler",
- "unstable_parseMultipartFormData",
],
type: [
"HeadersInit",
diff --git a/packages/remix-express/__tests__/server-test.ts b/packages/remix-express/__tests__/server-test.ts
index 3ff8b5733b2..3d3b02b9c37 100644
--- a/packages/remix-express/__tests__/server-test.ts
+++ b/packages/remix-express/__tests__/server-test.ts
@@ -138,7 +138,8 @@ describe("express createRemixHeaders", () => {
it("handles empty headers", () => {
expect(createRemixHeaders({})).toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {},
+ Symbol(query): Array [],
+ Symbol(context): null,
}
`);
});
@@ -146,11 +147,11 @@ describe("express createRemixHeaders", () => {
it("handles simple headers", () => {
expect(createRemixHeaders({ "x-foo": "bar" })).toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "x-foo": Array [
- "bar",
- ],
- },
+ Symbol(query): Array [
+ "x-foo",
+ "bar",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -159,14 +160,13 @@ describe("express createRemixHeaders", () => {
expect(createRemixHeaders({ "x-foo": "bar", "x-bar": "baz" }))
.toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "x-bar": Array [
- "baz",
- ],
- "x-foo": Array [
- "bar",
- ],
- },
+ Symbol(query): Array [
+ "x-foo",
+ "bar",
+ "x-bar",
+ "baz",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -175,11 +175,11 @@ describe("express createRemixHeaders", () => {
expect(createRemixHeaders({ "x-foo": "bar, baz" }))
.toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "x-foo": Array [
- "bar, baz",
- ],
- },
+ Symbol(query): Array [
+ "x-foo",
+ "bar, baz",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -188,14 +188,13 @@ describe("express createRemixHeaders", () => {
expect(createRemixHeaders({ "x-foo": "bar, baz", "x-bar": "baz" }))
.toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "x-bar": Array [
- "baz",
- ],
- "x-foo": Array [
- "bar, baz",
- ],
- },
+ Symbol(query): Array [
+ "x-foo",
+ "bar, baz",
+ "x-bar",
+ "baz",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -210,12 +209,13 @@ describe("express createRemixHeaders", () => {
})
).toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "set-cookie": Array [
- "__session=some_value; Path=/; Secure; HttpOnly; MaxAge=7200; SameSite=Lax",
- "__other=some_other_value; Path=/; Secure; HttpOnly; MaxAge=3600; SameSite=Lax",
- ],
- },
+ Symbol(query): Array [
+ "set-cookie",
+ "__session=some_value; Path=/; Secure; HttpOnly; MaxAge=7200; SameSite=Lax",
+ "set-cookie",
+ "__other=some_other_value; Path=/; Secure; HttpOnly; MaxAge=3600; SameSite=Lax",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -237,46 +237,35 @@ describe("express createRemixRequest", () => {
expect(createRemixRequest(expressRequest)).toMatchInlineSnapshot(`
NodeRequest {
- "abortController": undefined,
"agent": undefined,
"compress": true,
"counter": 0,
"follow": 20,
+ "highWaterMark": 16384,
+ "insecureHTTPParser": false,
"size": 0,
- "timeout": 0,
Symbol(Body internals): Object {
"body": null,
+ "boundary": null,
"disturbed": false,
"error": null,
+ "size": 0,
+ "type": null,
},
Symbol(Request internals): Object {
"headers": Headers {
- Symbol(map): Object {
- "cache-control": Array [
- "max-age=300, s-maxage=3600",
- ],
- "host": Array [
- "localhost:3000",
- ],
- },
+ Symbol(query): Array [
+ "cache-control",
+ "max-age=300, s-maxage=3600",
+ "host",
+ "localhost:3000",
+ ],
+ Symbol(context): null,
},
"method": "GET",
- "parsedURL": Url {
- "auth": null,
- "hash": null,
- "host": "localhost:3000",
- "hostname": "localhost",
- "href": "http://localhost:3000/foo/bar",
- "path": "/foo/bar",
- "pathname": "/foo/bar",
- "port": "3000",
- "protocol": "http:",
- "query": null,
- "search": null,
- "slashes": true,
- },
+ "parsedURL": "http://localhost:3000/foo/bar",
"redirect": "follow",
- "signal": undefined,
+ "signal": AbortSignal {},
},
}
`);
diff --git a/packages/remix-express/server.ts b/packages/remix-express/server.ts
index 0f66d0fd434..568e28f8614 100644
--- a/packages/remix-express/server.ts
+++ b/packages/remix-express/server.ts
@@ -1,4 +1,3 @@
-import { PassThrough } from "stream";
import type * as express from "express";
import type {
AppLoadContext,
@@ -7,11 +6,11 @@ import type {
Response as NodeResponse,
} from "@remix-run/node";
import {
- // This has been added as a global in node 15+
AbortController,
createRequestHandler as createRemixRequestHandler,
Headers as NodeHeaders,
Request as NodeRequest,
+ writeReadableStreamToWritable,
} from "@remix-run/node";
/**
@@ -53,19 +52,18 @@ export function createRequestHandler({
next: express.NextFunction
) => {
try {
- let abortController = new AbortController();
- let request = createRemixRequest(req, abortController);
+ let request = createRemixRequest(req);
let loadContext =
typeof getLoadContext === "function"
? getLoadContext(req, res)
: undefined;
let response = (await handleRequest(
- request as unknown as Request,
+ request,
loadContext
- )) as unknown as NodeResponse;
+ )) as NodeResponse;
- sendRemixResponse(res, response, abortController);
+ await sendRemixResponse(res, response);
} catch (error) {
// Express doesn't support async functions, so we have to pass along the
// error manually using next().
@@ -94,32 +92,33 @@ export function createRemixHeaders(
return headers;
}
-export function createRemixRequest(
- req: express.Request,
- abortController?: AbortController
-): NodeRequest {
+export function createRemixRequest(req: express.Request): NodeRequest {
let origin = `${req.protocol}://${req.get("host")}`;
let url = new URL(req.url, origin);
+ let controller = new AbortController();
+
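+  // Abort the in-flight Remix request if the client disconnects before a response is sent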
+ req.on("close", () => {
+ controller.abort();
+ });
+
let init: NodeRequestInit = {
method: req.method,
headers: createRemixHeaders(req.headers),
- signal: abortController?.signal,
- abortController,
+ signal: controller.signal,
};
if (req.method !== "GET" && req.method !== "HEAD") {
- init.body = req.pipe(new PassThrough({ highWaterMark: 16384 }));
+ init.body = req;
}
return new NodeRequest(url.href, init);
}
-export function sendRemixResponse(
+export async function sendRemixResponse(
res: express.Response,
- nodeResponse: NodeResponse,
- abortController: AbortController
-): void {
+ nodeResponse: NodeResponse
+): Promise<void> {
res.statusMessage = nodeResponse.statusText;
res.status(nodeResponse.status);
@@ -129,14 +128,8 @@ export function sendRemixResponse(
}
}
- if (abortController.signal.aborted) {
- res.set("Connection", "close");
- }
-
- if (Buffer.isBuffer(nodeResponse.body)) {
- res.end(nodeResponse.body);
- } else if (nodeResponse.body?.pipe) {
- nodeResponse.body.pipe(res);
+ if (nodeResponse.body) {
+ await writeReadableStreamToWritable(nodeResponse.body, res);
} else {
res.end();
}
diff --git a/packages/remix-netlify/__tests__/server-test.ts b/packages/remix-netlify/__tests__/server-test.ts
index f47b1984813..2f344a77a0a 100644
--- a/packages/remix-netlify/__tests__/server-test.ts
+++ b/packages/remix-netlify/__tests__/server-test.ts
@@ -123,10 +123,10 @@ describe("netlify createRequestHandler", () => {
await lambdaTester(createRequestHandler({ build: undefined }))
.event(createMockEvent({ rawUrl: "http://localhost:3000" }))
.expectResolve((res) => {
- expect(res.multiValueHeaders["X-Time-Of-Year"]).toEqual([
+ expect(res.multiValueHeaders["x-time-of-year"]).toEqual([
"most wonderful",
]);
- expect(res.multiValueHeaders["Set-Cookie"]).toEqual([
+ expect(res.multiValueHeaders["set-cookie"]).toEqual([
"first=one; Expires=0; Path=/; HttpOnly; Secure; SameSite=Lax",
"second=two; MaxAge=1209600; Path=/; HttpOnly; Secure; SameSite=Lax",
"third=three; Expires=Wed, 21 Oct 2015 07:28:00 GMT; Path=/; HttpOnly; Secure; SameSite=Lax",
@@ -141,7 +141,8 @@ describe("netlify createRemixHeaders", () => {
it("handles empty headers", () => {
expect(createRemixHeaders({})).toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {},
+ Symbol(query): Array [],
+ Symbol(context): null,
}
`);
});
@@ -149,11 +150,11 @@ describe("netlify createRemixHeaders", () => {
it("handles simple headers", () => {
expect(createRemixHeaders({ "x-foo": ["bar"] })).toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "x-foo": Array [
- "bar",
- ],
- },
+ Symbol(query): Array [
+ "x-foo",
+ "bar",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -162,14 +163,13 @@ describe("netlify createRemixHeaders", () => {
expect(createRemixHeaders({ "x-foo": ["bar"], "x-bar": ["baz"] }))
.toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "x-bar": Array [
- "baz",
- ],
- "x-foo": Array [
- "bar",
- ],
- },
+ Symbol(query): Array [
+ "x-foo",
+ "bar",
+ "x-bar",
+ "baz",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -178,12 +178,13 @@ describe("netlify createRemixHeaders", () => {
expect(createRemixHeaders({ "x-foo": ["bar", "baz"] }))
.toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "x-foo": Array [
- "bar",
- "baz",
- ],
- },
+ Symbol(query): Array [
+ "x-foo",
+ "bar",
+ "x-foo",
+ "baz",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -192,15 +193,15 @@ describe("netlify createRemixHeaders", () => {
expect(createRemixHeaders({ "x-foo": ["bar", "baz"], "x-bar": ["baz"] }))
.toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "x-bar": Array [
- "baz",
- ],
- "x-foo": Array [
- "bar",
- "baz",
- ],
- },
+ Symbol(query): Array [
+ "x-foo",
+ "bar",
+ "x-foo",
+ "baz",
+ "x-bar",
+ "baz",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -212,19 +213,20 @@ describe("netlify createRemixHeaders", () => {
"__session=some_value; Path=/; Secure; HttpOnly; MaxAge=7200; SameSite=Lax",
"__other=some_other_value; Path=/; Secure; HttpOnly; Expires=Wed, 21 Oct 2015 07:28:00 GMT; SameSite=Lax",
],
+
"x-something-else": ["true"],
})
).toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "Cookie": Array [
- "__session=some_value; Path=/; Secure; HttpOnly; MaxAge=7200; SameSite=Lax",
- "__other=some_other_value; Path=/; Secure; HttpOnly; Expires=Wed, 21 Oct 2015 07:28:00 GMT; SameSite=Lax",
- ],
- "x-something-else": Array [
- "true",
- ],
- },
+ Symbol(query): Array [
+ "cookie",
+ "__session=some_value; Path=/; Secure; HttpOnly; MaxAge=7200; SameSite=Lax",
+ "cookie",
+ "__other=some_other_value; Path=/; Secure; HttpOnly; Expires=Wed, 21 Oct 2015 07:28:00 GMT; SameSite=Lax",
+ "x-something-else",
+ "true",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -243,44 +245,35 @@ describe("netlify createRemixRequest", () => {
)
).toMatchInlineSnapshot(`
NodeRequest {
- "abortController": undefined,
"agent": undefined,
"compress": true,
"counter": 0,
"follow": 20,
+ "highWaterMark": 16384,
+ "insecureHTTPParser": false,
"size": 0,
- "timeout": 0,
Symbol(Body internals): Object {
"body": null,
+ "boundary": null,
"disturbed": false,
"error": null,
+ "size": 0,
+ "type": null,
},
Symbol(Request internals): Object {
"headers": Headers {
- Symbol(map): Object {
- "Cookie": Array [
- "__session=value",
- "__other=value",
- ],
- },
+ Symbol(query): Array [
+ "cookie",
+ "__session=value",
+ "cookie",
+ "__other=value",
+ ],
+ Symbol(context): null,
},
"method": "GET",
- "parsedURL": Url {
- "auth": null,
- "hash": null,
- "host": "localhost:3000",
- "hostname": "localhost",
- "href": "http://localhost:3000/",
- "path": "/",
- "pathname": "/",
- "port": "3000",
- "protocol": "http:",
- "query": null,
- "search": null,
- "slashes": true,
- },
+ "parsedURL": "http://localhost:3000/",
"redirect": "follow",
- "signal": undefined,
+ "signal": null,
},
}
`);
diff --git a/packages/remix-netlify/server.ts b/packages/remix-netlify/server.ts
index f1a48aaa319..97eb953a714 100644
--- a/packages/remix-netlify/server.ts
+++ b/packages/remix-netlify/server.ts
@@ -1,9 +1,8 @@
import {
- // This has been added as a global in node 15+
- AbortController,
createRequestHandler as createRemixRequestHandler,
Headers as NodeHeaders,
Request as NodeRequest,
+ readableStreamToString,
} from "@remix-run/node";
import type {
Handler,
@@ -14,8 +13,8 @@ import type {
import type {
AppLoadContext,
ServerBuild,
- Response as NodeResponse,
RequestInit as NodeRequestInit,
+ Response as NodeResponse,
} from "@remix-run/node";
import { isBinaryType } from "./binaryTypes";
@@ -46,26 +45,19 @@ export function createRequestHandler({
let handleRequest = createRemixRequestHandler(build, mode);
return async (event, context) => {
- let abortController = new AbortController();
- let request = createRemixRequest(event, abortController);
+ let request = createRemixRequest(event);
let loadContext =
typeof getLoadContext === "function"
? getLoadContext(event, context)
: undefined;
- let response = (await handleRequest(
- request as unknown as Request,
- loadContext
- )) as unknown as NodeResponse;
+ let response = (await handleRequest(request, loadContext)) as NodeResponse;
- return sendRemixResponse(response, abortController);
+ return sendRemixResponse(response);
};
}
-export function createRemixRequest(
- event: HandlerEvent,
- abortController?: AbortController
-): NodeRequest {
+export function createRemixRequest(event: HandlerEvent): NodeRequest {
let url: URL;
if (process.env.NODE_ENV !== "development") {
@@ -79,8 +71,6 @@ export function createRemixRequest(
let init: NodeRequestInit = {
method: event.httpMethod,
headers: createRemixHeaders(event.multiValueHeaders),
- abortController,
- signal: abortController?.signal,
};
if (event.httpMethod !== "GET" && event.httpMethod !== "HEAD" && event.body) {
@@ -139,29 +129,25 @@ function getRawPath(event: HandlerEvent): string {
}
export async function sendRemixResponse(
- nodeResponse: NodeResponse,
- abortController: AbortController
+ nodeResponse: NodeResponse
 ): Promise<HandlerResponse> {
- if (abortController.signal.aborted) {
- nodeResponse.headers.set("Connection", "close");
- }
-
let contentType = nodeResponse.headers.get("Content-Type");
- let isBinary = isBinaryType(contentType);
- let body;
- let isBase64Encoded = false;
-
- if (isBinary) {
- let blob = await nodeResponse.arrayBuffer();
- body = Buffer.from(blob).toString("base64");
- isBase64Encoded = true;
- } else {
- body = await nodeResponse.text();
+ let body: string | undefined;
+ let isBase64Encoded = isBinaryType(contentType);
+
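+  // Netlify Functions expect binary bodies to be returned as base64-encoded strings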
+ if (nodeResponse.body) {
+ if (isBase64Encoded) {
+ body = await readableStreamToString(nodeResponse.body, "base64");
+ } else {
+ body = await nodeResponse.text();
+ }
}
+ let multiValueHeaders = nodeResponse.headers.raw();
+
return {
statusCode: nodeResponse.status,
- multiValueHeaders: nodeResponse.headers.raw(),
+ multiValueHeaders,
body,
isBase64Encoded,
};
diff --git a/packages/remix-node/__tests__/assets/test.txt b/packages/remix-node/__tests__/assets/test.txt
new file mode 100644
index 00000000000..30f51a3fba5
--- /dev/null
+++ b/packages/remix-node/__tests__/assets/test.txt
@@ -0,0 +1 @@
+hello, world!
\ No newline at end of file
diff --git a/packages/remix-node/__tests__/fetch-test.ts b/packages/remix-node/__tests__/fetch-test.ts
index e1dda7584d7..9468160d65a 100644
--- a/packages/remix-node/__tests__/fetch-test.ts
+++ b/packages/remix-node/__tests__/fetch-test.ts
@@ -1,7 +1,6 @@
import { PassThrough } from "stream";
import { Request } from "../fetch";
-import { createMemoryUploadHandler } from "../upload/memoryUploadHandler";
let test = {
source: [
@@ -70,11 +69,10 @@ let test = {
};
describe("Request", () => {
- let uploadHandler = createMemoryUploadHandler({});
-
it("clones", async () => {
let body = new PassThrough();
test.source.forEach((chunk) => body.write(chunk));
+ body.end();
let req = new Request("http://test.com", {
method: "post",
@@ -85,10 +83,8 @@ describe("Request", () => {
});
let cloned = req.clone();
- expect(Object.getPrototypeOf(req)).toBe(Object.getPrototypeOf(cloned));
-
- let formData = await req.formData(uploadHandler);
- let clonedFormData = await cloned.formData(uploadHandler);
+ let formData = await req.formData();
+ let clonedFormData = await cloned.formData();
expect(formData.get("file_name_0")).toBe("super alpha file");
expect(clonedFormData.get("file_name_0")).toBe("super alpha file");
@@ -109,3 +105,9 @@ describe("Request", () => {
expect(file.size).toBe(1023);
});
});
+
+describe("fetch", () => {
+ // fetch a gzip-encoded json blob
+ // call res.json() and make sure it's decoded properly
+ it.todo("decodes gzip encoded body");
+});
diff --git a/packages/remix-node/__tests__/fileUploadHandler-test.ts b/packages/remix-node/__tests__/fileUploadHandler-test.ts
new file mode 100644
index 00000000000..0d52c6eae7e
--- /dev/null
+++ b/packages/remix-node/__tests__/fileUploadHandler-test.ts
@@ -0,0 +1,107 @@
+import * as fs from "fs";
+import * as path from "path";
+import { ReadableStream } from "@remix-run/web-stream";
+
+import { NodeOnDiskFile } from "../upload/fileUploadHandler";
+import { readableStreamToString } from "../stream";
+
+beforeAll(() => {
+ global.ReadableStream = ReadableStream;
+});
+
+describe("NodeOnDiskFile", () => {
+ let filepath = path.resolve(__dirname, "assets/test.txt");
+ let size = fs.statSync(filepath).size;
+ let contents = fs.readFileSync(filepath, "utf-8");
+ let file: NodeOnDiskFile;
+ beforeEach(() => {
+ file = new NodeOnDiskFile(filepath, "text/plain");
+ });
+
+ it("can read file as text", async () => {
+ expect(await file.text()).toBe(contents);
+ });
+
+ it("can get an arrayBuffer", async () => {
+ let buffer = await file.arrayBuffer();
+ expect(buffer.byteLength).toBe(size);
+ expect(buffer).toEqual(Buffer.from(contents));
+ });
+
+ it("can use stream", async () => {
+ expect(await readableStreamToString(file.stream() as any)).toBe(contents);
+ });
+
+ it("can slice file and change type", async () => {
+ let sliced = await file.slice(1, 5, "text/rofl");
+ expect(sliced.type).toBe("text/rofl");
+ expect(await sliced.text()).toBe(contents.slice(1, 5));
+ });
+
+ it("can slice file and get text", async () => {
+ let sliced = await file.slice(1, 5);
+ expect(await sliced.text()).toBe(contents.slice(1, 5));
+ });
+
+ it("can slice file twice and get text", async () => {
+ let sliced = (await file.slice(1, 5)).slice(1, 2);
+ expect(await sliced.text()).toBe(contents.slice(1, 5).slice(1, 2));
+ });
+
+ it("can sice file and get an arrayBuffer", async () => {
+ let sliced = await file.slice(1, 5);
+ let slicedRes = contents.slice(1, 5);
+ let buffer = await sliced.arrayBuffer();
+ expect(buffer.byteLength).toBe(slicedRes.length);
+ expect(buffer).toEqual(Buffer.from(slicedRes));
+ });
+
+ it("can slice file and use stream", async () => {
+ let sliced = await file.slice(1, 5);
+ let slicedRes = contents.slice(1, 5);
+ expect(sliced.size).toBe(slicedRes.length);
+ expect(await sliced.text()).toBe(slicedRes);
+ });
+
+ it("can slice file with negative start and no end", async () => {
+ let sliced = await file.slice(-2);
+ let slicedRes = contents.slice(-2);
+ expect(sliced.size).toBe(slicedRes.length);
+ expect(await sliced.text()).toBe(slicedRes);
+ });
+
+ it("can slice file with negative start and negative end", async () => {
+ let sliced = await file.slice(-3, -1);
+ let slicedRes = contents.slice(-3, -1);
+ expect(sliced.size).toBe(slicedRes.length);
+ expect(await sliced.text()).toBe(slicedRes);
+ });
+
+ it("can slice file with negative start and negative end twice", async () => {
+ let sliced = await file.slice(-3, -1).slice(1, -1);
+ let slicedRes = contents.slice(-3, -1).slice(1, -1);
+ expect(sliced.size).toBe(slicedRes.length);
+ expect(await sliced.text()).toBe(slicedRes);
+ });
+
+ it("can slice file with start and negative end", async () => {
+ let sliced = await file.slice(1, -2);
+ let slicedRes = contents.slice(1, -2);
+ expect(sliced.size).toBe(slicedRes.length);
+ expect(await sliced.text()).toBe(slicedRes);
+ });
+
+ it("can slice file with negaive start and end", async () => {
+ let sliced = await file.slice(-3, 1);
+ let slicedRes = contents.slice(-3, 1);
+ expect(sliced.size).toBe(slicedRes.length);
+ expect(await sliced.text()).toBe(slicedRes);
+ });
+
+ it("can slice oob", async () => {
+ let sliced = await file.slice(0, 10000);
+ let slicedRes = contents.slice(0, 10000);
+ expect(sliced.size).toBe(slicedRes.length);
+ expect(await sliced.text()).toBe(slicedRes);
+ });
+});
diff --git a/packages/remix-node/__tests__/formData-test.ts b/packages/remix-node/__tests__/formData-test.ts
deleted file mode 100644
index 1f483ef44fb..00000000000
--- a/packages/remix-node/__tests__/formData-test.ts
+++ /dev/null
@@ -1,36 +0,0 @@
-import { Blob, File } from "@web-std/file";
-
-import { FormData as NodeFormData } from "../formData";
-
-describe("FormData", () => {
- it("allows for mix of set and append", () => {
- let formData = new NodeFormData();
- formData.set("single", "heyo");
- formData.append("multi", "one");
- formData.append("multi", "two");
-
- let results = [];
- for (let [k, v] of formData) results.push([k, v]);
- expect(results).toEqual([
- ["single", "heyo"],
- ["multi", "one"],
- ["multi", "two"],
- ]);
- });
-
- it("restores correctly empty string values with get method", () => {
- let formData = new NodeFormData();
- formData.set("single", "");
- expect(formData.get("single")).toBe("");
- });
-
- it("allows for mix of set and append with blobs and files", () => {
- let formData = new NodeFormData();
- formData.set("single", new Blob([]));
- formData.append("multi", new Blob([]));
- formData.append("multi", new File([], "test.txt"));
-
- expect(formData.getAll("single")).toHaveLength(1);
- expect(formData.getAll("multi")).toHaveLength(2);
- });
-});
diff --git a/packages/remix-node/__tests__/parseMultipartFormData-test.ts b/packages/remix-node/__tests__/parseMultipartFormData-test.ts
deleted file mode 100644
index 7cd02866d1f..00000000000
--- a/packages/remix-node/__tests__/parseMultipartFormData-test.ts
+++ /dev/null
@@ -1,35 +0,0 @@
-import { Blob, File } from "@web-std/file";
-
-import { Request as NodeRequest } from "../fetch";
-import { FormData as NodeFormData } from "../formData";
-import { internalParseFormData } from "../parseMultipartFormData";
-import { createMemoryUploadHandler } from "../upload/memoryUploadHandler";
-
-describe("internalParseFormData", () => {
- it("plays nice with node-fetch", async () => {
- let formData = new NodeFormData();
- formData.set("a", "value");
- formData.set("blob", new Blob(["blob"]), "blob.txt");
- formData.set("file", new File(["file"], "file.txt"));
-
- let req = new NodeRequest("https://test.com", {
- method: "post",
- body: formData as any,
- });
-
- let uploadHandler = createMemoryUploadHandler({});
- let parsedFormData = await internalParseFormData(
- req.headers.get("Content-Type"),
- req.body as any,
- undefined,
- uploadHandler
- );
-
- expect(parsedFormData.get("a")).toBe("value");
- let blob = parsedFormData.get("blob") as Blob;
- expect(await blob.text()).toBe("blob");
- let file = parsedFormData.get("file") as File;
- expect(file.name).toBe("file.txt");
- expect(await file.text()).toBe("file");
- });
-});
diff --git a/packages/remix-node/fetch.ts b/packages/remix-node/fetch.ts
index d6f2dc7419c..ce42c4ed8d3 100644
--- a/packages/remix-node/fetch.ts
+++ b/packages/remix-node/fetch.ts
@@ -1,139 +1,76 @@
import type { Readable } from "stream";
-import { PassThrough } from "stream";
-import type AbortController from "abort-controller";
-import FormStream from "form-data";
-import type { RequestInfo, RequestInit, Response } from "node-fetch";
-import nodeFetch, { Request as BaseNodeRequest } from "node-fetch";
-
-import { FormData as NodeFormData, isFile } from "./formData";
-import type { UploadHandler } from "./formData";
-import { internalParseFormData } from "./parseMultipartFormData";
-
-export type { HeadersInit, RequestInfo, ResponseInit } from "node-fetch";
-export { Headers, Response } from "node-fetch";
-
-function formDataToStream(formData: NodeFormData): FormStream {
- let formStream = new FormStream();
-
- function toNodeStream(input: any) {
- // The input is either a Node stream or a web stream, if it has
- // a `on` method it's a node stream so we can just return it
- if (typeof input?.on === "function") {
- return input;
- }
-
- let passthrough = new PassThrough();
- let stream = input as ReadableStream;
- let reader = stream.getReader();
- reader
- .read()
- .then(async ({ done, value }) => {
- while (!done) {
- passthrough.push(value);
- ({ done, value } = await reader.read());
- }
- passthrough.push(null);
- })
- .catch((error) => {
- passthrough.emit("error", error);
- });
-
- return passthrough;
+import {
+ fetch as webFetch,
+ Headers as WebHeaders,
+ Request as WebRequest,
+ Response as WebResponse,
+} from "@remix-run/web-fetch";
+export { FormData } from "@remix-run/web-fetch";
+export { File, Blob } from "@remix-run/web-file";
+
+type NodeHeadersInit = ConstructorParameters<typeof WebHeaders>[0];
+type NodeResponseInit = NonNullable<
+  ConstructorParameters<typeof WebResponse>[1]
+>;
+type NodeRequestInfo =
+  | ConstructorParameters<typeof WebRequest>[0]
+  | NodeRequest;
+type NodeRequestInit = Omit<
+  NonNullable<ConstructorParameters<typeof WebRequest>[1]>,
+  "body"
+> & {
+  body?:
+    | NonNullable<ConstructorParameters<typeof WebRequest>[1]>["body"]
+    | Readable;
+};
+
+export type {
+ NodeHeadersInit as HeadersInit,
+ NodeRequestInfo as RequestInfo,
+ NodeRequestInit as RequestInit,
+ NodeResponseInit as ResponseInit,
+};
+
+class NodeRequest extends WebRequest {
+ constructor(info: NodeRequestInfo, init?: NodeRequestInit) {
+ super(info, init as RequestInit);
}
- for (let [key, value] of formData.entries()) {
- if (typeof value === "string") {
- formStream.append(key, value);
- } else if (isFile(value)) {
- let stream = toNodeStream(value.stream());
- formStream.append(key, stream, {
- filename: value.name,
- contentType: value.type,
- knownLength: value.size,
- });
- } else {
- let file = value as File;
- let stream = toNodeStream(file.stream());
- formStream.append(key, stream, {
- filename: "unknown",
- });
- }
+ public get headers(): WebHeaders {
+ return super.headers as WebHeaders;
}
- return formStream;
-}
-
-interface NodeRequestInit extends RequestInit {
- abortController?: AbortController;
-}
-
-class NodeRequest extends BaseNodeRequest {
- private abortController?: AbortController;
-
- constructor(input: RequestInfo, init?: NodeRequestInit | undefined) {
- if (init?.body instanceof NodeFormData) {
- init = {
- ...init,
- body: formDataToStream(init.body),
- };
- }
-
- super(input, init);
-
- let anyInput = input as any;
- let anyInit = init as any;
-
- this.abortController =
- anyInput?.abortController || anyInit?.abortController;
+ public clone(): NodeRequest {
+ return super.clone() as NodeRequest;
}
+}
-  async formData(uploadHandler?: UploadHandler): Promise<FormData> {
- let contentType = this.headers.get("Content-Type");
- if (
- contentType &&
- (/application\/x-www-form-urlencoded/.test(contentType) ||
- /multipart\/form-data/.test(contentType))
- ) {
- return await internalParseFormData(
- contentType,
- this.body as Readable,
- this.abortController,
- uploadHandler
- );
- }
-
- throw new Error("Invalid MIME type");
+class NodeResponse extends WebResponse {
+ public get headers(): WebHeaders {
+ return super.headers as WebHeaders;
}
- clone(): NodeRequest {
- return new NodeRequest(this);
+ public clone(): NodeResponse {
+ return super.clone() as NodeResponse;
}
}
-export { NodeRequest as Request, NodeRequestInit as RequestInit };
-
-/**
- * A `fetch` function for node that matches the web Fetch API. Based on
- * `node-fetch`.
- *
- * @see https://github.com/node-fetch/node-fetch
- * @see https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API
- */
-export function fetch(
- input: RequestInfo,
- init?: RequestInit
-): Promise {
- init = { compress: false, ...init };
-
- if (init?.body instanceof NodeFormData) {
- init = {
- ...init,
- body: formDataToStream(init.body),
- };
- }
-
- // Default to { compress: false } so responses can be proxied through more
- // easily in loaders. Otherwise the response stream encoding will not match
- // the Content-Encoding response header.
- return nodeFetch(input, init);
-}
+export {
+ WebHeaders as Headers,
+ NodeRequest as Request,
+ NodeResponse as Response,
+};
+
+export const fetch: typeof webFetch = (
+ info: NodeRequestInfo,
+ init?: NodeRequestInit
+) => {
+ init = {
+ // Disable compression handling so people can return the result of a fetch
+ // directly in the loader without messing with the Content-Encoding header.
+ compress: false,
+ ...init,
+ };
+
+ return webFetch(info, init as RequestInit);
+};
diff --git a/packages/remix-node/formData.ts b/packages/remix-node/formData.ts
deleted file mode 100644
index 2e4cb8ceb09..00000000000
--- a/packages/remix-node/formData.ts
+++ /dev/null
@@ -1,125 +0,0 @@
-import type { Readable } from "stream";
-
-export type UploadHandlerArgs = {
- name: string;
- stream: Readable;
- filename: string;
- encoding: string;
- mimetype: string;
-};
-
-export type UploadHandler = (
- args: UploadHandlerArgs
-) => Promise<string | File | undefined>;
-
-function isBlob(value: any): value is Blob {
- return (
- typeof value === "object" &&
- (typeof value.arrayBuffer === "function" ||
- typeof value.size === "number" ||
- typeof value.slice === "function" ||
- typeof value.stream === "function" ||
- typeof value.text === "function" ||
- typeof value.type === "string")
- );
-}
-
-export function isFile(blob: Blob): blob is File {
- let file = blob as File;
- return typeof file.name === "string";
-}
-
-class NodeFormData implements FormData {
-  private _fields: Record<string, (File | string)[]>;
-
- constructor(form?: any) {
- if (typeof form !== "undefined") {
- throw new Error("Form data on the server is not supported.");
- }
- this._fields = {};
- }
-
- append(name: string, value: string | Blob, fileName?: string): void {
- if (typeof value !== "string" && !isBlob(value)) {
- throw new Error("formData.append can only accept a string or Blob");
- }
-
- this._fields[name] = this._fields[name] || [];
- if (typeof value === "string" || isFile(value)) {
- this._fields[name].push(value);
- } else {
- this._fields[name].push(new File([value], fileName || "unknown"));
- }
- }
-
- delete(name: string): void {
- delete this._fields[name];
- }
-
- get(name: string): FormDataEntryValue | null {
- let arr = this._fields[name];
- return arr?.slice(-1)[0] ?? null;
- }
-
- getAll(name: string): FormDataEntryValue[] {
- let arr = this._fields[name];
- return arr || [];
- }
-
- has(name: string): boolean {
- return name in this._fields;
- }
-
- set(name: string, value: string | Blob, fileName?: string): void {
- if (typeof value !== "string" && !isBlob(value)) {
- throw new Error("formData.set can only accept a string or Blob");
- }
-
- if (typeof value === "string" || isFile(value)) {
- this._fields[name] = [value];
- } else {
- this._fields[name] = [new File([value], fileName || "unknown")];
- }
- }
-
- forEach(
- callbackfn: (
- value: FormDataEntryValue,
- key: string,
- parent: FormData
- ) => void,
- thisArg?: any
- ): void {
- Object.entries(this._fields).forEach(([name, values]) => {
- values.forEach((value) => callbackfn(value, name, thisArg), thisArg);
- });
- }
-
- entries(): IterableIterator<[string, FormDataEntryValue]> {
- return Object.entries(this._fields)
- .reduce((entries, [name, values]) => {
- values.forEach((value) => entries.push([name, value]));
- return entries;
- }, [] as [string, FormDataEntryValue][])
- .values();
- }
-
-  keys(): IterableIterator<string> {
- return Object.keys(this._fields).values();
- }
-
-  values(): IterableIterator<FormDataEntryValue> {
- return Object.entries(this._fields)
- .reduce((results, [name, values]) => {
- values.forEach((value) => results.push(value));
- return results;
- }, [] as FormDataEntryValue[])
- .values();
- }
-
- *[Symbol.iterator](): IterableIterator<[string, FormDataEntryValue]> {
- yield* this.entries();
- }
-}
-
-export { NodeFormData as FormData };
diff --git a/packages/remix-node/globals.ts b/packages/remix-node/globals.ts
index dbb00e88be1..2732cbe17c4 100644
--- a/packages/remix-node/globals.ts
+++ b/packages/remix-node/globals.ts
@@ -1,13 +1,18 @@
-import { Blob as NodeBlob, File as NodeFile } from "@web-std/file";
+import {
+ ReadableStream as NodeReadableStream,
+ WritableStream as NodeWritableStream,
+} from "@remix-run/web-stream";
import { atob, btoa } from "./base64";
import {
+ Blob as NodeBlob,
+ File as NodeFile,
+ FormData as NodeFormData,
Headers as NodeHeaders,
Request as NodeRequest,
Response as NodeResponse,
fetch as nodeFetch,
} from "./fetch";
-import { FormData as NodeFormData } from "./formData";
declare global {
namespace NodeJS {
@@ -27,6 +32,9 @@ declare global {
Response: typeof Response;
fetch: typeof fetch;
FormData: typeof FormData;
+
+ ReadableStream: typeof ReadableStream;
+ WritableStream: typeof WritableStream;
}
}
}
@@ -35,12 +43,15 @@ export function installGlobals() {
global.atob = atob;
global.btoa = btoa;
- global.Blob = NodeBlob as unknown as typeof Blob;
- global.File = NodeFile as unknown as typeof File;
+ global.Blob = NodeBlob;
+ global.File = NodeFile;
- global.Headers = NodeHeaders as unknown as typeof Headers;
- global.Request = NodeRequest as unknown as typeof Request;
+ global.Headers = NodeHeaders as typeof Headers;
+ global.Request = NodeRequest as typeof Request;
global.Response = NodeResponse as unknown as typeof Response;
- global.fetch = nodeFetch as unknown as typeof fetch;
- global.FormData = NodeFormData as unknown as typeof FormData;
+ global.fetch = nodeFetch as typeof fetch;
+ global.FormData = NodeFormData;
+
+ global.ReadableStream = NodeReadableStream;
+ global.WritableStream = NodeWritableStream;
}
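
// Usage sketch: installGlobals() is typically called once at server startup so
// that fetch, FormData, Blob, File, ReadableStream and WritableStream resolve
// to the web implementations wired up above. A minimal, hypothetical entry:
import { installGlobals } from "@remix-run/node";

installGlobals();

// From here on, `new FormData()` or `new ReadableStream()` use the globals
// installed above.
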
diff --git a/packages/remix-node/index.ts b/packages/remix-node/index.ts
index 5af593cc30d..5c3186554af 100644
--- a/packages/remix-node/index.ts
+++ b/packages/remix-node/index.ts
@@ -10,22 +10,16 @@ export type {
RequestInit,
ResponseInit,
} from "./fetch";
-export { Headers, Request, Response, fetch } from "./fetch";
-
-export { FormData } from "./formData";
-export type { UploadHandler, UploadHandlerArgs } from "./formData";
+export { fetch, Headers, Request, Response, FormData } from "./fetch";
export { installGlobals } from "./globals";
-export { parseMultipartFormData as unstable_parseMultipartFormData } from "./parseMultipartFormData";
-
export { createFileSessionStorage } from "./sessions/fileStorage";
export {
createFileUploadHandler as unstable_createFileUploadHandler,
NodeOnDiskFile,
} from "./upload/fileUploadHandler";
-export { createMemoryUploadHandler as unstable_createMemoryUploadHandler } from "./upload/memoryUploadHandler";
export {
createCookie,
@@ -34,6 +28,13 @@ export {
createSessionStorage,
} from "./implementations";
+export {
+ createReadableStreamFromReadable,
+ readableStreamToString,
+ writeAsyncIterableToWritable,
+ writeReadableStreamToWritable,
+} from "./stream";
+
export {
createRequestHandler,
createSession,
@@ -41,6 +42,10 @@ export {
isSession,
json,
redirect,
+ unstable_composeUploadHandlers,
+ unstable_parseMultipartFormData,
+ unstable_createMemoryUploadHandler,
+ MaxPartSizeExceededError,
} from "@remix-run/server-runtime";
export type {
@@ -76,4 +81,8 @@ export type {
SessionData,
SessionIdStorageStrategy,
SessionStorage,
+ UploadHandlerPart,
+ UploadHandler,
+ MemoryUploadHandlerOptions,
+ MemoryUploadHandlerFilterArgs,
} from "@remix-run/server-runtime";
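
// Usage sketch: the newly (re-)exported upload utilities compose in a route
// action. File parts go to disk via the file upload handler; everything else
// falls through to the memory handler. Field names, directory, and size limit
// below are hypothetical.
import {
  unstable_composeUploadHandlers,
  unstable_createFileUploadHandler,
  unstable_createMemoryUploadHandler,
  unstable_parseMultipartFormData,
  json,
} from "@remix-run/node";
import type { ActionFunction } from "@remix-run/node";

export const action: ActionFunction = async ({ request }) => {
  let uploadHandler = unstable_composeUploadHandlers(
    unstable_createFileUploadHandler({
      directory: "public/uploads",
      maxPartSize: 5_000_000,
    }),
    unstable_createMemoryUploadHandler()
  );

  let formData = await unstable_parseMultipartFormData(request, uploadHandler);
  let file = formData.get("attachment"); // a NodeOnDiskFile when the part was a file

  return json({ uploaded: file != null });
};
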
diff --git a/packages/remix-node/magicExports/remix.ts b/packages/remix-node/magicExports/remix.ts
index 8f637aad858..c20a24a48df 100644
--- a/packages/remix-node/magicExports/remix.ts
+++ b/packages/remix-node/magicExports/remix.ts
@@ -13,4 +13,4 @@ export {
unstable_parseMultipartFormData,
} from "@remix-run/node";
-export type { UploadHandler, UploadHandlerArgs } from "@remix-run/node";
+export type { UploadHandler, UploadHandlerPart } from "@remix-run/node";
diff --git a/packages/remix-node/package.json b/packages/remix-node/package.json
index 8daaf4f9628..81d36d9761d 100644
--- a/packages/remix-node/package.json
+++ b/packages/remix-node/package.json
@@ -13,19 +13,16 @@
},
"dependencies": {
"@remix-run/server-runtime": "1.4.3",
- "@types/busboy": "^0.3.1",
- "@types/node-fetch": "^2.5.12",
- "@web-std/file": "^3.0.0",
+ "@remix-run/web-fetch": "^4.1.3",
+ "@remix-run/web-file": "^3.0.2",
+ "@remix-run/web-stream": "^1.0.3",
+ "@web3-storage/multipart-parser": "^1.0.0",
"abort-controller": "^3.0.0",
- "blob-stream": "^0.1.3",
- "busboy": "^0.3.1",
"cookie-signature": "^1.1.0",
- "form-data": "^4.0.0",
- "node-fetch": "^2.6.1",
- "source-map-support": "^0.5.21"
+ "source-map-support": "^0.5.21",
+ "stream-slice": "^0.1.2"
},
"devDependencies": {
- "@types/blob-stream": "^0.1.30",
"@types/cookie-signature": "^1.0.3",
"@types/source-map-support": "^0.5.4"
},
diff --git a/packages/remix-node/parseMultipartFormData.ts b/packages/remix-node/parseMultipartFormData.ts
deleted file mode 100644
index cbddb069cd7..00000000000
--- a/packages/remix-node/parseMultipartFormData.ts
+++ /dev/null
@@ -1,116 +0,0 @@
-import { Readable } from "stream";
-import Busboy from "busboy";
-
-import type { Request as NodeRequest } from "./fetch";
-import type { UploadHandler } from "./formData";
-import { FormData as NodeFormData } from "./formData";
-
-/**
- * Allows you to handle multipart forms (file uploads) for your app.
- *
- * @see https://remix.run/api/remix#parsemultipartformdata-node
- */
-export function parseMultipartFormData(
- request: Request,
- uploadHandler: UploadHandler
-) {
- return (request as unknown as NodeRequest).formData(uploadHandler);
-}
-
-export async function internalParseFormData(
- contentType: string,
- body: string | Buffer | Readable,
- abortController?: AbortController,
- uploadHandler?: UploadHandler
-) {
- let formData = new NodeFormData();
-  let fileWorkQueue: Promise<void>[] = [];
-
- let stream: Readable;
- if (typeof body === "string" || Buffer.isBuffer(body)) {
- stream = Readable.from(body);
- } else {
- stream = body;
- }
-
- await new Promise(async (resolve, reject) => {
- try {
- let busboy = new Busboy({
- highWaterMark: 2 * 1024 * 1024,
- headers: {
- "content-type": contentType,
- },
- });
-
- let aborted = false;
- function abort(error?: Error) {
- if (aborted) return;
- aborted = true;
-
- stream.unpipe();
- stream.removeAllListeners();
- busboy.removeAllListeners();
-
- abortController?.abort();
- reject(error || new Error("failed to parse form data"));
- }
-
- busboy.on("field", (name, value) => {
- formData.append(name, value);
- });
-
- busboy.on("file", (name, filestream, filename, encoding, mimetype) => {
- if (uploadHandler) {
- fileWorkQueue.push(
- (async () => {
- try {
- let value = await uploadHandler({
- name,
- stream: filestream,
- filename,
- encoding,
- mimetype,
- });
-
- if (typeof value !== "undefined") {
- formData.append(name, value);
- }
- } catch (error: any) {
- // Emit error to busboy to bail early if possible
- busboy.emit("error", error);
- // It's possible that the handler is doing stuff and fails
- // *after* busboy has finished. Rethrow the error for surfacing
- // in the Promise.all(fileWorkQueue) below.
- throw error;
- } finally {
- filestream.resume();
- }
- })()
- );
- } else {
- filestream.resume();
- }
-
- if (!uploadHandler) {
- console.warn(
- `Tried to parse multipart file upload for field "${name}" but no uploadHandler was provided.` +
- " Read more here: https://remix.run/api/remix#parseMultipartFormData-node"
- );
- }
- });
-
- stream.on("error", abort);
- stream.on("aborted", abort);
- busboy.on("error", abort);
- busboy.on("finish", resolve);
-
- stream.pipe(busboy);
- } catch (err) {
- reject(err);
- }
- });
-
- await Promise.all(fileWorkQueue);
-
- return formData;
-}
diff --git a/packages/remix-node/stream.ts b/packages/remix-node/stream.ts
new file mode 100644
index 00000000000..6335cf92d0d
--- /dev/null
+++ b/packages/remix-node/stream.ts
@@ -0,0 +1,182 @@
+import type { Readable, Writable } from "stream";
+import { Stream } from "stream";
+
+export async function writeReadableStreamToWritable(
+ stream: ReadableStream,
+ writable: Writable
+) {
+ let reader = stream.getReader();
+
+ async function read() {
+ let { done, value } = await reader.read();
+
+ if (done) {
+ writable.end();
+ return;
+ }
+
+ writable.write(value);
+
+ await read();
+ }
+
+ try {
+ await read();
+ } catch (error: any) {
+ writable.destroy(error);
+ throw error;
+ }
+}
+
+export async function writeAsyncIterableToWritable(
+  iterable: AsyncIterable<Uint8Array>,
+ writable: Writable
+) {
+ try {
+ for await (let chunk of iterable) {
+ writable.write(chunk);
+ }
+ writable.end();
+ } catch (error: any) {
+ writable.destroy(error);
+ throw error;
+ }
+}
+
+export async function readableStreamToString(
+ stream: ReadableStream,
+ encoding?: BufferEncoding
+) {
+ let reader = stream.getReader();
+ let chunks: Uint8Array[] = [];
+
+ async function read() {
+ let { done, value } = await reader.read();
+
+ if (done) {
+ return;
+ } else if (value) {
+ chunks.push(value);
+ }
+
+ await read();
+ }
+
+ await read();
+
+ return Buffer.concat(chunks).toString(encoding);
+}
+
+export const createReadableStreamFromReadable = (
+ source: Readable & { readableHighWaterMark?: number }
+) => {
+ let pump = new StreamPump(source);
+ let stream = new ReadableStream(pump, pump);
+ return stream;
+};
+
+class StreamPump {
+ public highWaterMark: number;
+ public accumalatedSize: number;
+ private stream: Stream & {
+ readableHighWaterMark?: number;
+ readable?: boolean;
+ resume?: () => void;
+ pause?: () => void;
+ destroy?: (error?: Error) => void;
+ };
+  private controller?: ReadableStreamController<Uint8Array>;
+
+ constructor(
+ stream: Stream & {
+ readableHighWaterMark?: number;
+ readable?: boolean;
+ resume?: () => void;
+ pause?: () => void;
+ destroy?: (error?: Error) => void;
+ }
+ ) {
+ this.highWaterMark =
+ stream.readableHighWaterMark ||
+ new Stream.Readable().readableHighWaterMark;
+ this.accumalatedSize = 0;
+ this.stream = stream;
+ this.enqueue = this.enqueue.bind(this);
+ this.error = this.error.bind(this);
+ this.close = this.close.bind(this);
+ }
+
+ size(chunk: Uint8Array) {
+ return chunk?.byteLength || 0;
+ }
+
+  start(controller: ReadableStreamController<Uint8Array>) {
+ this.controller = controller;
+ this.stream.on("data", this.enqueue);
+ this.stream.once("error", this.error);
+ this.stream.once("end", this.close);
+ this.stream.once("close", this.close);
+ }
+
+ pull() {
+ this.resume();
+ }
+
+ cancel(reason?: Error) {
+ if (this.stream.destroy) {
+ this.stream.destroy(reason);
+ }
+
+ this.stream.off("data", this.enqueue);
+ this.stream.off("error", this.error);
+ this.stream.off("end", this.close);
+ this.stream.off("close", this.close);
+ }
+
+ enqueue(chunk: Uint8Array | string) {
+ if (this.controller) {
+ try {
+ let bytes = chunk instanceof Uint8Array ? chunk : Buffer.from(chunk);
+
+ let available = (this.controller.desiredSize || 0) - bytes.byteLength;
+ this.controller.enqueue(bytes);
+ if (available <= 0) {
+ this.pause();
+ }
+ } catch (error: any) {
+ this.controller.error(
+ new Error(
+ "Could not create Buffer, chunk must be of type string or an instance of Buffer, ArrayBuffer, or Array or an Array-like Object"
+ )
+ );
+ this.cancel();
+ }
+ }
+ }
+
+ pause() {
+ if (this.stream.pause) {
+ this.stream.pause();
+ }
+ }
+
+ resume() {
+ if (this.stream.readable && this.stream.resume) {
+ this.stream.resume();
+ }
+ }
+
+ close() {
+ if (this.controller) {
+ this.controller.close();
+ delete this.controller;
+ }
+ }
+
+ error(error: Error) {
+ if (this.controller) {
+ this.controller.error(error);
+ delete this.controller;
+ }
+ }
+}
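
// Usage sketch: the helpers above bridge Node streams and web streams in both
// directions. The file paths and function name are hypothetical.
import { createReadStream, createWriteStream } from "fs";
import {
  createReadableStreamFromReadable,
  writeReadableStreamToWritable,
} from "@remix-run/node";

export async function copyViaWebStreams(src: string, dest: string) {
  // Node Readable -> web ReadableStream
  let webStream = createReadableStreamFromReadable(createReadStream(src));

  // web ReadableStream -> Node Writable
  await writeReadableStreamToWritable(webStream, createWriteStream(dest));
}
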
diff --git a/packages/remix-node/upload/fileUploadHandler.ts b/packages/remix-node/upload/fileUploadHandler.ts
index 55d9068fb57..ad2836ab9c2 100644
--- a/packages/remix-node/upload/fileUploadHandler.ts
+++ b/packages/remix-node/upload/fileUploadHandler.ts
@@ -1,22 +1,31 @@
import { randomBytes } from "crypto";
-import { createReadStream, createWriteStream } from "fs";
-import { rm, mkdir, readFile, stat } from "fs/promises";
+import { createReadStream, createWriteStream, statSync } from "fs";
+import { rm, mkdir, stat as statAsync } from "fs/promises";
import { tmpdir } from "os";
import { basename, dirname, extname, resolve as resolvePath } from "path";
-
-import { Meter } from "./meter";
-import type { UploadHandler } from "../formData";
+import type { Readable } from "stream";
+import { finished } from "stream";
+import { promisify } from "util";
+import { MaxPartSizeExceededError } from "@remix-run/server-runtime";
+import type { UploadHandler } from "@remix-run/server-runtime";
+// @ts-expect-error
+import * as streamSlice from "stream-slice";
+
+import {
+ createReadableStreamFromReadable,
+ readableStreamToString,
+} from "../stream";
export type FileUploadHandlerFilterArgs = {
filename: string;
- encoding: string;
- mimetype: string;
+ contentType: string;
+ name: string;
};
export type FileUploadHandlerPathResolverArgs = {
filename: string;
- encoding: string;
- mimetype: string;
+ contentType: string;
+ name: string;
};
/**
@@ -46,7 +55,7 @@ export type FileUploadHandlerOptions = {
* The maximum upload size allowed. If the size is exceeded an error will be thrown.
* Defaults to 3000000B (3MB).
*/
- maxFileSize?: number;
+ maxPartSize?: number;
/**
*
* @param filename
@@ -67,7 +76,7 @@ async function uniqueFile(filepath: string) {
for (
let i = 1;
- await stat(uniqueFilepath)
+ await statAsync(uniqueFilepath)
.then(() => true)
.catch(() => false);
i++
@@ -85,31 +94,31 @@ export function createFileUploadHandler({
avoidFileConflicts = true,
file = defaultFilePathResolver,
filter,
- maxFileSize = 3000000,
-}: FileUploadHandlerOptions): UploadHandler {
- return async ({ name, stream, filename, encoding, mimetype }) => {
- if (filter && !(await filter({ filename, encoding, mimetype }))) {
- stream.resume();
- return;
+ maxPartSize = 3000000,
+}: FileUploadHandlerOptions = {}): UploadHandler {
+ return async ({ name, filename, contentType, data }) => {
+ if (
+ !filename ||
+ (filter && !(await filter({ name, filename, contentType })))
+ ) {
+ return undefined;
}
let dir =
typeof directory === "string"
? directory
- : directory({ filename, encoding, mimetype });
+ : directory({ name, filename, contentType });
if (!dir) {
- stream.resume();
- return;
+ return undefined;
}
let filedir = resolvePath(dir);
let path =
- typeof file === "string" ? file : file({ filename, encoding, mimetype });
+ typeof file === "string" ? file : file({ name, filename, contentType });
if (!path) {
- stream.resume();
- return;
+ return undefined;
}
let filepath = resolvePath(filedir, path);
@@ -120,35 +129,28 @@ export function createFileUploadHandler({
await mkdir(dirname(filepath), { recursive: true }).catch(() => {});
- let meter = new Meter(name, maxFileSize);
- await new Promise((resolve, reject) => {
- let writeFileStream = createWriteStream(filepath);
-
- let aborted = false;
- async function abort(error: Error) {
- if (aborted) return;
- aborted = true;
-
- stream.unpipe();
- meter.unpipe();
- stream.removeAllListeners();
- meter.removeAllListeners();
- writeFileStream.removeAllListeners();
-
- await rm(filepath, { force: true }).catch(() => {});
-
- reject(error);
+ let writeFileStream = createWriteStream(filepath);
+ let size = 0;
+ let deleteFile = false;
+ try {
+ for await (let chunk of data) {
+ size += chunk.byteLength;
+ if (size > maxPartSize) {
+ deleteFile = true;
+ throw new MaxPartSizeExceededError(name, maxPartSize);
+ }
+ writeFileStream.write(chunk);
}
+ } finally {
+ writeFileStream.end();
+ await promisify(finished)(writeFileStream);
- stream.on("error", abort);
- meter.on("error", abort);
- writeFileStream.on("error", abort);
- writeFileStream.on("finish", resolve);
-
- stream.pipe(meter).pipe(writeFileStream);
- });
+ if (deleteFile) {
+ await rm(filepath).catch(() => {});
+ }
+ }
- return new NodeOnDiskFile(filepath, meter.bytes, mimetype);
+ return new NodeOnDiskFile(filepath, contentType);
};
}
@@ -159,14 +161,48 @@ export class NodeOnDiskFile implements File {
constructor(
private filepath: string,
- public size: number,
- public type: string
+ public type: string,
+ private slicer?: { start: number; end: number }
) {
this.name = basename(filepath);
}
+ get size(): number {
+ let stats = statSync(this.filepath);
+
+ if (this.slicer) {
+ let slice = this.slicer.end - this.slicer.start;
+ return slice < 0 ? 0 : slice > stats.size ? stats.size : slice;
+ }
+
+ return stats.size;
+ }
+
+ slice(start?: number, end?: number, type?: string): Blob {
+ if (typeof start === "number" && start < 0) start = this.size + start;
+ if (typeof end === "number" && end < 0) end = this.size + end;
+
+ let startOffset = this.slicer?.start || 0;
+
+ start = startOffset + (start || 0);
+ end = startOffset + (end || this.size);
+ return new NodeOnDiskFile(
+ this.filepath,
+ typeof type === "string" ? type : this.type,
+ {
+ start,
+ end,
+ }
+ );
+ }
+
   async arrayBuffer(): Promise<ArrayBuffer> {
- let stream = createReadStream(this.filepath);
+ let stream: Readable = createReadStream(this.filepath);
+ if (this.slicer) {
+ stream = stream.pipe(
+ streamSlice.slice(this.slicer.start, this.slicer.end)
+ );
+ }
return new Promise((resolve, reject) => {
let buf: any[] = [];
@@ -176,15 +212,23 @@ export class NodeOnDiskFile implements File {
});
}
- slice(start?: any, end?: any, contentType?: any): Blob {
- throw new Error("Method not implemented.");
- }
stream(): ReadableStream;
stream(): NodeJS.ReadableStream;
stream(): ReadableStream | NodeJS.ReadableStream {
- return createReadStream(this.filepath);
+ let stream: Readable = createReadStream(this.filepath);
+ if (this.slicer) {
+ stream = stream.pipe(
+ streamSlice.slice(this.slicer.start, this.slicer.end)
+ );
+ }
+ return createReadableStreamFromReadable(stream);
}
-  text(): Promise<string> {
- return readFile(this.filepath, "utf-8");
+
+  async text(): Promise<string> {
+ return readableStreamToString(this.stream());
+ }
+
+ public get [Symbol.toStringTag]() {
+ return "File";
}
}
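
// Usage sketch: the handler now receives an AsyncIterable<Uint8Array> instead
// of a Node stream, and the returned NodeOnDiskFile behaves like a web File,
// including slice(). The filter, directory, and field name are hypothetical.
import {
  unstable_createFileUploadHandler,
  unstable_parseMultipartFormData,
} from "@remix-run/node";
import type { NodeOnDiskFile } from "@remix-run/node";

export async function saveImage(request: Request) {
  let uploadHandler = unstable_createFileUploadHandler({
    directory: ".tmp/uploads",
    maxPartSize: 3_000_000,
    // Only accept image parts; other parts resolve to undefined and are
    // dropped from the resulting FormData.
    filter: ({ contentType }) => contentType.startsWith("image/"),
  });

  let formData = await unstable_parseMultipartFormData(request, uploadHandler);
  let file = formData.get("image") as NodeOnDiskFile | null;
  if (!file) return null;

  // NodeOnDiskFile reads lazily from disk: size is a stat() call, and
  // slice()/text() stream only the relevant byte range.
  return { name: file.name, size: file.size, head: await file.slice(0, 16).text() };
}
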
diff --git a/packages/remix-node/upload/memoryUploadHandler.ts b/packages/remix-node/upload/memoryUploadHandler.ts
deleted file mode 100644
index 5dde5b803f0..00000000000
--- a/packages/remix-node/upload/memoryUploadHandler.ts
+++ /dev/null
@@ -1,83 +0,0 @@
-import type { TransformCallback } from "stream";
-import { Transform } from "stream";
-import { File as BufferFile } from "@web-std/file";
-
-import { Meter } from "./meter";
-import type { UploadHandler } from "../formData";
-
-export type MemoryUploadHandlerFilterArgs = {
- filename: string;
- encoding: string;
- mimetype: string;
-};
-
-export type MemoryUploadHandlerOptions = {
- /**
- * The maximum upload size allowed. If the size is exceeded an error will be thrown.
- * Defaults to 3000000B (3MB).
- */
- maxFileSize?: number;
- /**
- *
- * @param filename
- * @param mimetype
- * @param encoding
- */
-  filter?(args: MemoryUploadHandlerFilterArgs): boolean | Promise<boolean>;
-};
-
-export function createMemoryUploadHandler({
- filter,
- maxFileSize = 3000000,
-}: MemoryUploadHandlerOptions): UploadHandler {
- return async ({ name, stream, filename, encoding, mimetype }) => {
- if (filter && !(await filter({ filename, encoding, mimetype }))) {
- stream.resume();
- return;
- }
-
- let bufferStream = new BufferStream();
- await new Promise((resolve, reject) => {
- let meter = new Meter(name, maxFileSize);
-
- let aborted = false;
- async function abort(error: Error) {
- if (aborted) return;
- aborted = true;
-
- stream.unpipe();
- meter.unpipe();
- stream.removeAllListeners();
- meter.removeAllListeners();
- bufferStream.removeAllListeners();
-
- reject(error);
- }
-
- stream.on("error", abort);
- meter.on("error", abort);
- bufferStream.on("error", abort);
- bufferStream.on("finish", resolve);
-
- stream.pipe(meter).pipe(bufferStream);
- });
-
- return new BufferFile(bufferStream.data, filename, {
- type: mimetype,
- });
- };
-}
-
-class BufferStream extends Transform {
- public data: any[];
-
- constructor() {
- super();
- this.data = [];
- }
-
- _transform(chunk: any, _: BufferEncoding, callback: TransformCallback) {
- this.data.push(chunk);
- callback();
- }
-}
diff --git a/packages/remix-node/upload/meter.ts b/packages/remix-node/upload/meter.ts
deleted file mode 100644
index 01e64b96410..00000000000
--- a/packages/remix-node/upload/meter.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-import type { TransformCallback } from "stream";
-import { Transform } from "stream";
-
-export class Meter extends Transform {
- public bytes: number;
-
- constructor(public field: string, public maxBytes: number | undefined) {
- super();
- this.bytes = 0;
- }
-
- _transform(chunk: any, _: BufferEncoding, callback: TransformCallback) {
- this.bytes += chunk.length;
- this.push(chunk);
-
- if (typeof this.maxBytes === "number" && this.bytes > this.maxBytes) {
- return callback(new MeterError(this.field, this.maxBytes));
- }
-
- callback();
- }
-}
-
-export class MeterError extends Error {
- constructor(public field: string, public maxBytes: number) {
- super(`Field "${field}" exceeded upload size of ${maxBytes} bytes.`);
- }
-}
diff --git a/packages/remix-react/routes.tsx b/packages/remix-react/routes.tsx
index bcb6e3e8616..5873e13bf17 100644
--- a/packages/remix-react/routes.tsx
+++ b/packages/remix-react/routes.tsx
@@ -162,7 +162,7 @@ function createLoader(route: EntryRoute, routeModules: RouteModules) {
throw new CatchValue(
result.status,
result.statusText,
- await extractData(result.clone())
+ await extractData(result)
);
}
@@ -199,7 +199,7 @@ function createAction(route: EntryRoute, routeModules: RouteModules) {
throw new CatchValue(
result.status,
result.statusText,
- await extractData(result.clone())
+ await extractData(result)
);
}
diff --git a/packages/remix-server-runtime/__tests__/formData-test.ts b/packages/remix-server-runtime/__tests__/formData-test.ts
new file mode 100644
index 00000000000..3dd974c4efb
--- /dev/null
+++ b/packages/remix-server-runtime/__tests__/formData-test.ts
@@ -0,0 +1,175 @@
+import {
+ Request as NodeRequest,
+ FormData as NodeFormData,
+} from "@remix-run/web-fetch";
+import { Blob, File } from "@remix-run/web-file";
+
+import { parseMultipartFormData } from "../formData";
+
+class CustomError extends Error {
+ constructor() {
+ super("test error");
+ }
+}
+
+describe("parseMultipartFormData", () => {
+ it("can use a custom upload handler", async () => {
+ let formData = new NodeFormData();
+ formData.set("a", "value");
+ formData.set("blob", new Blob(["blob".repeat(1000)]), "blob.txt");
+ formData.set("file", new File(["file".repeat(1000)], "file.txt"));
+
+ let req = new NodeRequest("https://test.com", {
+ method: "post",
+ body: formData,
+ });
+
+ let parsedFormData = await parseMultipartFormData(
+ req,
+ async ({ filename, data, contentType }) => {
+ let chunks = [];
+ for await (let chunk of data) {
+ chunks.push(chunk);
+ }
+ if (filename) {
+ return new File(chunks, filename, { type: contentType });
+ }
+
+ return await new Blob(chunks, { type: contentType }).text();
+ }
+ );
+
+ expect(parsedFormData.get("a")).toBe("value");
+ let blob = parsedFormData.get("blob") as Blob;
+ expect(await blob.text()).toBe("blob".repeat(1000));
+ let file = parsedFormData.get("file") as File;
+ expect(file.name).toBe("file.txt");
+ expect(await file.text()).toBe("file".repeat(1000));
+ });
+
+ it("can return undefined", async () => {
+ let formData = new NodeFormData();
+ formData.set("a", "value");
+ formData.set("blob", new Blob(["blob".repeat(1000)]), "blob.txt");
+ formData.set("file", new File(["file".repeat(1000)], "file.txt"));
+
+ let req = new NodeRequest("https://test.com", {
+ method: "post",
+ body: formData,
+ });
+
+ let parsedFormData = await parseMultipartFormData(
+ req,
+ async () => undefined
+ );
+
+ expect(parsedFormData.get("a")).toBe(null);
+ expect(parsedFormData.get("blob")).toBe(null);
+ expect(parsedFormData.get("file")).toBe(null);
+ });
+
+ it("can throw errors in upload handlers", async () => {
+ let formData = new NodeFormData();
+ formData.set("blob", new Blob(["blob"]), "blob.txt");
+
+ let req = new NodeRequest("https://test.com", {
+ method: "post",
+ body: formData,
+ });
+
+ let error: Error;
+ try {
+ await parseMultipartFormData(req, async () => {
+ throw new CustomError();
+ });
+ throw new Error("should have thrown");
+ } catch (err) {
+ error = err;
+ }
+ expect(error).toBeInstanceOf(CustomError);
+ expect(error.message).toBe("test error");
+ });
+
+ describe("stream should propagate events", () => {
+ it("when controller errors", async () => {
+ let formData = new NodeFormData();
+ formData.set("a", "value");
+ formData.set("blob", new Blob(["blob".repeat(1000)]), "blob.txt");
+ formData.set("file", new File(["file".repeat(1000)], "file.txt"));
+
+ let underlyingRequest = new NodeRequest("https://test.com", {
+ method: "post",
+ body: formData,
+ });
+ let underlyingBody = await underlyingRequest.text();
+
+ let encoder = new TextEncoder();
+ let body = new ReadableStream({
+ start(controller) {
+ controller.enqueue(
+ encoder.encode(underlyingBody.slice(0, underlyingBody.length / 2))
+ );
+ controller.error(new CustomError());
+ },
+ });
+
+ let req = new NodeRequest("https://test.com", {
+ method: "post",
+ body,
+ headers: underlyingRequest.headers,
+ });
+
+ let error: Error;
+ try {
+ await parseMultipartFormData(req, async () => undefined);
+ throw new Error("should have thrown");
+ } catch (err) {
+ error = err;
+ }
+
+ expect(error).toBeInstanceOf(CustomError);
+ expect(error.message).toBe("test error");
+ });
+
+ it("when controller is closed", async () => {
+ let formData = new NodeFormData();
+ formData.set("a", "value");
+ formData.set("blob", new Blob(["blob".repeat(1000)]), "blob.txt");
+ formData.set("file", new File(["file".repeat(1000)], "file.txt"));
+
+ let underlyingRequest = new NodeRequest("https://test.com", {
+ method: "post",
+ body: formData,
+ });
+ let underlyingBody = await underlyingRequest.text();
+
+ let encoder = new TextEncoder();
+ let body = new ReadableStream({
+ start(controller) {
+ controller.enqueue(
+ encoder.encode(underlyingBody.slice(0, underlyingBody.length / 2))
+ );
+ controller.close();
+ },
+ });
+
+ let req = new NodeRequest("https://test.com", {
+ method: "post",
+ body,
+ headers: underlyingRequest.headers,
+ });
+
+ let error: Error;
+ try {
+ let formData = await parseMultipartFormData(req, async () => undefined);
+ console.log(formData);
+ throw new Error("should have thrown");
+ } catch (err) {
+ error = err;
+ }
+
+ expect(error).toBeInstanceOf(Error);
+ expect(error.message).toMatch("malformed multipart-form data");
+ });
+ });
+});
diff --git a/packages/remix-server-runtime/data.ts b/packages/remix-server-runtime/data.ts
index 674f3dbc1f3..4f909630bbc 100644
--- a/packages/remix-server-runtime/data.ts
+++ b/packages/remix-server-runtime/data.ts
@@ -80,7 +80,7 @@ export async function callRouteLoader({
let result;
try {
result = await loader({
- request: stripDataParam(stripIndexParam(request.clone())),
+ request: stripDataParam(stripIndexParam(request)),
context: loadContext,
params: match.params,
});
diff --git a/packages/remix-server-runtime/formData.ts b/packages/remix-server-runtime/formData.ts
new file mode 100644
index 00000000000..8d6d4095575
--- /dev/null
+++ b/packages/remix-server-runtime/formData.ts
@@ -0,0 +1,66 @@
+import { streamMultipart } from "@web3-storage/multipart-parser";
+
+export type UploadHandlerPart = {
+ name: string;
+ filename?: string;
+ contentType: string;
+  data: AsyncIterable<Uint8Array>;
+};
+
+export type UploadHandler = (
+ part: UploadHandlerPart
+) => Promise<File | string | null | undefined>;
+
+export function composeUploadHandlers(
+ ...handlers: UploadHandler[]
+): UploadHandler {
+ return async (part) => {
+ for (let handler of handlers) {
+ let value = await handler(part);
+ if (typeof value !== "undefined" && value !== null) {
+ return value;
+ }
+ }
+
+ return undefined;
+ };
+}
+
+/**
+ * Allows you to handle multipart forms (file uploads) for your app.
+ *
+ * TODO: Update this comment
+ * @see https://remix.run/api/remix#parsemultipartformdata-node
+ */
+export async function parseMultipartFormData(
+ request: Request,
+ uploadHandler: UploadHandler
+): Promise<FormData> {
+ let contentType = request.headers.get("Content-Type") || "";
+ let [type, boundary] = contentType.split(/\s*;\s*boundary=/);
+
+ if (!request.body || !boundary || type !== "multipart/form-data") {
+ throw new TypeError("Could not parse content as FormData.");
+ }
+
+ let formData = new FormData();
+  let parts: AsyncIterable<UploadHandlerPart & { done?: true }> =
+ streamMultipart(request.body, boundary);
+
+ for await (let part of parts) {
+ if (part.done) break;
+
+ if (typeof part.filename === "string") {
+ // only pass basename as the multipart/form-data spec recommends
+ // https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
+ part.filename = part.filename.split(/[/\\]/).pop();
+ }
+
+ let value = await uploadHandler(part);
+ if (typeof value !== "undefined" && value !== null) {
+ formData.append(part.name, value as any);
+ }
+ }
+
+ return formData;
+}
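
// Usage sketch: an UploadHandler is just an async function over
// { name, filename, contentType, data }. This hypothetical handler buffers
// file parts and stringifies plain fields; it assumes web File/Blob globals
// are available (e.g. via installGlobals() in @remix-run/node).
import { unstable_parseMultipartFormData } from "@remix-run/server-runtime";
import type { UploadHandler } from "@remix-run/server-runtime";

let bufferingHandler: UploadHandler = async ({ filename, contentType, data }) => {
  let chunks: Uint8Array[] = [];
  for await (let chunk of data) {
    chunks.push(chunk);
  }
  // File parts become Files, plain fields become strings.
  return filename
    ? new File(chunks, filename, { type: contentType })
    : await new Blob(chunks, { type: contentType }).text();
};

export function parseBody(request: Request) {
  return unstable_parseMultipartFormData(request, bufferingHandler);
}
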
diff --git a/packages/remix-server-runtime/index.ts b/packages/remix-server-runtime/index.ts
index 7d19aea99fe..6af6134390b 100644
--- a/packages/remix-server-runtime/index.ts
+++ b/packages/remix-server-runtime/index.ts
@@ -1,5 +1,9 @@
// Default implementations for the Remix server runtime interface
export { createCookieFactory, isCookie } from "./cookies";
+export {
+ composeUploadHandlers as unstable_composeUploadHandlers,
+ parseMultipartFormData as unstable_parseMultipartFormData,
+} from "./formData";
export { json, redirect } from "./responses";
export { createRequestHandler } from "./server";
export {
@@ -9,6 +13,8 @@ export {
} from "./sessions";
export { createCookieSessionStorageFactory } from "./sessions/cookieStorage";
export { createMemorySessionStorageFactory } from "./sessions/memoryStorage";
+export { createMemoryUploadHandler as unstable_createMemoryUploadHandler } from "./upload/memoryUploadHandler";
+export { MaxPartSizeExceededError } from "./upload/errors";
// Types for the Remix server runtime interface
export type {
@@ -59,4 +65,8 @@ export type {
SessionStorage,
SignFunction,
UnsignFunction,
+ UploadHandlerPart,
+ UploadHandler,
+ MemoryUploadHandlerOptions,
+ MemoryUploadHandlerFilterArgs,
} from "./reexport";
diff --git a/packages/remix-server-runtime/package.json b/packages/remix-server-runtime/package.json
index e45fe1aa2d3..9044b8671df 100644
--- a/packages/remix-server-runtime/package.json
+++ b/packages/remix-server-runtime/package.json
@@ -15,6 +15,7 @@
},
"dependencies": {
"@types/cookie": "^0.4.0",
+ "@web3-storage/multipart-parser": "^1.0.0",
"cookie": "^0.4.1",
"jsesc": "^3.0.1",
"react-router-dom": "^6.2.2",
@@ -26,6 +27,7 @@
"react-dom": ">=16.8"
},
"devDependencies": {
+ "@remix-run/web-file": "^3.0.2",
"@types/jsesc": "^2.5.1",
"@types/set-cookie-parser": "^2.4.1"
},
diff --git a/packages/remix-server-runtime/reexport.ts b/packages/remix-server-runtime/reexport.ts
index 3747a38bf48..add615c4b6c 100644
--- a/packages/remix-server-runtime/reexport.ts
+++ b/packages/remix-server-runtime/reexport.ts
@@ -5,6 +5,12 @@ export type {
ServerEntryModule,
} from "./build";
+export type { UploadHandlerPart, UploadHandler } from "./formData";
+export type {
+ MemoryUploadHandlerOptions,
+ MemoryUploadHandlerFilterArgs,
+} from "./upload/memoryUploadHandler";
+
export type {
Cookie,
CookieOptions,
diff --git a/packages/remix-server-runtime/server.ts b/packages/remix-server-runtime/server.ts
index 2da6cd3a2ae..35bdc2593dc 100644
--- a/packages/remix-server-runtime/server.ts
+++ b/packages/remix-server-runtime/server.ts
@@ -150,10 +150,10 @@ async function handleDataRequest({
}
if (handleDataRequest) {
- response = await handleDataRequest(response.clone(), {
+ response = await handleDataRequest(response, {
context: loadContext,
params: match.params,
- request: request.clone(),
+ request,
});
}
@@ -457,7 +457,7 @@ async function handleDocumentRequest({
let handleDocumentRequest = build.entry.module.default;
try {
return await handleDocumentRequest(
- request.clone(),
+ request,
responseStatusCode,
responseHeaders,
entryContext
@@ -477,7 +477,7 @@ async function handleDocumentRequest({
try {
return await handleDocumentRequest(
- request.clone(),
+ request,
responseStatusCode,
responseHeaders,
entryContext
diff --git a/packages/remix-server-runtime/upload/errors.ts b/packages/remix-server-runtime/upload/errors.ts
new file mode 100644
index 00000000000..e5ed3d42f6b
--- /dev/null
+++ b/packages/remix-server-runtime/upload/errors.ts
@@ -0,0 +1,5 @@
+export class MaxPartSizeExceededError extends Error {
+ constructor(public field: string, public maxBytes: number) {
+ super(`Field "${field}" exceeded upload size of ${maxBytes} bytes.`);
+ }
+}
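
// Usage sketch: the built-in handlers throw MaxPartSizeExceededError when a
// part grows past maxPartSize, so callers can translate it into a 413. The
// size limit and wrapper function are hypothetical.
import {
  MaxPartSizeExceededError,
  unstable_createMemoryUploadHandler,
  unstable_parseMultipartFormData,
  json,
} from "@remix-run/server-runtime";

export async function parseWithLimit(request: Request) {
  try {
    return await unstable_parseMultipartFormData(
      request,
      unstable_createMemoryUploadHandler({ maxPartSize: 1_000_000 })
    );
  } catch (error) {
    if (error instanceof MaxPartSizeExceededError) {
      // Surface the offending field and limit to the client.
      throw json({ field: error.field, maxBytes: error.maxBytes }, { status: 413 });
    }
    throw error;
  }
}
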
diff --git a/packages/remix-server-runtime/upload/memoryUploadHandler.ts b/packages/remix-server-runtime/upload/memoryUploadHandler.ts
new file mode 100644
index 00000000000..51331faa9be
--- /dev/null
+++ b/packages/remix-server-runtime/upload/memoryUploadHandler.ts
@@ -0,0 +1,51 @@
+import type { UploadHandler } from "@remix-run/server-runtime";
+
+import { MaxPartSizeExceededError } from "./errors";
+
+export type MemoryUploadHandlerFilterArgs = {
+ filename?: string;
+ contentType: string;
+ name: string;
+};
+
+export type MemoryUploadHandlerOptions = {
+ /**
+ * The maximum upload size allowed. If the size is exceeded an error will be thrown.
+ * Defaults to 3000000B (3MB).
+ */
+ maxPartSize?: number;
+ /**
+ *
+ * @param filename
+   * @param contentType
+   * @param name
+ */
+  filter?(args: MemoryUploadHandlerFilterArgs): boolean | Promise<boolean>;
+};
+
+export function createMemoryUploadHandler({
+ filter,
+ maxPartSize = 3000000,
+}: MemoryUploadHandlerOptions = {}): UploadHandler {
+ return async ({ filename, contentType, name, data }) => {
+ if (filter && !(await filter({ filename, contentType, name }))) {
+ return undefined;
+ }
+
+ let size = 0;
+ let chunks = [];
+ for await (let chunk of data) {
+ size += chunk.byteLength;
+ if (size > maxPartSize) {
+ throw new MaxPartSizeExceededError(name, maxPartSize);
+ }
+ chunks.push(chunk);
+ }
+
+ if (typeof filename === "string") {
+ return new File(chunks, filename, { type: contentType });
+ }
+
+ return await new Blob(chunks, { type: contentType }).text();
+ };
+}
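
// Usage sketch: filter runs before any bytes are buffered, so parts can be
// rejected cheaply by name or content type. The field name below is
// hypothetical.
import { unstable_createMemoryUploadHandler } from "@remix-run/server-runtime";

let uploadHandler = unstable_createMemoryUploadHandler({
  maxPartSize: 500_000,
  // Keep only the "avatar" field when it is an image; other parts resolve to
  // undefined and are omitted from the parsed FormData.
  filter: ({ name, contentType }) =>
    name === "avatar" && contentType.startsWith("image/"),
});
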
diff --git a/packages/remix-vercel/__tests__/server-test.ts b/packages/remix-vercel/__tests__/server-test.ts
index 1b0501b6235..7b2f138b16c 100644
--- a/packages/remix-vercel/__tests__/server-test.ts
+++ b/packages/remix-vercel/__tests__/server-test.ts
@@ -143,7 +143,8 @@ describe("vercel createRemixHeaders", () => {
it("handles empty headers", () => {
expect(createRemixHeaders({})).toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {},
+ Symbol(query): Array [],
+ Symbol(context): null,
}
`);
});
@@ -151,11 +152,11 @@ describe("vercel createRemixHeaders", () => {
it("handles simple headers", () => {
expect(createRemixHeaders({ "x-foo": "bar" })).toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "x-foo": Array [
- "bar",
- ],
- },
+ Symbol(query): Array [
+ "x-foo",
+ "bar",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -164,14 +165,13 @@ describe("vercel createRemixHeaders", () => {
expect(createRemixHeaders({ "x-foo": "bar", "x-bar": "baz" }))
.toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "x-bar": Array [
- "baz",
- ],
- "x-foo": Array [
- "bar",
- ],
- },
+ Symbol(query): Array [
+ "x-foo",
+ "bar",
+ "x-bar",
+ "baz",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -180,11 +180,11 @@ describe("vercel createRemixHeaders", () => {
expect(createRemixHeaders({ "x-foo": "bar, baz" }))
.toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "x-foo": Array [
- "bar, baz",
- ],
- },
+ Symbol(query): Array [
+ "x-foo",
+ "bar, baz",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -193,14 +193,13 @@ describe("vercel createRemixHeaders", () => {
expect(createRemixHeaders({ "x-foo": "bar, baz", "x-bar": "baz" }))
.toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "x-bar": Array [
- "baz",
- ],
- "x-foo": Array [
- "bar, baz",
- ],
- },
+ Symbol(query): Array [
+ "x-foo",
+ "bar, baz",
+ "x-bar",
+ "baz",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -215,12 +214,13 @@ describe("vercel createRemixHeaders", () => {
})
).toMatchInlineSnapshot(`
Headers {
- Symbol(map): Object {
- "set-cookie": Array [
- "__session=some_value; Path=/; Secure; HttpOnly; MaxAge=7200; SameSite=Lax",
- "__other=some_other_value; Path=/; Secure; HttpOnly; MaxAge=3600; SameSite=Lax",
- ],
- },
+ Symbol(query): Array [
+ "set-cookie",
+ "__session=some_value; Path=/; Secure; HttpOnly; MaxAge=7200; SameSite=Lax",
+ "set-cookie",
+ "__other=some_other_value; Path=/; Secure; HttpOnly; MaxAge=3600; SameSite=Lax",
+ ],
+ Symbol(context): null,
}
`);
});
@@ -241,49 +241,37 @@ describe("vercel createRemixRequest", () => {
expect(createRemixRequest(request)).toMatchInlineSnapshot(`
NodeRequest {
- "abortController": undefined,
"agent": undefined,
"compress": true,
"counter": 0,
"follow": 20,
+ "highWaterMark": 16384,
+ "insecureHTTPParser": false,
"size": 0,
- "timeout": 0,
Symbol(Body internals): Object {
"body": null,
+ "boundary": null,
"disturbed": false,
"error": null,
+ "size": 0,
+ "type": null,
},
Symbol(Request internals): Object {
"headers": Headers {
- Symbol(map): Object {
- "cache-control": Array [
- "max-age=300, s-maxage=3600",
- ],
- "x-forwarded-host": Array [
- "localhost:3000",
- ],
- "x-forwarded-proto": Array [
- "http",
- ],
- },
+ Symbol(query): Array [
+ "cache-control",
+ "max-age=300, s-maxage=3600",
+ "x-forwarded-host",
+ "localhost:3000",
+ "x-forwarded-proto",
+ "http",
+ ],
+ Symbol(context): null,
},
"method": "GET",
- "parsedURL": Url {
- "auth": null,
- "hash": null,
- "host": "localhost:3000",
- "hostname": "localhost",
- "href": "http://localhost:3000/foo/bar",
- "path": "/foo/bar",
- "pathname": "/foo/bar",
- "port": "3000",
- "protocol": "http:",
- "query": null,
- "search": null,
- "slashes": true,
- },
+ "parsedURL": "http://localhost:3000/foo/bar",
"redirect": "follow",
- "signal": undefined,
+ "signal": AbortSignal {},
},
}
`);
diff --git a/packages/remix-vercel/server.ts b/packages/remix-vercel/server.ts
index ab100de7db5..994b7632bcb 100644
--- a/packages/remix-vercel/server.ts
+++ b/packages/remix-vercel/server.ts
@@ -6,11 +6,11 @@ import type {
Response as NodeResponse,
} from "@remix-run/node";
import {
- // This has been added as a global in node 15+
AbortController,
createRequestHandler as createRemixRequestHandler,
Headers as NodeHeaders,
Request as NodeRequest,
+ writeReadableStreamToWritable,
} from "@remix-run/node";
/**
@@ -46,23 +46,15 @@ export function createRequestHandler({
let handleRequest = createRemixRequestHandler(build, mode);
return async (req, res) => {
- let abortController = new AbortController();
- let request = createRemixRequest(req, abortController);
+ let request = createRemixRequest(req);
let loadContext =
typeof getLoadContext === "function"
? getLoadContext(req, res)
: undefined;
- let response = (await handleRequest(
- request as unknown as Request,
- loadContext
- )) as unknown as NodeResponse;
+ let response = (await handleRequest(request, loadContext)) as NodeResponse;
- if (abortController.signal.aborted) {
- response.headers.set("Connection", "close");
- }
-
- sendRemixResponse(res, response);
+ await sendRemixResponse(res, response);
};
}
@@ -70,6 +62,7 @@ export function createRemixHeaders(
requestHeaders: VercelRequest["headers"]
): NodeHeaders {
let headers = new NodeHeaders();
+
for (let key in requestHeaders) {
let header = requestHeaders[key]!;
// set-cookie is an array (maybe others)
@@ -85,20 +78,22 @@ export function createRemixHeaders(
return headers;
}
-export function createRemixRequest(
- req: VercelRequest,
- abortController?: AbortController
-): NodeRequest {
+export function createRemixRequest(req: VercelRequest): NodeRequest {
let host = req.headers["x-forwarded-host"] || req.headers["host"];
// doesn't seem to be available on their req object!
let protocol = req.headers["x-forwarded-proto"] || "https";
let url = new URL(req.url!, `${protocol}://${host}`);
+ let controller = new AbortController();
+
+ req.on("close", () => {
+ controller.abort();
+ });
+
let init: NodeRequestInit = {
method: req.method,
headers: createRemixHeaders(req.headers),
- abortController,
- signal: abortController?.signal,
+ signal: controller.signal,
};
if (req.method !== "GET" && req.method !== "HEAD") {
@@ -108,29 +103,20 @@ export function createRemixRequest(
return new NodeRequest(url.href, init);
}
-export function sendRemixResponse(
+export async function sendRemixResponse(
res: VercelResponse,
nodeResponse: NodeResponse
-): void {
- let arrays = new Map();
- for (let [key, value] of nodeResponse.headers.entries()) {
- if (arrays.has(key)) {
- let newValue = arrays.get(key).concat(value);
- res.setHeader(key, newValue);
- arrays.set(key, newValue);
- } else {
- res.setHeader(key, value);
- arrays.set(key, [value]);
- }
- }
-
+): Promise {
res.statusMessage = nodeResponse.statusText;
- res.writeHead(nodeResponse.status, nodeResponse.headers.raw());
-
- if (Buffer.isBuffer(nodeResponse.body)) {
- res.end(nodeResponse.body);
- } else if (nodeResponse.body?.pipe) {
- nodeResponse.body.pipe(res);
+ let multiValueHeaders = nodeResponse.headers.raw();
+ res.writeHead(
+ nodeResponse.status,
+ nodeResponse.statusText,
+ multiValueHeaders
+ );
+
+ if (nodeResponse.body) {
+ await writeReadableStreamToWritable(nodeResponse.body, res);
} else {
res.end();
}
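
// Usage sketch: the adapter's public wiring is unchanged; responses are now
// streamed into the Vercel response via writeReadableStreamToWritable instead
// of being buffered. Assumes the server build is importable from
// "@remix-run/dev/server-build" as in the standard Vercel template.
import { createRequestHandler } from "@remix-run/vercel";
import * as build from "@remix-run/dev/server-build";

export default createRequestHandler({ build, mode: process.env.NODE_ENV });
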
diff --git a/tsconfig.json b/tsconfig.json
index 8ccfcf30261..222ff9cbbce 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -1,5 +1,8 @@
{
"files": [],
+ "exclude": [
+ "node_modules"
+ ],
"references": [
{ "path": "packages/create-remix" },
{ "path": "packages/remix" },
diff --git a/yarn.lock b/yarn.lock
index a160b9a5e30..699fa43911b 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1605,6 +1605,47 @@
stack-utils "2.0.5"
yazl "2.5.1"
+"@remix-run/web-blob@^3.0.3", "@remix-run/web-blob@^3.0.4":
+ version "3.0.4"
+ resolved "https://registry.npmjs.org/@remix-run/web-blob/-/web-blob-3.0.4.tgz#99c67b9d0fb641bd0c07d267fd218ae5aa4ae5ed"
+ integrity sha512-AfegzZvSSDc+LwnXV+SwROTrDtoLiPxeFW+jxgvtDAnkuCX1rrzmVJ6CzqZ1Ai0bVfmJadkG5GxtAfYclpPmgw==
+ dependencies:
+ "@remix-run/web-stream" "^1.0.0"
+ web-encoding "1.1.5"
+
+"@remix-run/web-fetch@^4.1.3":
+ version "4.1.3"
+ resolved "https://registry.npmjs.org/@remix-run/web-fetch/-/web-fetch-4.1.3.tgz#8ad3077c1b5bd9fe2a8813d0ad3c84970a495c04"
+ integrity sha512-D3KXAEkzhR248mu7wCHReQrMrIo3Y9pDDa7TrlISnsOEvqkfWkJJF+PQWmOIKpOSHAhDg7TCb2tzvW8lc/MfHw==
+ dependencies:
+ "@remix-run/web-blob" "^3.0.4"
+ "@remix-run/web-form-data" "^3.0.2"
+ "@remix-run/web-stream" "^1.0.3"
+ "@web3-storage/multipart-parser" "^1.0.0"
+ data-uri-to-buffer "^3.0.1"
+ mrmime "^1.0.0"
+
+"@remix-run/web-file@^3.0.2":
+ version "3.0.2"
+ resolved "https://registry.npmjs.org/@remix-run/web-file/-/web-file-3.0.2.tgz#1a6cc0900a1310ede4bc96abad77ac6eb27a2131"
+ integrity sha512-eFC93Onh/rZ5kUNpCQersmBtxedGpaXK2/gsUl49BYSGK/DvuPu3l06vmquEDdcPaEuXcsdGP0L7zrmUqrqo4A==
+ dependencies:
+ "@remix-run/web-blob" "^3.0.3"
+
+"@remix-run/web-form-data@^3.0.2":
+ version "3.0.2"
+ resolved "https://registry.npmjs.org/@remix-run/web-form-data/-/web-form-data-3.0.2.tgz#733a4c8f8176523b7b60a8bd0dc6704fd4d498f3"
+ integrity sha512-F8tm3iB1sPxMpysK6Js7lV3gvLfTNKGmIW38t/e6dtPEB5L1WdbRG1cmLyhsonFc7rT1x1JKdz+2jCtoSdnIUw==
+ dependencies:
+ web-encoding "1.1.5"
+
+"@remix-run/web-stream@^1.0.0", "@remix-run/web-stream@^1.0.3":
+ version "1.0.3"
+ resolved "https://registry.yarnpkg.com/@remix-run/web-stream/-/web-stream-1.0.3.tgz#3284a6a45675d1455c4d9c8f31b89225c9006438"
+ integrity sha512-wlezlJaA5NF6SsNMiwQnnAW6tnPzQ5I8qk0Y0pSohm0eHKa2FQ1QhEKLVVcDDu02TmkfHgnux0igNfeYhDOXiA==
+ dependencies:
+ web-streams-polyfill "^3.1.1"
+
"@rollup/plugin-babel@^5.2.2":
version "5.3.0"
resolved "https://registry.npmjs.org/@rollup/plugin-babel/-/plugin-babel-5.3.0.tgz"
@@ -1783,13 +1824,6 @@
dependencies:
"@babel/types" "^7.3.0"
-"@types/blob-stream@^0.1.30":
- version "0.1.30"
- resolved "https://registry.npmjs.org/@types/blob-stream/-/blob-stream-0.1.30.tgz"
- integrity sha512-Cyp7/3KZfpQXcUPhcb/+VPubLQE8YzFXbUh1/KNVzBH6sykr0AJohdIzX8YWSy0YZIg1yI75DULDeEfr7lESSg==
- dependencies:
- "@types/node" "*"
-
"@types/body-parser@*":
version "1.19.1"
resolved "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.1.tgz"
@@ -1798,13 +1832,6 @@
"@types/connect" "*"
"@types/node" "*"
-"@types/busboy@^0.3.1":
- version "0.3.1"
- resolved "https://registry.npmjs.org/@types/busboy/-/busboy-0.3.1.tgz"
- integrity sha512-8BPLNy4x+7lbTOGkAyUIZrrPEZ7WzbO7YlVGMf9EZi9J9mqILEkYbt/kgVWQ7fizOISo1hM/7cAsWVTa7EhQDg==
- dependencies:
- "@types/node" "*"
-
"@types/cacache@^15.0.0":
version "15.0.1"
resolved "https://registry.npmjs.org/@types/cacache/-/cacache-15.0.1.tgz"
@@ -2095,7 +2122,7 @@
resolved "https://registry.npmjs.org/@types/ms/-/ms-0.7.31.tgz"
integrity sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==
-"@types/node-fetch@^2.5.12", "@types/node-fetch@^2.5.7":
+"@types/node-fetch@^2.5.7":
version "2.5.12"
resolved "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.5.12.tgz"
integrity sha512-MKgC4dlq4kKNa/mYrwpKfzQMB5X3ee5U6fSprkKpToBqBmX4nFZL9cW5jl6sWn+xpRJ7ypWh2yyqqr8UUCstSw==
@@ -2411,27 +2438,10 @@
ts-node "8.9.1"
typescript "4.3.4"
-"@web-std/blob@^3.0.0":
- version "3.0.1"
- resolved "https://registry.npmjs.org/@web-std/blob/-/blob-3.0.1.tgz"
- integrity sha512-opuhO8ZGGUj2jdFwfgMjWjVdKaHlQanGWXxj5wV2YQ1uGTuL/SADnsDitpMfRb+lSpmQyzpwZFfj4CNKQuwSKQ==
- dependencies:
- "@web-std/stream" "1.0.0"
- web-encoding "1.1.5"
-
-"@web-std/file@^3.0.0":
- version "3.0.0"
- resolved "https://registry.npmjs.org/@web-std/file/-/file-3.0.0.tgz"
- integrity sha512-ac2H3IUOky3GRJdbdJYgVvH+OApzpr0KX0t9p6Nj9AuHxKudc0pD7mVruekCW4CZv6DbOReDukwwskJN1bSCzA==
- dependencies:
- "@web-std/blob" "^3.0.0"
-
-"@web-std/stream@1.0.0":
+"@web3-storage/multipart-parser@^1.0.0":
version "1.0.0"
- resolved "https://registry.npmjs.org/@web-std/stream/-/stream-1.0.0.tgz"
- integrity sha512-jyIbdVl+0ZJyKGTV0Ohb9E6UnxP+t7ZzX4Do3AHjZKxUXKMs9EmqnBDQgHF7bEw0EzbQygOjtt/7gvtmi//iCQ==
- dependencies:
- web-streams-polyfill "^3.1.1"
+ resolved "https://registry.npmjs.org/@web3-storage/multipart-parser/-/multipart-parser-1.0.0.tgz#6b69dc2a32a5b207ba43e556c25cc136a56659c4"
+ integrity sha512-BEO6al7BYqcnfX15W2cnGR+Q566ACXAT9UQykORCWW80lmkpWsnEob6zJS1ZVBKsSJC8+7vJkHwlp+lXG1UCdw==
"@xmldom/xmldom@^0.7.5":
version "0.7.5"
@@ -3180,23 +3190,11 @@ bl@^4.0.3, bl@^4.1.0:
inherits "^2.0.4"
readable-stream "^3.4.0"
-blob-stream@^0.1.3:
- version "0.1.3"
- resolved "https://registry.npmjs.org/blob-stream/-/blob-stream-0.1.3.tgz"
- integrity sha1-mNZor2mW4PMu9mbQbiFczH13aGw=
- dependencies:
- blob "0.0.4"
-
blob-util@^2.0.2:
version "2.0.2"
resolved "https://registry.npmjs.org/blob-util/-/blob-util-2.0.2.tgz"
integrity sha512-T7JQa+zsXXEa6/8ZhHcQEW1UFfVM49Ts65uBkFL6fz2QmrElqmbajIDJvuA0tEhRe5eIjpV9ZF+0RfZR9voJFQ==
-blob@0.0.4:
- version "0.0.4"
- resolved "https://registry.npmjs.org/blob/-/blob-0.0.4.tgz"
- integrity sha1-vPEwUspURj8w+fx+lbmkdjCpSSE=
-
bluebird@^3.7.2:
version "3.7.2"
resolved "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz"
@@ -3316,13 +3314,6 @@ builtin-modules@^3.1.0:
resolved "https://registry.npmjs.org/builtin-modules/-/builtin-modules-3.2.0.tgz"
integrity sha512-lGzLKcioL90C7wMczpkY0n/oART3MbBa8R9OFGE1rJxoVI86u4WAGfEk8Wjv10eKSyTHVGkSo3bvBylCEtk7LA==
-busboy@^0.3.1:
- version "0.3.1"
- resolved "https://registry.npmjs.org/busboy/-/busboy-0.3.1.tgz"
- integrity sha512-y7tTxhGKXcyBxRKAni+awqx8uqaJKrSFSNFSeRG5CsWNdmy2BIK+6VGWEW7TZnIO/533mtMEA4rOevQV815YJw==
- dependencies:
- dicer "0.3.0"
-
bytes@3.0.0:
version "3.0.0"
resolved "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz"
@@ -3869,7 +3860,7 @@ core-js-pure@^3.16.0:
core-util-is@1.0.2:
version "1.0.2"
- resolved "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz"
+ resolved "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=
core-util-is@~1.0.0:
@@ -4018,6 +4009,11 @@ dashdash@^1.12.0:
dependencies:
assert-plus "^1.0.0"
+data-uri-to-buffer@^3.0.1:
+ version "3.0.1"
+ resolved "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-3.0.1.tgz#594b8973938c5bc2c33046535785341abc4f3636"
+ integrity sha512-WboRycPNsVw3B3TL559F7kuBUM4d8CgMEvk6xEJlOp7OBPjt6G7z8WMWlD2rOFZLk6OYfFIUGsCOWzcQH9K2og==
+
data-urls@^2.0.0:
version "2.0.0"
resolved "https://registry.npmjs.org/data-urls/-/data-urls-2.0.0.tgz"
@@ -4208,13 +4204,6 @@ detect-newline@3.1.0, detect-newline@^3.0.0:
resolved "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz"
integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==
-dicer@0.3.0:
- version "0.3.0"
- resolved "https://registry.npmjs.org/dicer/-/dicer-0.3.0.tgz"
- integrity sha512-MdceRRWqltEG2dZqO769g27N/3PXfcKl04VhYnBlo2YhH7zPi88VebsjTKclaOyiuMaGU72hTfw3VkUitGcVCA==
- dependencies:
- streamsearch "0.1.2"
-
diff-sequences@^27.5.1:
version "27.5.1"
resolved "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.5.1.tgz"
@@ -5130,7 +5119,7 @@ extract-zip@2.0.1:
extsprintf@1.3.0:
version "1.3.0"
- resolved "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz"
+ resolved "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05"
integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=
extsprintf@^1.2.0:
@@ -5331,18 +5320,9 @@ form-data@^3.0.0:
combined-stream "^1.0.8"
mime-types "^2.1.12"
-form-data@^4.0.0:
- version "4.0.0"
- resolved "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz"
- integrity sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==
- dependencies:
- asynckit "^0.4.0"
- combined-stream "^1.0.8"
- mime-types "^2.1.12"
-
form-data@~2.3.2:
version "2.3.3"
- resolved "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz"
+ resolved "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6"
integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==
dependencies:
asynckit "^0.4.0"
@@ -8016,6 +7996,11 @@ morgan@^1.10.0:
on-finished "~2.3.0"
on-headers "~1.0.2"
+mrmime@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.npmjs.org/mrmime/-/mrmime-1.0.0.tgz#14d387f0585a5233d291baba339b063752a2398b"
+ integrity sha512-a70zx7zFfVO7XpnQ2IX1Myh9yY4UYvfld/dikWRnsXxbyvMcfz+u6UfgNAtH+k2QqtJuzVpv6eLTx1G2+WKZbQ==
+
ms@2.0.0:
version "2.0.0"
resolved "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz"
@@ -9884,10 +9869,10 @@ stream-shift@^1.0.0:
resolved "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz"
integrity sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==
-streamsearch@0.1.2:
+stream-slice@^0.1.2:
version "0.1.2"
- resolved "https://registry.npmjs.org/streamsearch/-/streamsearch-0.1.2.tgz"
- integrity sha1-gIudDlb8Jz2Am6VzOOkpkZoanxo=
+ resolved "https://registry.npmjs.org/stream-slice/-/stream-slice-0.1.2.tgz#2dc4f4e1b936fb13f3eb39a2def1932798d07a4b"
+ integrity sha1-LcT04bk2+xPz6zmi3vGTJ5jQeks=
strict-event-emitter@^0.2.0:
version "0.2.0"