From 698f0864ff60876373a79075ba4d5aff82933168 Mon Sep 17 00:00:00 2001 From: Nick Stott Date: Wed, 23 Oct 2019 10:33:05 -0400 Subject: [PATCH 01/10] add sd-streams from https://github.com/stardazed/sd-streams/blob/master/packages/streams/src/ --- cli/js/body.ts | 77 +- cli/js/dom_types.ts | 138 +- cli/js/errors.ts | 6 +- cli/js/globals.ts | 7 +- cli/js/request.ts | 10 +- cli/js/request_test.ts | 34 +- cli/js/streams/mod.ts | 25 + cli/js/streams/pipe-to.ts | 233 +++ cli/js/streams/queue-mixin.ts | 81 + cli/js/streams/queue.ts | 65 + .../readable-byte-stream-controller.ts | 209 +++ cli/js/streams/readable-internals.ts | 1355 +++++++++++++++++ cli/js/streams/readable-stream-byob-reader.ts | 91 ++ .../streams/readable-stream-byob-request.ts | 60 + .../readable-stream-default-controller.ts | 134 ++ .../streams/readable-stream-default-reader.ts | 75 + cli/js/streams/readable-stream.ts | 384 +++++ cli/js/streams/shared-internals.ts | 307 ++++ cli/js/streams/strategies.ts | 36 + cli/js/streams/transform-internals.ts | 362 +++++ .../transform-stream-default-controller.ts | 57 + cli/js/streams/transform-stream.ts | 143 ++ cli/js/streams/writable-internals.ts | 790 ++++++++++ .../writable-stream-default-controller.ts | 97 ++ .../streams/writable-stream-default-writer.ts | 135 ++ cli/js/streams/writable-stream.ts | 117 ++ 26 files changed, 5009 insertions(+), 19 deletions(-) create mode 100644 cli/js/streams/mod.ts create mode 100644 cli/js/streams/pipe-to.ts create mode 100644 cli/js/streams/queue-mixin.ts create mode 100644 cli/js/streams/queue.ts create mode 100644 cli/js/streams/readable-byte-stream-controller.ts create mode 100644 cli/js/streams/readable-internals.ts create mode 100644 cli/js/streams/readable-stream-byob-reader.ts create mode 100644 cli/js/streams/readable-stream-byob-request.ts create mode 100644 cli/js/streams/readable-stream-default-controller.ts create mode 100644 cli/js/streams/readable-stream-default-reader.ts create mode 100644 cli/js/streams/readable-stream.ts create mode 100644 cli/js/streams/shared-internals.ts create mode 100644 cli/js/streams/strategies.ts create mode 100644 cli/js/streams/transform-internals.ts create mode 100644 cli/js/streams/transform-stream-default-controller.ts create mode 100644 cli/js/streams/transform-stream.ts create mode 100644 cli/js/streams/writable-internals.ts create mode 100644 cli/js/streams/writable-stream-default-controller.ts create mode 100644 cli/js/streams/writable-stream-default-writer.ts create mode 100644 cli/js/streams/writable-stream.ts diff --git a/cli/js/body.ts b/cli/js/body.ts index 6567b1934b7789..f495720678740c 100644 --- a/cli/js/body.ts +++ b/cli/js/body.ts @@ -3,6 +3,7 @@ import * as blob from "./blob.ts"; import * as encoding from "./text_encoding.ts"; import * as headers from "./headers.ts"; import * as domTypes from "./dom_types.ts"; +import { window } from "./window.ts"; const { Headers } = headers; @@ -12,6 +13,13 @@ const { TextEncoder, TextDecoder } = encoding; const Blob = blob.DenoBlob; const DenoBlob = blob.DenoBlob; +type ReadableStreamReader = domTypes.ReadableStreamReader; + +interface ReadableStreamController { + enqueue(chunk: string | ArrayBuffer): void; + close(): void; +} + export type BodySource = | domTypes.Blob | domTypes.BufferSource @@ -37,6 +45,8 @@ function validateBodyType(owner: Body, bodySource: BodySource): boolean { return true; } else if (typeof bodySource === "string") { return true; + } else if (bodySource instanceof window.ReadableStream) { + return true; } else if 
(bodySource instanceof FormData) { return true; } else if (!bodySource) { @@ -47,6 +57,58 @@ function validateBodyType(owner: Body, bodySource: BodySource): boolean { ); } +function concatenate(...arrays: Uint8Array[]): ArrayBuffer { + let totalLength = 0; + for (const arr of arrays) { + totalLength += arr.length; + } + const result = new Uint8Array(totalLength); + let offset = 0; + for (const arr of arrays) { + result.set(arr, offset); + offset += arr.length; + } + return result.buffer as ArrayBuffer; +} + +function bufferFromStream(stream: ReadableStreamReader): Promise { + return new Promise( + (resolve, reject): void => { + const parts: Uint8Array[] = []; + const encoder = new TextEncoder(); + // recurse + (function pump(): void { + stream + .read() + .then( + ({ done, value }): void => { + if (done) { + return resolve(concatenate(...parts)); + } + + if (typeof value === "string") { + parts.push(encoder.encode(value)); + } else if (value instanceof ArrayBuffer) { + parts.push(new Uint8Array(value)); + } else if (!value) { + // noop for undefined + } else { + reject("unhandled type on stream read"); + } + + return pump(); + } + ) + .catch( + (err): void => { + reject(err); + } + ); + })(); + } + ); +} + function getHeaderValueParams(value: string): Map { const params = new Map(); // Forced to do so for some Map constructor param mismatch @@ -81,8 +143,18 @@ export class Body implements domTypes.Body { if (this._stream) { return this._stream; } + + if (this._bodySource instanceof window.ReadableStream) { + // @ts-ignore + this._stream = this._bodySource; + } if (typeof this._bodySource === "string") { - throw Error("not implemented"); + this._stream = new window.ReadableStream({ + start(controller: ReadableStreamController): void { + controller.enqueue(this._bodySource); + controller.close(); + } + }); } return this._stream; } @@ -259,6 +331,9 @@ export class Body implements domTypes.Body { } else if (typeof this._bodySource === "string") { const enc = new TextEncoder(); return enc.encode(this._bodySource).buffer as ArrayBuffer; + } else if (this._bodySource instanceof window.ReadableStream) { + // @ts-ignore + return bufferFromStream(this._bodySource.getReader()); } else if (this._bodySource instanceof FormData) { const enc = new TextEncoder(); return enc.encode(this._bodySource.toString()).buffer as ArrayBuffer; diff --git a/cli/js/dom_types.ts b/cli/js/dom_types.ts index 308505cf51521f..aeba2771f6075c 100644 --- a/cli/js/dom_types.ts +++ b/cli/js/dom_types.ts @@ -248,7 +248,7 @@ export interface AddEventListenerOptions extends EventListenerOptions { passive: boolean; } -interface AbortSignal extends EventTarget { +export interface AbortSignal extends EventTarget { readonly aborted: boolean; onabort: ((this: AbortSignal, ev: ProgressEvent) => any) | null; addEventListener( @@ -273,19 +273,6 @@ interface AbortSignal extends EventTarget { ): void; } -export interface ReadableStream { - readonly locked: boolean; - cancel(): Promise; - getReader(): ReadableStreamReader; - tee(): [ReadableStream, ReadableStream]; -} - -export interface ReadableStreamReader { - cancel(): Promise; - read(): Promise; - releaseLock(): void; -} - export interface FormData extends DomIterable { append(name: string, value: string | Blob, fileName?: string): void; delete(name: string): void; @@ -343,6 +330,129 @@ export interface Body { text(): Promise; } +export interface ReadableStream { + readonly locked: boolean; + cancel(): Promise; + getReader(): ReadableStreamReader; + tee(): [ReadableStream, 
ReadableStream]; +} + +export interface WritableStream { + readonly locked: boolean; + abort(reason?: any): Promise; + getWriter(): WritableStreamDefaultWriter; +} + +export interface PipeOptions { + preventAbort?: boolean; + preventCancel?: boolean; + preventClose?: boolean; + signal?: AbortSignal; +} + +export interface UnderlyingSource { + cancel?: ReadableStreamErrorCallback; + pull?: ReadableStreamDefaultControllerCallback; + start?: ReadableStreamDefaultControllerCallback; + type?: undefined; +} + +export interface UnderlyingByteSource { + autoAllocateChunkSize?: number; + cancel?: ReadableStreamErrorCallback; + pull?: ReadableByteStreamControllerCallback; + start?: ReadableByteStreamControllerCallback; + type: "bytes"; +} + +export interface UnderlyingSink { + abort?: WritableStreamErrorCallback; + close?: WritableStreamDefaultControllerCloseCallback; + start?: WritableStreamDefaultControllerStartCallback; + type?: undefined; + write?: WritableStreamDefaultControllerWriteCallback; +} + +export interface ReadableStreamReader { + cancel(): Promise; + read(): Promise; + releaseLock(): void; +} + +export interface ReadableStreamErrorCallback { + (reason: any): void | PromiseLike; +} + +export interface ReadableByteStreamControllerCallback { + (controller: ReadableByteStreamController): void | PromiseLike; +} + +export interface ReadableStreamDefaultControllerCallback { + (controller: ReadableStreamDefaultController): void | PromiseLike; +} + +export interface ReadableStreamDefaultController { + readonly desiredSize: number | null; + close(): void; + enqueue(chunk: R): void; + error(error?: any): void; +} + +export interface ReadableByteStreamController { + readonly byobRequest: ReadableStreamBYOBRequest | undefined; + readonly desiredSize: number | null; + close(): void; + enqueue(chunk: ArrayBufferView): void; + error(error?: any): void; +} + +export interface ReadableStreamBYOBRequest { + readonly view: ArrayBufferView; + respond(bytesWritten: number): void; + respondWithNewView(view: ArrayBufferView): void; +} + +export interface WritableStreamDefaultWriter { + readonly closed: Promise; + readonly desiredSize: number | null; + readonly ready: Promise; + abort(reason?: any): Promise; + close(): Promise; + releaseLock(): void; + write(chunk: W): Promise; +} + +export interface WritableStreamErrorCallback { + (reason: any): void | PromiseLike; +} + +export interface WritableStreamDefaultControllerCloseCallback { + (): void | PromiseLike; +} + +export interface WritableStreamDefaultControllerStartCallback { + (controller: WritableStreamDefaultController): void | PromiseLike; +} + +export interface WritableStreamDefaultControllerWriteCallback { + (chunk: W, controller: WritableStreamDefaultController): void | PromiseLike< + void + >; +} + +export interface WritableStreamDefaultController { + error(error?: any): void; +} + +export interface QueuingStrategy { + highWaterMark?: number; + size?: QueuingStrategySizeCallback; +} + +export interface QueuingStrategySizeCallback { + (chunk: T): number; +} + export interface Headers extends DomIterable { /** Appends a new value onto an existing header inside a `Headers` object, or * adds the header if it does not already exist. 
diff --git a/cli/js/errors.ts b/cli/js/errors.ts index 02ddfa2f2801f8..57297f67269f74 100644 --- a/cli/js/errors.ts +++ b/cli/js/errors.ts @@ -75,5 +75,9 @@ export enum ErrorKind { UnsupportedFetchScheme = 47, TooManyRedirects = 48, Diagnostic = 49, - JSError = 50 + JSError = 50, + + /** TODO These are DomError Types, and should be moved there when it exists */ + DataCloneError = 51, + AbortError = 52 } diff --git a/cli/js/globals.ts b/cli/js/globals.ts index b734b8da31a968..f54cb5fa0eaf34 100644 --- a/cli/js/globals.ts +++ b/cli/js/globals.ts @@ -26,8 +26,8 @@ import * as url from "./url.ts"; import * as urlSearchParams from "./url_search_params.ts"; import * as workers from "./workers.ts"; import * as performanceUtil from "./performance.ts"; - import * as request from "./request.ts"; +import * as streams from "./streams/mod.ts"; // These imports are not exposed and therefore are fine to just import the // symbols required. @@ -138,6 +138,11 @@ export type Request = domTypes.Request; window.Response = fetchTypes.Response; export type Response = domTypes.Response; +window.ReadableStream = streams.ReadableStream; +export type ReadableStream = domTypes.ReadableStream; +window.WritableStream = streams.WritableStream; +export type WritableStream = domTypes.WritableStream; + window.performance = new performanceUtil.Performance(); // This variable functioning correctly depends on `declareAsLet` diff --git a/cli/js/request.ts b/cli/js/request.ts index 0c77b88548758b..345792c5c90c83 100644 --- a/cli/js/request.ts +++ b/cli/js/request.ts @@ -2,8 +2,10 @@ import * as headers from "./headers.ts"; import * as body from "./body.ts"; import * as domTypes from "./dom_types.ts"; +import * as streams from "./streams/mod.ts"; const { Headers } = headers; +const { ReadableStream } = streams; function byteUpperCase(s: string): string { return String(s).replace(/[a-z]/g, function byteUpperCaseReplace(c): string { @@ -138,7 +140,13 @@ export class Request extends body.Body implements domTypes.Request { headersList.push(header); } - const body2 = this._bodySource; + let body2 = this._bodySource; + + if (this._bodySource instanceof ReadableStream) { + const tees = (this._bodySource as domTypes.ReadableStream).tee(); + this._stream = this._bodySource = tees[0]; + body2 = tees[1]; + } const cloned = new Request(this.url, { body: body2, diff --git a/cli/js/request_test.ts b/cli/js/request_test.ts index e9e1f5164c6a21..3daca8f5a186a9 100644 --- a/cli/js/request_test.ts +++ b/cli/js/request_test.ts @@ -1,5 +1,5 @@ // Copyright 2018-2019 the Deno authors. All rights reserved. MIT license. 
-import { test, assertEquals } from "./test_util.ts"; +import { test, assert, assertEquals } from "./test_util.ts"; test(function fromInit(): void { const req = new Request("https://example.com", { @@ -15,3 +15,35 @@ test(function fromInit(): void { assertEquals(req.url, "https://example.com"); assertEquals(req.headers.get("test-header"), "value"); }); + +test(function fromRequest(): void { + const r = new Request("https://example.com"); + // @ts-ignore + r._bodySource = "ahoyhoy"; + r.headers.set("test-header", "value"); + + const req = new Request(r); + + // @ts-ignore + assertEquals(req._bodySource, r._bodySource); + assertEquals(req.url, r.url); + assertEquals(req.headers.get("test-header"), r.headers.get("test-header")); +}); + +test(async function cloneRequestBodyStream(): Promise { + // hack to get a stream + const stream = new Request("", { body: "a test body" }).body; + const r1 = new Request("https://example.com", { + body: stream + }); + + const r2 = r1.clone(); + + const b1 = await r1.text(); + const b2 = await r2.text(); + + assertEquals(b1, b2); + + // @ts-ignore + assert(r1._bodySource !== r2._bodySource); +}); diff --git a/cli/js/streams/mod.ts b/cli/js/streams/mod.ts new file mode 100644 index 00000000000000..a6d7b4cceab783 --- /dev/null +++ b/cli/js/streams/mod.ts @@ -0,0 +1,25 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * @stardazed/streams - implementation of the web streams standard + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +export { SDReadableStream as ReadableStream } from "./readable-stream.ts"; +export { WritableStream } from "./writable-stream.ts"; + +export { TransformStream } from "./transform-stream.ts"; +export { + ByteLengthQueuingStrategy, + CountQueuingStrategy +} from "./strategies.ts"; + +// only for linked web standard implementations +export { + createReadableStream as internal_createReadableStream, + createReadableByteStream as internal_createReadableByteStream, + readableStreamTee as internal_readableStreamTee +} from "./readable-stream.ts"; diff --git a/cli/js/streams/pipe-to.ts b/cli/js/streams/pipe-to.ts new file mode 100644 index 00000000000000..2dee4cdb9de407 --- /dev/null +++ b/cli/js/streams/pipe-to.ts @@ -0,0 +1,233 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/pipe-to - pipeTo algorithm implementation + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +import * as rs from "./readable-internals.ts"; +import * as ws from "./writable-internals.ts"; +import * as shared from "./shared-internals.ts"; + +import { ReadableStreamDefaultReader } from "./readable-stream-default-reader.ts"; +import { WritableStreamDefaultWriter } from "./writable-stream-default-writer.ts"; +import { PipeOptions } from "../dom_types.ts"; +import { DenoError, ErrorKind } from "../errors.ts"; + +// add a wrapper to handle falsy rejections +interface ErrorWrapper { + actualError: shared.ErrorResult; +} + +export function pipeTo( + source: rs.SDReadableStream, + dest: ws.WritableStream, + options: PipeOptions +) { + const preventClose = !!options.preventClose; + const preventAbort = 
!!options.preventAbort; + const preventCancel = !!options.preventCancel; + const signal = options.signal; + + let shuttingDown = false; + let latestWrite = Promise.resolve(); + const promise = shared.createControlledPromise(); + + // If IsReadableByteStreamController(this.[[readableStreamController]]) is true, let reader be either ! AcquireReadableStreamBYOBReader(this) or ! AcquireReadableStreamDefaultReader(this), at the user agent’s discretion. + // Otherwise, let reader be ! AcquireReadableStreamDefaultReader(this). + const reader = new ReadableStreamDefaultReader(source); + const writer = new WritableStreamDefaultWriter(dest); + + let abortAlgorithm: () => any; + if (signal !== undefined) { + abortAlgorithm = () => { + // TODO this should be a DOMException, + // https://github.com/stardazed/sd-streams/blob/master/packages/streams/src/pipe-to.ts#L38 + const error = new DenoError(ErrorKind.AbortError, "Aborted"); + const actions: (() => Promise)[] = []; + if (preventAbort === false) { + actions.push(() => { + if (dest[shared.state_] === "writable") { + return ws.writableStreamAbort(dest, error); + } + return Promise.resolve(); + }); + } + if (preventCancel === false) { + actions.push(() => { + if (source[shared.state_] === "readable") { + return rs.readableStreamCancel(source, error); + } + return Promise.resolve(); + }); + } + shutDown( + () => { + return Promise.all(actions.map(a => a())).then(_ => undefined); + }, + { actualError: error } + ); + }; + + if (signal.aborted === true) { + abortAlgorithm(); + } else { + signal.addEventListener("abort", abortAlgorithm); + } + } + + function onStreamErrored( + stream: rs.SDReadableStream | ws.WritableStream, + promise: Promise, + action: (error: shared.ErrorResult) => void + ) { + if (stream[shared.state_] === "errored") { + action(stream[shared.storedError_]); + } else { + promise.catch(action); + } + } + + function onStreamClosed( + stream: rs.SDReadableStream | ws.WritableStream, + promise: Promise, + action: () => void + ) { + if (stream[shared.state_] === "closed") { + action(); + } else { + promise.then(action); + } + } + + onStreamErrored(source, reader[rs.closedPromise_].promise, error => { + if (!preventAbort) { + shutDown(() => ws.writableStreamAbort(dest, error), { + actualError: error + }); + } else { + shutDown(undefined, { actualError: error }); + } + }); + + onStreamErrored(dest, writer[ws.closedPromise_].promise, error => { + if (!preventCancel) { + shutDown(() => rs.readableStreamCancel(source, error), { + actualError: error + }); + } else { + shutDown(undefined, { actualError: error }); + } + }); + + onStreamClosed(source, reader[rs.closedPromise_].promise, () => { + if (!preventClose) { + shutDown(() => + ws.writableStreamDefaultWriterCloseWithErrorPropagation(writer) + ); + } else { + shutDown(); + } + }); + + if ( + ws.writableStreamCloseQueuedOrInFlight(dest) || + dest[shared.state_] === "closed" + ) { + // Assert: no chunks have been read or written. + const destClosed = new TypeError(); + if (!preventCancel) { + shutDown(() => rs.readableStreamCancel(source, destClosed), { + actualError: destClosed + }); + } else { + shutDown(undefined, { actualError: destClosed }); + } + } + + function awaitLatestWrite(): Promise { + const curLatestWrite = latestWrite; + return latestWrite.then(() => + curLatestWrite === latestWrite ? 
undefined : awaitLatestWrite() + ); + } + + function flushRemainder() { + if ( + dest[shared.state_] === "writable" && + !ws.writableStreamCloseQueuedOrInFlight(dest) + ) { + return awaitLatestWrite(); + } else { + return undefined; + } + } + + function shutDown(action?: () => Promise, error?: ErrorWrapper) { + if (shuttingDown) { + return; + } + shuttingDown = true; + + if (action === undefined) { + action = () => Promise.resolve(); + } + + function finishShutDown() { + action!().then( + _ => finalize(error), + newError => finalize({ actualError: newError }) + ); + } + + const flushWait = flushRemainder(); + if (flushWait) { + flushWait.then(finishShutDown); + } else { + finishShutDown(); + } + } + + function finalize(error?: ErrorWrapper) { + ws.writableStreamDefaultWriterRelease(writer); + rs.readableStreamReaderGenericRelease(reader); + if (signal && abortAlgorithm) { + signal.removeEventListener("abort", abortAlgorithm); + } + if (error) { + promise.reject(error.actualError); + } else { + promise.resolve(undefined); + } + } + + function next() { + if (shuttingDown) { + return; + } + + writer[ws.readyPromise_].promise.then(() => { + rs.readableStreamDefaultReaderRead(reader).then( + ({ value, done }) => { + if (done) { + return; + } + latestWrite = ws + .writableStreamDefaultWriterWrite(writer, value!) + .catch(() => {}); + next(); + }, + _error => { + latestWrite = Promise.resolve(); + } + ); + }); + } + + next(); + + return promise.promise; +} diff --git a/cli/js/streams/queue-mixin.ts b/cli/js/streams/queue-mixin.ts new file mode 100644 index 00000000000000..0fa47893368c50 --- /dev/null +++ b/cli/js/streams/queue-mixin.ts @@ -0,0 +1,81 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/queue-mixin - internal queue operations for stream controllers + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +import { Queue, QueueImpl } from "./queue.ts"; +import { isFiniteNonNegativeNumber } from "./shared-internals.ts"; + +export const queue_ = Symbol("queue_"); +export const queueTotalSize_ = Symbol("queueTotalSize_"); + +export interface QueueElement { + value: V; + size: number; +} + +export interface QueueContainer { + [queue_]: Queue>; + [queueTotalSize_]: number; +} + +export interface ByteQueueContainer { + [queue_]: Queue<{ + buffer: ArrayBufferLike; + byteOffset: number; + byteLength: number; + }>; + [queueTotalSize_]: number; +} + +export function dequeueValue(container: QueueContainer) { + // Assert: container has[[queue]] and[[queueTotalSize]] internal slots. + // Assert: container.[[queue]] is not empty. + const pair = container[queue_].shift()!; + const newTotalSize = container[queueTotalSize_] - pair.size; + container[queueTotalSize_] = Math.max(0, newTotalSize); // < 0 can occur due to rounding errors. + return pair.value; +} + +export function enqueueValueWithSize( + container: QueueContainer, + value: V, + size: number +) { + // Assert: container has[[queue]] and[[queueTotalSize]] internal slots. + if (!isFiniteNonNegativeNumber(size)) { + throw new RangeError("Chunk size must be a non-negative, finite numbers"); + } + container[queue_].push({ value, size }); + container[queueTotalSize_] += size; +} + +export function peekQueueValue(container: QueueContainer) { + // Assert: container has[[queue]] and[[queueTotalSize]] internal slots. 
+ // Assert: container.[[queue]] is not empty. + return container[queue_].front()!.value; +} + +export function resetQueue( + container: ByteQueueContainer | QueueContainer +) { + // Chrome (as of v67) has a steep performance cliff with large arrays + // and shift(), around about 50k elements. While this is an unusual case + // we use a simple wrapper around shift and push that is chunked to + // avoid this pitfall. + // @see: https://github.com/stardazed/sd-streams/issues/1 + container[queue_] = new QueueImpl(); + + // The code below can be used as a plain array implementation of the + // Queue interface. + // const q = [] as any; + // q.front = function() { return this[0]; }; + // container[queue_] = q; + + container[queueTotalSize_] = 0; +} diff --git a/cli/js/streams/queue.ts b/cli/js/streams/queue.ts new file mode 100644 index 00000000000000..e243ba169b4e08 --- /dev/null +++ b/cli/js/streams/queue.ts @@ -0,0 +1,65 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/queue - simple queue type with chunked array backing + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +const CHUNK_SIZE = 16384; + +export interface Queue { + push(t: T): void; + shift(): T | undefined; + front(): T | undefined; + readonly length: number; +} + +export class QueueImpl implements Queue { + private readonly chunks_: T[][]; + private readChunk_: T[]; + private writeChunk_: T[]; + private length_: number; + + constructor() { + this.chunks_ = [[]]; + this.readChunk_ = this.writeChunk_ = this.chunks_[0]; + this.length_ = 0; + } + + push(t: T): void { + this.writeChunk_.push(t); + this.length_ += 1; + if (this.writeChunk_.length === CHUNK_SIZE) { + this.writeChunk_ = []; + this.chunks_.push(this.writeChunk_); + } + } + + front(): T | undefined { + if (this.length_ === 0) { + return undefined; + } + return this.readChunk_[0]; + } + + shift(): T | undefined { + if (this.length_ === 0) { + return undefined; + } + const t = this.readChunk_.shift(); + + this.length_ -= 1; + if (this.readChunk_.length === 0 && this.readChunk_ !== this.writeChunk_) { + this.chunks_.shift(); + this.readChunk_ = this.chunks_[0]; + } + return t; + } + + get length() { + return this.length_; + } +} diff --git a/cli/js/streams/readable-byte-stream-controller.ts b/cli/js/streams/readable-byte-stream-controller.ts new file mode 100644 index 00000000000000..0ac1659feff764 --- /dev/null +++ b/cli/js/streams/readable-byte-stream-controller.ts @@ -0,0 +1,209 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/readable-byte-stream-controller - ReadableByteStreamController class implementation + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +import * as rs from "./readable-internals.ts"; +import * as q from "./queue-mixin.ts"; +import * as shared from "./shared-internals.ts"; +import { ReadableStreamBYOBRequest } from "./readable-stream-byob-request.ts"; +import { Queue } from "./queue.ts"; +import { UnderlyingByteSource } from "../dom_types.ts"; + +export class ReadableByteStreamController + implements rs.SDReadableByteStreamController { + [rs.autoAllocateChunkSize_]: number | 
undefined; + [rs.byobRequest_]: rs.SDReadableStreamBYOBRequest | undefined; + [rs.cancelAlgorithm_]: rs.CancelAlgorithm; + [rs.closeRequested_]: boolean; + [rs.controlledReadableByteStream_]: rs.SDReadableStream; + [rs.pullAgain_]: boolean; + [rs.pullAlgorithm_]: rs.PullAlgorithm; + [rs.pulling_]: boolean; + [rs.pendingPullIntos_]: rs.PullIntoDescriptor[]; + [rs.started_]: boolean; + [rs.strategyHWM_]: number; + + [q.queue_]: Queue<{ + buffer: ArrayBufferLike; + byteOffset: number; + byteLength: number; + }>; + [q.queueTotalSize_]: number; + + constructor() { + throw new TypeError(); + } + + get byobRequest(): rs.SDReadableStreamBYOBRequest | undefined { + if (!rs.isReadableByteStreamController(this)) { + throw new TypeError(); + } + if ( + this[rs.byobRequest_] === undefined && + this[rs.pendingPullIntos_].length > 0 + ) { + const firstDescriptor = this[rs.pendingPullIntos_][0]; + const view = new Uint8Array( + firstDescriptor.buffer, + firstDescriptor.byteOffset + firstDescriptor.bytesFilled, + firstDescriptor.byteLength - firstDescriptor.bytesFilled + ); + const byobRequest = Object.create( + ReadableStreamBYOBRequest.prototype + ) as ReadableStreamBYOBRequest; + rs.setUpReadableStreamBYOBRequest(byobRequest, this, view); + this[rs.byobRequest_] = byobRequest; + } + return this[rs.byobRequest_]; + } + + get desiredSize(): number | null { + if (!rs.isReadableByteStreamController(this)) { + throw new TypeError(); + } + return rs.readableByteStreamControllerGetDesiredSize(this); + } + + close() { + if (!rs.isReadableByteStreamController(this)) { + throw new TypeError(); + } + if (this[rs.closeRequested_]) { + throw new TypeError("Stream is already closing"); + } + if (this[rs.controlledReadableByteStream_][shared.state_] !== "readable") { + throw new TypeError("Stream is closed or errored"); + } + rs.readableByteStreamControllerClose(this); + } + + enqueue(chunk: ArrayBufferView) { + if (!rs.isReadableByteStreamController(this)) { + throw new TypeError(); + } + if (this[rs.closeRequested_]) { + throw new TypeError("Stream is already closing"); + } + if (this[rs.controlledReadableByteStream_][shared.state_] !== "readable") { + throw new TypeError("Stream is closed or errored"); + } + if (!ArrayBuffer.isView(chunk)) { + throw new TypeError("chunk must be a valid ArrayBufferView"); + } + // If ! IsDetachedBuffer(chunk.[[ViewedArrayBuffer]]) is true, throw a TypeError exception. + return rs.readableByteStreamControllerEnqueue(this, chunk); + } + + error(error?: shared.ErrorResult) { + if (!rs.isReadableByteStreamController(this)) { + throw new TypeError(); + } + rs.readableByteStreamControllerError(this, error); + } + + [rs.cancelSteps_](reason: shared.ErrorResult) { + if (this[rs.pendingPullIntos_].length > 0) { + const firstDescriptor = this[rs.pendingPullIntos_][0]; + firstDescriptor.bytesFilled = 0; + } + q.resetQueue(this); + const result = this[rs.cancelAlgorithm_](reason); + rs.readableByteStreamControllerClearAlgorithms(this); + return result; + } + + [rs.pullSteps_](forAuthorCode: boolean) { + const stream = this[rs.controlledReadableByteStream_]; + // Assert: ! ReadableStreamHasDefaultReader(stream) is true. + if (this[q.queueTotalSize_] > 0) { + // Assert: ! ReadableStreamGetNumReadRequests(stream) is 0. 
+ const entry = this[q.queue_].shift()!; + this[q.queueTotalSize_] -= entry.byteLength; + rs.readableByteStreamControllerHandleQueueDrain(this); + const view = new Uint8Array( + entry.buffer, + entry.byteOffset, + entry.byteLength + ); + return Promise.resolve( + rs.readableStreamCreateReadResult(view, false, forAuthorCode) + ); + } + const autoAllocateChunkSize = this[rs.autoAllocateChunkSize_]; + if (autoAllocateChunkSize !== undefined) { + let buffer: ArrayBuffer; + try { + buffer = new ArrayBuffer(autoAllocateChunkSize); + } catch (error) { + return Promise.reject(error); + } + const pullIntoDescriptor: rs.PullIntoDescriptor = { + buffer, + byteOffset: 0, + byteLength: autoAllocateChunkSize, + bytesFilled: 0, + elementSize: 1, + ctor: Uint8Array, + readerType: "default" + }; + this[rs.pendingPullIntos_].push(pullIntoDescriptor); + } + + const promise = rs.readableStreamAddReadRequest(stream, forAuthorCode); + rs.readableByteStreamControllerCallPullIfNeeded(this); + return promise; + } +} + +export function setUpReadableByteStreamControllerFromUnderlyingSource( + stream: rs.SDReadableStream, + underlyingByteSource: UnderlyingByteSource, + highWaterMark: number +) { + // Assert: underlyingByteSource is not undefined. + const controller = Object.create( + ReadableByteStreamController.prototype + ) as ReadableByteStreamController; + + const startAlgorithm = () => { + return shared.invokeOrNoop(underlyingByteSource, "start", [controller]); + }; + const pullAlgorithm = shared.createAlgorithmFromUnderlyingMethod( + underlyingByteSource, + "pull", + [controller] + ); + const cancelAlgorithm = shared.createAlgorithmFromUnderlyingMethod( + underlyingByteSource, + "cancel", + [] + ); + + let autoAllocateChunkSize = underlyingByteSource.autoAllocateChunkSize; + if (autoAllocateChunkSize !== undefined) { + autoAllocateChunkSize = Number(autoAllocateChunkSize); + if ( + !shared.isInteger(autoAllocateChunkSize) || + autoAllocateChunkSize <= 0 + ) { + throw new RangeError( + "autoAllocateChunkSize must be a positive, finite integer" + ); + } + } + rs.setUpReadableByteStreamController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + autoAllocateChunkSize + ); +} diff --git a/cli/js/streams/readable-internals.ts b/cli/js/streams/readable-internals.ts new file mode 100644 index 00000000000000..0ec8375ab36fd4 --- /dev/null +++ b/cli/js/streams/readable-internals.ts @@ -0,0 +1,1355 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/readable-internals - internal types and functions for readable streams + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +import * as ws from "./writable-internals.ts"; +import * as shared from "./shared-internals.ts"; +import * as q from "./queue-mixin.ts"; +import { + PipeOptions, + QueuingStrategy, + QueuingStrategySizeCallback, + UnderlyingSource, + UnderlyingByteSource +} from "../dom_types.ts"; + +// ReadableStreamDefaultController +export const controlledReadableStream_ = Symbol("controlledReadableStream_"); +export const pullAlgorithm_ = Symbol("pullAlgorithm_"); +export const cancelAlgorithm_ = Symbol("cancelAlgorithm_"); +export const strategySizeAlgorithm_ = Symbol("strategySizeAlgorithm_"); +export const strategyHWM_ = Symbol("strategyHWM_"); +export const started_ = Symbol("started_"); 
+export const closeRequested_ = Symbol("closeRequested_"); +export const pullAgain_ = Symbol("pullAgain_"); +export const pulling_ = Symbol("pulling_"); +export const cancelSteps_ = Symbol("cancelSteps_"); +export const pullSteps_ = Symbol("pullSteps_"); + +// ReadableByteStreamController +export const autoAllocateChunkSize_ = Symbol("autoAllocateChunkSize_"); +export const byobRequest_ = Symbol("byobRequest_"); +export const controlledReadableByteStream_ = Symbol( + "controlledReadableByteStream_" +); +export const pendingPullIntos_ = Symbol("pendingPullIntos_"); + +// ReadableStreamDefaultReader +export const closedPromise_ = Symbol("closedPromise_"); +export const ownerReadableStream_ = Symbol("ownerReadableStream_"); +export const readRequests_ = Symbol("readRequests_"); +export const readIntoRequests_ = Symbol("readIntoRequests_"); + +// ReadableStreamBYOBRequest +export const associatedReadableByteStreamController_ = Symbol( + "associatedReadableByteStreamController_" +); +export const view_ = Symbol("view_"); + +// ReadableStreamBYOBReader + +// ReadableStream +export const reader_ = Symbol("reader_"); +export const readableStreamController_ = Symbol("readableStreamController_"); + +export type StartFunction = ( + controller: SDReadableStreamControllerBase +) => void | PromiseLike; +export type StartAlgorithm = () => Promise | void; +export type PullFunction = ( + controller: SDReadableStreamControllerBase +) => void | PromiseLike; +export type PullAlgorithm = ( + controller: SDReadableStreamControllerBase +) => PromiseLike; +export type CancelAlgorithm = (reason?: shared.ErrorResult) => Promise; + +// ---- + +export interface SDReadableStreamControllerBase { + readonly desiredSize: number | null; + close(): void; + error(e?: shared.ErrorResult): void; + + [cancelSteps_](reason: shared.ErrorResult): Promise; + [pullSteps_](forAuthorCode: boolean): Promise>; +} + +export interface SDReadableStreamBYOBRequest { + readonly view: ArrayBufferView; + respond(bytesWritten: number): void; + respondWithNewView(view: ArrayBufferView): void; + + [associatedReadableByteStreamController_]: + | SDReadableByteStreamController + | undefined; + [view_]: ArrayBufferView | undefined; +} + +interface ArrayBufferViewCtor { + new ( + buffer: ArrayBufferLike, + byteOffset?: number, + byteLength?: number + ): ArrayBufferView; +} + +export interface PullIntoDescriptor { + readerType: "default" | "byob"; + ctor: ArrayBufferViewCtor; + buffer: ArrayBufferLike; + byteOffset: number; + byteLength: number; + bytesFilled: number; + elementSize: number; +} + +export interface SDReadableByteStreamController + extends SDReadableStreamControllerBase, + q.ByteQueueContainer { + readonly byobRequest: SDReadableStreamBYOBRequest | undefined; + enqueue(chunk: ArrayBufferView): void; + + [autoAllocateChunkSize_]: number | undefined; // A positive integer, when the automatic buffer allocation feature is enabled. In that case, this value specifies the size of buffer to allocate. It is undefined otherwise. 
+ [byobRequest_]: SDReadableStreamBYOBRequest | undefined; // A ReadableStreamBYOBRequest instance representing the current BYOB pull request + [cancelAlgorithm_]: CancelAlgorithm; // A promise-returning algorithm, taking one argument (the cancel reason), which communicates a requested cancelation to the underlying source + [closeRequested_]: boolean; // A boolean flag indicating whether the stream has been closed by its underlying byte source, but still has chunks in its internal queue that have not yet been read + [controlledReadableByteStream_]: SDReadableStream; // The ReadableStream instance controlled + [pullAgain_]: boolean; // A boolean flag set to true if the stream’s mechanisms requested a call to the underlying byte source’s pull() method to pull more data, but the pull could not yet be done since a previous call is still executing + [pullAlgorithm_]: PullAlgorithm; // A promise-returning algorithm that pulls data from the underlying source + [pulling_]: boolean; // A boolean flag set to true while the underlying byte source’s pull() method is executing and has not yet fulfilled, used to prevent reentrant calls + [pendingPullIntos_]: PullIntoDescriptor[]; // A List of descriptors representing pending BYOB pull requests + [started_]: boolean; // A boolean flag indicating whether the underlying source has finished starting + [strategyHWM_]: number; // A number supplied to the constructor as part of the stream’s queuing strategy, indicating the point at which the stream will apply backpressure to its underlying byte source +} + +export interface SDReadableStreamDefaultController + extends SDReadableStreamControllerBase, + q.QueueContainer { + enqueue(chunk?: OutputType): void; + + [controlledReadableStream_]: SDReadableStream; + [pullAlgorithm_]: PullAlgorithm; + [cancelAlgorithm_]: CancelAlgorithm; + [strategySizeAlgorithm_]: QueuingStrategySizeCallback; + [strategyHWM_]: number; + + [started_]: boolean; + [closeRequested_]: boolean; + [pullAgain_]: boolean; + [pulling_]: boolean; +} + +// ---- + +export interface SDReadableStreamReader { + readonly closed: Promise; + cancel(reason: shared.ErrorResult): Promise; + releaseLock(): void; + + [ownerReadableStream_]: SDReadableStream | undefined; + [closedPromise_]: shared.ControlledPromise; +} + +export interface ReadRequest extends shared.ControlledPromise { + forAuthorCode: boolean; +} + +export declare class SDReadableStreamDefaultReader + implements SDReadableStreamReader { + constructor(stream: SDReadableStream); + + readonly closed: Promise; + cancel(reason: shared.ErrorResult): Promise; + releaseLock(): void; + read(): Promise>; + + [ownerReadableStream_]: SDReadableStream | undefined; + [closedPromise_]: shared.ControlledPromise; + [readRequests_]: ReadRequest>[]; +} + +export declare class SDReadableStreamBYOBReader + implements SDReadableStreamReader { + constructor(stream: SDReadableStream); + + readonly closed: Promise; + cancel(reason: shared.ErrorResult): Promise; + releaseLock(): void; + read(view: ArrayBufferView): Promise>; + + [ownerReadableStream_]: SDReadableStream | undefined; + [closedPromise_]: shared.ControlledPromise; + [readIntoRequests_]: ReadRequest>[]; +} + +// ---- + +export interface GenericTransformStream { + readable: SDReadableStream; + writable: ws.WritableStream; +} + +export type ReadableStreamState = "readable" | "closed" | "errored"; + +export declare class SDReadableStream { + constructor( + underlyingSource: UnderlyingByteSource, + strategy?: { highWaterMark?: number; size?: undefined } + ); + 
constructor( + underlyingSource?: UnderlyingSource, + strategy?: QueuingStrategy + ); + + readonly locked: boolean; + cancel(reason?: shared.ErrorResult): Promise; + getReader(): SDReadableStreamReader; + getReader(options: { mode: "byob" }): SDReadableStreamBYOBReader; + tee(): SDReadableStream[]; + + pipeThrough( + transform: GenericTransformStream, + options?: PipeOptions + ): SDReadableStream; + pipeTo( + dest: ws.WritableStream, + options?: PipeOptions + ): Promise; + + [shared.state_]: ReadableStreamState; + [shared.storedError_]: shared.ErrorResult; + [reader_]: SDReadableStreamReader | undefined; + [readableStreamController_]: SDReadableStreamControllerBase; +} + +// ---- Stream + +export function initializeReadableStream( + stream: SDReadableStream +) { + stream[shared.state_] = "readable"; + stream[reader_] = undefined; + stream[shared.storedError_] = undefined; + stream[readableStreamController_] = undefined!; // mark slot as used for brand check +} + +export function isReadableStream( + value: unknown +): value is SDReadableStream { + if (typeof value !== "object" || value === null) { + return false; + } + return readableStreamController_ in value; +} + +export function isReadableStreamLocked( + stream: SDReadableStream +) { + return stream[reader_] !== undefined; +} + +export function readableStreamGetNumReadIntoRequests( + stream: SDReadableStream +) { + // TODO remove the "as unknown" cast + // This is in to workaround a compiler error + // error TS2352: Conversion of type 'SDReadableStreamReader' to type 'SDReadableStreamBYOBReader' may be a mistake because neither type sufficiently overlaps with the other. If this was intentional, convert the expression to 'unknown' first. + // Type 'SDReadableStreamReader' is missing the following properties from type 'SDReadableStreamBYOBReader': read, [readIntoRequests_] + const reader = (stream[reader_] as unknown) as SDReadableStreamBYOBReader; + if (reader === undefined) { + return 0; + } + return reader[readIntoRequests_].length; +} + +export function readableStreamGetNumReadRequests( + stream: SDReadableStream +) { + const reader = stream[reader_] as SDReadableStreamDefaultReader; + if (reader === undefined) { + return 0; + } + return reader[readRequests_].length; +} + +export function readableStreamCreateReadResult( + value: T, + done: boolean, + forAuthorCode: boolean +): IteratorResult { + const prototype = forAuthorCode ? Object.prototype : null; + const result = Object.create(prototype); + result.value = value; + result.done = done; + return result; +} + +export function readableStreamAddReadIntoRequest( + stream: SDReadableStream, + forAuthorCode: boolean +) { + // Assert: ! IsReadableStreamBYOBReader(stream.[[reader]]) is true. + // Assert: stream.[[state]] is "readable" or "closed". + const reader = stream[reader_] as SDReadableStreamBYOBReader; + const conProm = shared.createControlledPromise< + IteratorResult + >() as ReadRequest>; + conProm.forAuthorCode = forAuthorCode; + reader[readIntoRequests_].push(conProm); + return conProm.promise; +} + +export function readableStreamAddReadRequest( + stream: SDReadableStream, + forAuthorCode: boolean +) { + // Assert: ! IsReadableStreamDefaultReader(stream.[[reader]]) is true. + // Assert: stream.[[state]] is "readable". 
+ const reader = stream[reader_] as SDReadableStreamDefaultReader; + const conProm = shared.createControlledPromise< + IteratorResult + >() as ReadRequest>; + conProm.forAuthorCode = forAuthorCode; + reader[readRequests_].push(conProm); + return conProm.promise; +} + +export function readableStreamHasBYOBReader( + stream: SDReadableStream +) { + const reader = stream[reader_]; + return isReadableStreamBYOBReader(reader); +} + +export function readableStreamHasDefaultReader( + stream: SDReadableStream +) { + const reader = stream[reader_]; + return isReadableStreamDefaultReader(reader); +} + +export function readableStreamCancel( + stream: SDReadableStream, + reason: shared.ErrorResult +) { + if (stream[shared.state_] === "closed") { + return Promise.resolve(undefined); + } + if (stream[shared.state_] === "errored") { + return Promise.reject(stream[shared.storedError_]); + } + readableStreamClose(stream); + + const sourceCancelPromise = stream[readableStreamController_][cancelSteps_]( + reason + ); + return sourceCancelPromise.then(_ => undefined); +} + +export function readableStreamClose( + stream: SDReadableStream +) { + // Assert: stream.[[state]] is "readable". + stream[shared.state_] = "closed"; + const reader = stream[reader_]; + if (reader === undefined) { + return; + } + + if (isReadableStreamDefaultReader(reader)) { + for (const readRequest of reader[readRequests_]) { + readRequest.resolve( + readableStreamCreateReadResult( + undefined, + true, + readRequest.forAuthorCode + ) + ); + } + reader[readRequests_] = []; + } + reader[closedPromise_].resolve(); + reader[closedPromise_].promise.catch(() => {}); +} + +export function readableStreamError( + stream: SDReadableStream, + error: shared.ErrorResult +) { + if (stream[shared.state_] !== "readable") { + throw new RangeError("Stream is in an invalid state"); + } + stream[shared.state_] = "errored"; + stream[shared.storedError_] = error; + + const reader = stream[reader_]; + if (reader === undefined) { + return; + } + if (isReadableStreamDefaultReader(reader)) { + for (const readRequest of reader[readRequests_]) { + readRequest.reject(error); + } + reader[readRequests_] = []; + } else { + // Assert: IsReadableStreamBYOBReader(reader). 
+ // TODO remove the "as unknown" cast + const readIntoRequests = ((reader as unknown) as SDReadableStreamBYOBReader)[ + readIntoRequests_ + ]; + for (const readIntoRequest of readIntoRequests) { + readIntoRequest.reject(error); + } + // TODO remove the "as unknown" cast + ((reader as unknown) as SDReadableStreamBYOBReader)[readIntoRequests_] = []; + } + + reader[closedPromise_].reject(error); +} + +// ---- Readers + +export function isReadableStreamDefaultReader( + reader: unknown +): reader is SDReadableStreamDefaultReader { + if (typeof reader !== "object" || reader === null) { + return false; + } + return readRequests_ in reader; +} + +export function isReadableStreamBYOBReader( + reader: unknown +): reader is SDReadableStreamBYOBReader { + if (typeof reader !== "object" || reader === null) { + return false; + } + return readIntoRequests_ in reader; +} + +export function readableStreamReaderGenericInitialize( + reader: SDReadableStreamReader, + stream: SDReadableStream +) { + reader[ownerReadableStream_] = stream; + stream[reader_] = reader; + const streamState = stream[shared.state_]; + + reader[closedPromise_] = shared.createControlledPromise(); + if (streamState === "readable") { + // leave as is + } else if (streamState === "closed") { + reader[closedPromise_].resolve(undefined); + } else { + reader[closedPromise_].reject(stream[shared.storedError_]); + reader[closedPromise_].promise.catch(() => {}); + } +} + +export function readableStreamReaderGenericRelease( + reader: SDReadableStreamReader +) { + // Assert: reader.[[ownerReadableStream]] is not undefined. + // Assert: reader.[[ownerReadableStream]].[[reader]] is reader. + const stream = reader[ownerReadableStream_]; + if (stream === undefined) { + throw new TypeError("Reader is in an inconsistent state"); + } + + if (stream[shared.state_] === "readable") { + // code moved out + } else { + reader[closedPromise_] = shared.createControlledPromise(); + } + reader[closedPromise_].reject(new TypeError()); + reader[closedPromise_].promise.catch(() => {}); + + stream[reader_] = undefined; + reader[ownerReadableStream_] = undefined; +} + +export function readableStreamBYOBReaderRead( + reader: SDReadableStreamBYOBReader, + view: ArrayBufferView, + forAuthorCode = false +) { + const stream = reader[ownerReadableStream_]!; + // Assert: stream is not undefined. + + if (stream[shared.state_] === "errored") { + return Promise.reject(stream[shared.storedError_]); + } + return readableByteStreamControllerPullInto( + stream[readableStreamController_] as SDReadableByteStreamController, + view, + forAuthorCode + ); +} + +export function readableStreamDefaultReaderRead( + reader: SDReadableStreamDefaultReader, + forAuthorCode = false +): Promise> { + const stream = reader[ownerReadableStream_]!; + // Assert: stream is not undefined. + + if (stream[shared.state_] === "closed") { + return Promise.resolve( + readableStreamCreateReadResult(undefined, true, forAuthorCode) + ); + } + if (stream[shared.state_] === "errored") { + return Promise.reject(stream[shared.storedError_]); + } + // Assert: stream.[[state]] is "readable". 
+ return stream[readableStreamController_][pullSteps_](forAuthorCode); +} + +export function readableStreamFulfillReadIntoRequest( + stream: SDReadableStream, + chunk: ArrayBufferView, + done: boolean +) { + // TODO remove the "as unknown" cast + const reader = (stream[reader_] as unknown) as SDReadableStreamBYOBReader; + const readIntoRequest = reader[readIntoRequests_].shift()!; // <-- length check done in caller + readIntoRequest.resolve( + readableStreamCreateReadResult(chunk, done, readIntoRequest.forAuthorCode) + ); +} + +export function readableStreamFulfillReadRequest( + stream: SDReadableStream, + chunk: OutputType, + done: boolean +) { + const reader = stream[reader_] as SDReadableStreamDefaultReader; + const readRequest = reader[readRequests_].shift()!; // <-- length check done in caller + readRequest.resolve( + readableStreamCreateReadResult(chunk, done, readRequest.forAuthorCode) + ); +} + +// ---- DefaultController + +export function setUpReadableStreamDefaultController( + stream: SDReadableStream, + controller: SDReadableStreamDefaultController, + startAlgorithm: StartAlgorithm, + pullAlgorithm: PullAlgorithm, + cancelAlgorithm: CancelAlgorithm, + highWaterMark: number, + sizeAlgorithm: QueuingStrategySizeCallback +) { + // Assert: stream.[[readableStreamController]] is undefined. + controller[controlledReadableStream_] = stream; + q.resetQueue(controller); + controller[started_] = false; + controller[closeRequested_] = false; + controller[pullAgain_] = false; + controller[pulling_] = false; + controller[strategySizeAlgorithm_] = sizeAlgorithm; + controller[strategyHWM_] = highWaterMark; + controller[pullAlgorithm_] = pullAlgorithm; + controller[cancelAlgorithm_] = cancelAlgorithm; + stream[readableStreamController_] = controller; + + const startResult = startAlgorithm(); + Promise.resolve(startResult).then( + _ => { + controller[started_] = true; + // Assert: controller.[[pulling]] is false. + // Assert: controller.[[pullAgain]] is false. + readableStreamDefaultControllerCallPullIfNeeded(controller); + }, + error => { + readableStreamDefaultControllerError(controller, error); + } + ); +} + +export function isReadableStreamDefaultController( + value: unknown +): value is SDReadableStreamDefaultController { + if (typeof value !== "object" || value === null) { + return false; + } + return controlledReadableStream_ in value; +} + +export function readableStreamDefaultControllerHasBackpressure( + controller: SDReadableStreamDefaultController +) { + return !readableStreamDefaultControllerShouldCallPull(controller); +} + +export function readableStreamDefaultControllerCanCloseOrEnqueue( + controller: SDReadableStreamDefaultController +) { + const state = controller[controlledReadableStream_][shared.state_]; + return controller[closeRequested_] === false && state === "readable"; +} + +export function readableStreamDefaultControllerGetDesiredSize( + controller: SDReadableStreamDefaultController +) { + const state = controller[controlledReadableStream_][shared.state_]; + if (state === "errored") { + return null; + } + if (state === "closed") { + return 0; + } + return controller[strategyHWM_] - controller[q.queueTotalSize_]; +} + +export function readableStreamDefaultControllerClose( + controller: SDReadableStreamDefaultController +) { + // Assert: !ReadableStreamDefaultControllerCanCloseOrEnqueue(controller) is true. 
+ controller[closeRequested_] = true; + const stream = controller[controlledReadableStream_]; + if (controller[q.queue_].length === 0) { + readableStreamDefaultControllerClearAlgorithms(controller); + readableStreamClose(stream); + } +} + +export function readableStreamDefaultControllerEnqueue( + controller: SDReadableStreamDefaultController, + chunk: OutputType +) { + const stream = controller[controlledReadableStream_]; + // Assert: !ReadableStreamDefaultControllerCanCloseOrEnqueue(controller) is true. + if ( + isReadableStreamLocked(stream) && + readableStreamGetNumReadRequests(stream) > 0 + ) { + readableStreamFulfillReadRequest(stream, chunk, false); + } else { + // Let result be the result of performing controller.[[strategySizeAlgorithm]], passing in chunk, + // and interpreting the result as an ECMAScript completion value. + // impl note: assuming that in JS land this just means try/catch with rethrow + let chunkSize: number; + try { + chunkSize = controller[strategySizeAlgorithm_](chunk); + } catch (error) { + readableStreamDefaultControllerError(controller, error); + throw error; + } + try { + q.enqueueValueWithSize(controller, chunk, chunkSize); + } catch (error) { + readableStreamDefaultControllerError(controller, error); + throw error; + } + } + readableStreamDefaultControllerCallPullIfNeeded(controller); +} + +export function readableStreamDefaultControllerError( + controller: SDReadableStreamDefaultController, + error: shared.ErrorResult +) { + const stream = controller[controlledReadableStream_]; + if (stream[shared.state_] !== "readable") { + return; + } + q.resetQueue(controller); + readableStreamDefaultControllerClearAlgorithms(controller); + readableStreamError(stream, error); +} + +export function readableStreamDefaultControllerCallPullIfNeeded( + controller: SDReadableStreamDefaultController +) { + if (!readableStreamDefaultControllerShouldCallPull(controller)) { + return; + } + if (controller[pulling_]) { + controller[pullAgain_] = true; + return; + } + if (controller[pullAgain_]) { + throw new RangeError("Stream controller is in an invalid state."); + } + + controller[pulling_] = true; + controller[pullAlgorithm_](controller).then( + _ => { + controller[pulling_] = false; + if (controller[pullAgain_]) { + controller[pullAgain_] = false; + readableStreamDefaultControllerCallPullIfNeeded(controller); + } + }, + error => { + readableStreamDefaultControllerError(controller, error); + } + ); +} + +export function readableStreamDefaultControllerShouldCallPull( + controller: SDReadableStreamDefaultController +) { + const stream = controller[controlledReadableStream_]; + if (!readableStreamDefaultControllerCanCloseOrEnqueue(controller)) { + return false; + } + if (controller[started_] === false) { + return false; + } + if ( + isReadableStreamLocked(stream) && + readableStreamGetNumReadRequests(stream) > 0 + ) { + return true; + } + const desiredSize = readableStreamDefaultControllerGetDesiredSize(controller); + if (desiredSize === null) { + throw new RangeError("Stream is in an invalid state."); + } + return desiredSize > 0; +} + +export function readableStreamDefaultControllerClearAlgorithms( + controller: SDReadableStreamDefaultController +) { + controller[pullAlgorithm_] = undefined!; + controller[cancelAlgorithm_] = undefined!; + controller[strategySizeAlgorithm_] = undefined!; +} + +// ---- BYOBController + +export function setUpReadableByteStreamController( + stream: SDReadableStream, + controller: SDReadableByteStreamController, + startAlgorithm: StartAlgorithm, + 
pullAlgorithm: PullAlgorithm, + cancelAlgorithm: CancelAlgorithm, + highWaterMark: number, + autoAllocateChunkSize: number | undefined +) { + // Assert: stream.[[readableStreamController]] is undefined. + if (stream[readableStreamController_] !== undefined) { + throw new TypeError("Cannot reuse streams"); + } + if (autoAllocateChunkSize !== undefined) { + if ( + !shared.isInteger(autoAllocateChunkSize) || + autoAllocateChunkSize <= 0 + ) { + throw new RangeError( + "autoAllocateChunkSize must be a positive, finite integer" + ); + } + } + // Set controller.[[controlledReadableByteStream]] to stream. + controller[controlledReadableByteStream_] = stream; + // Set controller.[[pullAgain]] and controller.[[pulling]] to false. + controller[pullAgain_] = false; + controller[pulling_] = false; + readableByteStreamControllerClearPendingPullIntos(controller); + q.resetQueue(controller); + controller[closeRequested_] = false; + controller[started_] = false; + controller[strategyHWM_] = shared.validateAndNormalizeHighWaterMark( + highWaterMark + ); + controller[pullAlgorithm_] = pullAlgorithm; + controller[cancelAlgorithm_] = cancelAlgorithm; + controller[autoAllocateChunkSize_] = autoAllocateChunkSize; + controller[pendingPullIntos_] = []; + stream[readableStreamController_] = controller; + + // Let startResult be the result of performing startAlgorithm. + const startResult = startAlgorithm(); + Promise.resolve(startResult).then( + _ => { + controller[started_] = true; + // Assert: controller.[[pulling]] is false. + // Assert: controller.[[pullAgain]] is false. + readableByteStreamControllerCallPullIfNeeded(controller); + }, + error => { + readableByteStreamControllerError(controller, error); + } + ); +} + +export function isReadableStreamBYOBRequest( + value: unknown +): value is SDReadableStreamBYOBRequest { + if (typeof value !== "object" || value === null) { + return false; + } + return associatedReadableByteStreamController_ in value; +} + +export function isReadableByteStreamController( + value: unknown +): value is SDReadableByteStreamController { + if (typeof value !== "object" || value === null) { + return false; + } + return controlledReadableByteStream_ in value; +} + +export function readableByteStreamControllerCallPullIfNeeded( + controller: SDReadableByteStreamController +) { + if (!readableByteStreamControllerShouldCallPull(controller)) { + return; + } + if (controller[pulling_]) { + controller[pullAgain_] = true; + return; + } + // Assert: controller.[[pullAgain]] is false. + controller[pulling_] = true; + controller[pullAlgorithm_](controller).then( + _ => { + controller[pulling_] = false; + if (controller[pullAgain_]) { + controller[pullAgain_] = false; + readableByteStreamControllerCallPullIfNeeded(controller); + } + }, + error => { + readableByteStreamControllerError(controller, error); + } + ); +} + +export function readableByteStreamControllerClearAlgorithms( + controller: SDReadableByteStreamController +) { + controller[pullAlgorithm_] = undefined!; + controller[cancelAlgorithm_] = undefined!; +} + +export function readableByteStreamControllerClearPendingPullIntos( + controller: SDReadableByteStreamController +) { + readableByteStreamControllerInvalidateBYOBRequest(controller); + controller[pendingPullIntos_] = []; +} + +export function readableByteStreamControllerClose( + controller: SDReadableByteStreamController +) { + const stream = controller[controlledReadableByteStream_]; + // Assert: controller.[[closeRequested]] is false. + // Assert: stream.[[state]] is "readable". 
+ if (controller[q.queueTotalSize_] > 0) { + controller[closeRequested_] = true; + return; + } + if (controller[pendingPullIntos_].length > 0) { + const firstPendingPullInto = controller[pendingPullIntos_][0]; + if (firstPendingPullInto.bytesFilled > 0) { + const error = new TypeError(); + readableByteStreamControllerError(controller, error); + throw error; + } + } + readableByteStreamControllerClearAlgorithms(controller); + readableStreamClose(stream); +} + +export function readableByteStreamControllerCommitPullIntoDescriptor( + stream: SDReadableStream, + pullIntoDescriptor: PullIntoDescriptor +) { + // Assert: stream.[[state]] is not "errored". + let done = false; + if (stream[shared.state_] === "closed") { + // Assert: pullIntoDescriptor.[[bytesFilled]] is 0. + done = true; + } + const filledView = readableByteStreamControllerConvertPullIntoDescriptor( + pullIntoDescriptor + ); + if (pullIntoDescriptor.readerType === "default") { + readableStreamFulfillReadRequest(stream, filledView, done); + } else { + // Assert: pullIntoDescriptor.[[readerType]] is "byob". + readableStreamFulfillReadIntoRequest(stream, filledView, done); + } +} + +export function readableByteStreamControllerConvertPullIntoDescriptor( + pullIntoDescriptor: PullIntoDescriptor +) { + const { bytesFilled, elementSize } = pullIntoDescriptor; + // Assert: bytesFilled <= pullIntoDescriptor.byteLength + // Assert: bytesFilled mod elementSize is 0 + return new pullIntoDescriptor.ctor( + pullIntoDescriptor.buffer, + pullIntoDescriptor.byteOffset, + bytesFilled / elementSize + ); +} + +export function readableByteStreamControllerEnqueue( + controller: SDReadableByteStreamController, + chunk: ArrayBufferView +) { + const stream = controller[controlledReadableByteStream_]; + // Assert: controller.[[closeRequested]] is false. + // Assert: stream.[[state]] is "readable". + const { buffer, byteOffset, byteLength } = chunk; + + const transferredBuffer = shared.transferArrayBuffer(buffer); + + if (readableStreamHasDefaultReader(stream)) { + if (readableStreamGetNumReadRequests(stream) === 0) { + readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferredBuffer, + byteOffset, + byteLength + ); + } else { + // Assert: controller.[[queue]] is empty. + const transferredView = new Uint8Array( + transferredBuffer, + byteOffset, + byteLength + ); + readableStreamFulfillReadRequest(stream, transferredView, false); + } + } else if (readableStreamHasBYOBReader(stream)) { + readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferredBuffer, + byteOffset, + byteLength + ); + readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue( + controller + ); + } else { + // Assert: !IsReadableStreamLocked(stream) is false. 
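+ // Neither a default nor a BYOB reader is attached, so buffer the chunk
+ // until a reader acquires a lock on the stream.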
+ readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferredBuffer, + byteOffset, + byteLength + ); + } + readableByteStreamControllerCallPullIfNeeded(controller); +} + +export function readableByteStreamControllerEnqueueChunkToQueue( + controller: SDReadableByteStreamController, + buffer: ArrayBufferLike, + byteOffset: number, + byteLength: number +) { + controller[q.queue_].push({ buffer, byteOffset, byteLength }); + controller[q.queueTotalSize_] += byteLength; +} + +export function readableByteStreamControllerError( + controller: SDReadableByteStreamController, + error: shared.ErrorResult +) { + const stream = controller[controlledReadableByteStream_]; + if (stream[shared.state_] !== "readable") { + return; + } + readableByteStreamControllerClearPendingPullIntos(controller); + q.resetQueue(controller); + readableByteStreamControllerClearAlgorithms(controller); + readableStreamError(stream, error); +} + +export function readableByteStreamControllerFillHeadPullIntoDescriptor( + controller: SDReadableByteStreamController, + size: number, + pullIntoDescriptor: PullIntoDescriptor +) { + // Assert: either controller.[[pendingPullIntos]] is empty, or the first element of controller.[[pendingPullIntos]] is pullIntoDescriptor. + readableByteStreamControllerInvalidateBYOBRequest(controller); + pullIntoDescriptor.bytesFilled += size; +} + +export function readableByteStreamControllerFillPullIntoDescriptorFromQueue( + controller: SDReadableByteStreamController, + pullIntoDescriptor: PullIntoDescriptor +) { + const elementSize = pullIntoDescriptor.elementSize; + const currentAlignedBytes = + pullIntoDescriptor.bytesFilled - + (pullIntoDescriptor.bytesFilled % elementSize); + const maxBytesToCopy = Math.min( + controller[q.queueTotalSize_], + pullIntoDescriptor.byteLength - pullIntoDescriptor.bytesFilled + ); + const maxBytesFilled = pullIntoDescriptor.bytesFilled + maxBytesToCopy; + const maxAlignedBytes = maxBytesFilled - (maxBytesFilled % elementSize); + let totalBytesToCopyRemaining = maxBytesToCopy; + let ready = false; + + if (maxAlignedBytes > currentAlignedBytes) { + totalBytesToCopyRemaining = + maxAlignedBytes - pullIntoDescriptor.bytesFilled; + ready = true; + } + const queue = controller[q.queue_]; + + while (totalBytesToCopyRemaining > 0) { + const headOfQueue = queue.front()!; + const bytesToCopy = Math.min( + totalBytesToCopyRemaining, + headOfQueue.byteLength + ); + const destStart = + pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled; + shared.copyDataBlockBytes( + pullIntoDescriptor.buffer, + destStart, + headOfQueue.buffer, + headOfQueue.byteOffset, + bytesToCopy + ); + if (headOfQueue.byteLength === bytesToCopy) { + queue.shift(); + } else { + headOfQueue.byteOffset += bytesToCopy; + headOfQueue.byteLength -= bytesToCopy; + } + controller[q.queueTotalSize_] -= bytesToCopy; + readableByteStreamControllerFillHeadPullIntoDescriptor( + controller, + bytesToCopy, + pullIntoDescriptor + ); + totalBytesToCopyRemaining -= bytesToCopy; + } + if (!ready) { + // Assert: controller[queueTotalSize_] === 0 + // Assert: pullIntoDescriptor.bytesFilled > 0 + // Assert: pullIntoDescriptor.bytesFilled < pullIntoDescriptor.elementSize + } + return ready; +} + +export function readableByteStreamControllerGetDesiredSize( + controller: SDReadableByteStreamController +) { + const stream = controller[controlledReadableByteStream_]; + const state = stream[shared.state_]; + if (state === "errored") { + return null; + } + if (state === "closed") { + return 0; + } + return 
controller[strategyHWM_] - controller[q.queueTotalSize_]; +} + +export function readableByteStreamControllerHandleQueueDrain( + controller: SDReadableByteStreamController +) { + // Assert: controller.[[controlledReadableByteStream]].[[state]] is "readable". + if (controller[q.queueTotalSize_] === 0 && controller[closeRequested_]) { + readableByteStreamControllerClearAlgorithms(controller); + readableStreamClose(controller[controlledReadableByteStream_]); + } else { + readableByteStreamControllerCallPullIfNeeded(controller); + } +} + +export function readableByteStreamControllerInvalidateBYOBRequest( + controller: SDReadableByteStreamController +) { + const byobRequest = controller[byobRequest_]; + if (byobRequest === undefined) { + return; + } + byobRequest[associatedReadableByteStreamController_] = undefined; + byobRequest[view_] = undefined; + controller[byobRequest_] = undefined; +} + +export function readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue( + controller: SDReadableByteStreamController +) { + // Assert: controller.[[closeRequested]] is false. + const pendingPullIntos = controller[pendingPullIntos_]; + while (pendingPullIntos.length > 0) { + if (controller[q.queueTotalSize_] === 0) { + return; + } + const pullIntoDescriptor = pendingPullIntos[0]; + if ( + readableByteStreamControllerFillPullIntoDescriptorFromQueue( + controller, + pullIntoDescriptor + ) + ) { + readableByteStreamControllerShiftPendingPullInto(controller); + readableByteStreamControllerCommitPullIntoDescriptor( + controller[controlledReadableByteStream_], + pullIntoDescriptor + ); + } + } +} + +export function readableByteStreamControllerPullInto( + controller: SDReadableByteStreamController, + view: ArrayBufferView, + forAuthorCode: boolean +) { + const stream = controller[controlledReadableByteStream_]; + + const elementSize = (view as Uint8Array).BYTES_PER_ELEMENT || 1; // DataView exposes this in Webkit as 1, is not present in FF or Blink + const ctor = view.constructor as Uint8ArrayConstructor; // the typecast here is just for TS typing, it does not influence buffer creation + + const byteOffset = view.byteOffset; + const byteLength = view.byteLength; + const buffer = shared.transferArrayBuffer(view.buffer); + const pullIntoDescriptor: PullIntoDescriptor = { + buffer, + byteOffset, + byteLength, + bytesFilled: 0, + elementSize, + ctor, + readerType: "byob" + }; + + if (controller[pendingPullIntos_].length > 0) { + controller[pendingPullIntos_].push(pullIntoDescriptor); + return readableStreamAddReadIntoRequest(stream, forAuthorCode); + } + if (stream[shared.state_] === "closed") { + const emptyView = new ctor( + pullIntoDescriptor.buffer, + pullIntoDescriptor.byteOffset, + 0 + ); + return Promise.resolve( + readableStreamCreateReadResult(emptyView, true, forAuthorCode) + ); + } + + if (controller[q.queueTotalSize_] > 0) { + if ( + readableByteStreamControllerFillPullIntoDescriptorFromQueue( + controller, + pullIntoDescriptor + ) + ) { + const filledView = readableByteStreamControllerConvertPullIntoDescriptor( + pullIntoDescriptor + ); + readableByteStreamControllerHandleQueueDrain(controller); + return Promise.resolve( + readableStreamCreateReadResult(filledView, false, forAuthorCode) + ); + } + if (controller[closeRequested_]) { + const error = new TypeError(); + readableByteStreamControllerError(controller, error); + return Promise.reject(error); + } + } + + controller[pendingPullIntos_].push(pullIntoDescriptor); + const promise = readableStreamAddReadIntoRequest(stream, forAuthorCode); + 
readableByteStreamControllerCallPullIfNeeded(controller); + return promise; +} + +export function readableByteStreamControllerRespond( + controller: SDReadableByteStreamController, + bytesWritten: number +) { + bytesWritten = Number(bytesWritten); + if (!shared.isFiniteNonNegativeNumber(bytesWritten)) { + throw new RangeError("bytesWritten must be a finite, non-negative number"); + } + // Assert: controller.[[pendingPullIntos]] is not empty. + readableByteStreamControllerRespondInternal(controller, bytesWritten); +} + +export function readableByteStreamControllerRespondInClosedState( + controller: SDReadableByteStreamController, + firstDescriptor: PullIntoDescriptor +) { + firstDescriptor.buffer = shared.transferArrayBuffer(firstDescriptor.buffer); + // Assert: firstDescriptor.[[bytesFilled]] is 0. + const stream = controller[controlledReadableByteStream_]; + if (readableStreamHasBYOBReader(stream)) { + while (readableStreamGetNumReadIntoRequests(stream) > 0) { + const pullIntoDescriptor = readableByteStreamControllerShiftPendingPullInto( + controller + )!; + readableByteStreamControllerCommitPullIntoDescriptor( + stream, + pullIntoDescriptor + ); + } + } +} + +export function readableByteStreamControllerRespondInReadableState( + controller: SDReadableByteStreamController, + bytesWritten: number, + pullIntoDescriptor: PullIntoDescriptor +) { + if ( + pullIntoDescriptor.bytesFilled + bytesWritten > + pullIntoDescriptor.byteLength + ) { + throw new RangeError(); + } + readableByteStreamControllerFillHeadPullIntoDescriptor( + controller, + bytesWritten, + pullIntoDescriptor + ); + if (pullIntoDescriptor.bytesFilled < pullIntoDescriptor.elementSize) { + return; + } + readableByteStreamControllerShiftPendingPullInto(controller); + const remainderSize = + pullIntoDescriptor.bytesFilled % pullIntoDescriptor.elementSize; + if (remainderSize > 0) { + const end = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled; + const remainder = shared.cloneArrayBuffer( + pullIntoDescriptor.buffer, + end - remainderSize, + remainderSize, + ArrayBuffer + ); + readableByteStreamControllerEnqueueChunkToQueue( + controller, + remainder, + 0, + remainder.byteLength + ); + } + pullIntoDescriptor.buffer = shared.transferArrayBuffer( + pullIntoDescriptor.buffer + ); + pullIntoDescriptor.bytesFilled = + pullIntoDescriptor.bytesFilled - remainderSize; + readableByteStreamControllerCommitPullIntoDescriptor( + controller[controlledReadableByteStream_], + pullIntoDescriptor + ); + readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller); +} + +export function readableByteStreamControllerRespondInternal( + controller: SDReadableByteStreamController, + bytesWritten: number +) { + const firstDescriptor = controller[pendingPullIntos_][0]; + const stream = controller[controlledReadableByteStream_]; + if (stream[shared.state_] === "closed") { + if (bytesWritten !== 0) { + throw new TypeError(); + } + readableByteStreamControllerRespondInClosedState( + controller, + firstDescriptor + ); + } else { + // Assert: stream.[[state]] is "readable". + readableByteStreamControllerRespondInReadableState( + controller, + bytesWritten, + firstDescriptor + ); + } + readableByteStreamControllerCallPullIfNeeded(controller); +} + +export function readableByteStreamControllerRespondWithNewView( + controller: SDReadableByteStreamController, + view: ArrayBufferView +) { + // Assert: controller.[[pendingPullIntos]] is not empty. 
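+ // The replacement view must line up exactly with the outstanding BYOB
+ // request: it has to start where the filled region ends and match the
+ // original byte length.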
+ const firstDescriptor = controller[pendingPullIntos_][0]; + if ( + firstDescriptor.byteOffset + firstDescriptor.bytesFilled !== + view.byteOffset + ) { + throw new RangeError(); + } + if (firstDescriptor.byteLength !== view.byteLength) { + throw new RangeError(); + } + firstDescriptor.buffer = view.buffer; + readableByteStreamControllerRespondInternal(controller, view.byteLength); +} + +export function readableByteStreamControllerShiftPendingPullInto( + controller: SDReadableByteStreamController +) { + const descriptor = controller[pendingPullIntos_].shift(); + readableByteStreamControllerInvalidateBYOBRequest(controller); + return descriptor; +} + +export function readableByteStreamControllerShouldCallPull( + controller: SDReadableByteStreamController +) { + // Let stream be controller.[[controlledReadableByteStream]]. + const stream = controller[controlledReadableByteStream_]; + if (stream[shared.state_] !== "readable") { + return false; + } + if (controller[closeRequested_]) { + return false; + } + if (!controller[started_]) { + return false; + } + if ( + readableStreamHasDefaultReader(stream) && + readableStreamGetNumReadRequests(stream) > 0 + ) { + return true; + } + if ( + readableStreamHasBYOBReader(stream) && + readableStreamGetNumReadIntoRequests(stream) > 0 + ) { + return true; + } + const desiredSize = readableByteStreamControllerGetDesiredSize(controller); + // Assert: desiredSize is not null. + return desiredSize! > 0; +} + +export function setUpReadableStreamBYOBRequest( + request: SDReadableStreamBYOBRequest, + controller: SDReadableByteStreamController, + view: ArrayBufferView +) { + if (!isReadableByteStreamController(controller)) { + throw new TypeError(); + } + if (!ArrayBuffer.isView(view)) { + throw new TypeError(); + } + // Assert: !IsDetachedBuffer(view.[[ViewedArrayBuffer]]) is false. 
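+ // Associate the request with its controller and keep hold of the
+ // consumer-supplied view so respond()/respondWithNewView() can commit it.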
+ + request[associatedReadableByteStreamController_] = controller; + request[view_] = view; +} diff --git a/cli/js/streams/readable-stream-byob-reader.ts b/cli/js/streams/readable-stream-byob-reader.ts new file mode 100644 index 00000000000000..20d564c0c68d7c --- /dev/null +++ b/cli/js/streams/readable-stream-byob-reader.ts @@ -0,0 +1,91 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/readable-stream-byob-reader - ReadableStreamBYOBReader class implementation + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +import * as rs from "./readable-internals.ts"; +import * as shared from "./shared-internals.ts"; + +export class SDReadableStreamBYOBReader + implements rs.SDReadableStreamBYOBReader { + [rs.closedPromise_]: shared.ControlledPromise; + [rs.ownerReadableStream_]: rs.SDReadableStream | undefined; + [rs.readIntoRequests_]: rs.ReadRequest>[]; + + constructor(stream: rs.SDReadableStream) { + if (!rs.isReadableStream(stream)) { + throw new TypeError(); + } + if ( + !rs.isReadableByteStreamController(stream[rs.readableStreamController_]) + ) { + throw new TypeError(); + } + if (rs.isReadableStreamLocked(stream)) { + throw new TypeError("The stream is locked."); + } + rs.readableStreamReaderGenericInitialize(this, stream); + this[rs.readIntoRequests_] = []; + } + + get closed(): Promise { + if (!rs.isReadableStreamBYOBReader(this)) { + return Promise.reject(new TypeError()); + } + return this[rs.closedPromise_].promise; + } + + cancel(reason: shared.ErrorResult): Promise { + if (!rs.isReadableStreamBYOBReader(this)) { + return Promise.reject(new TypeError()); + } + const stream = this[rs.ownerReadableStream_]; + if (stream === undefined) { + return Promise.reject( + new TypeError("Reader is not associated with a stream") + ); + } + return rs.readableStreamCancel(stream, reason); + } + + read(view: ArrayBufferView): Promise> { + if (!rs.isReadableStreamBYOBReader(this)) { + return Promise.reject(new TypeError()); + } + if (this[rs.ownerReadableStream_] === undefined) { + return Promise.reject( + new TypeError("Reader is not associated with a stream") + ); + } + if (!ArrayBuffer.isView(view)) { + return Promise.reject( + new TypeError("view argument must be a valid ArrayBufferView") + ); + } + // If ! IsDetachedBuffer(view.[[ViewedArrayBuffer]]) is true, return a promise rejected with a TypeError exception. 
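+ // A zero-length view could never be filled, so it is rejected up front
+ // instead of being queued as a read-into request that cannot complete.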
+ if (view.byteLength === 0) { + return Promise.reject( + new TypeError("supplied buffer view must be > 0 bytes") + ); + } + return rs.readableStreamBYOBReaderRead(this, view, true); + } + + releaseLock(): void { + if (!rs.isReadableStreamBYOBReader(this)) { + throw new TypeError(); + } + if (this[rs.ownerReadableStream_] === undefined) { + throw new TypeError("Reader is not associated with a stream"); + } + if (this[rs.readIntoRequests_].length > 0) { + throw new TypeError(); + } + rs.readableStreamReaderGenericRelease(this); + } +} diff --git a/cli/js/streams/readable-stream-byob-request.ts b/cli/js/streams/readable-stream-byob-request.ts new file mode 100644 index 00000000000000..ff96613084fb9c --- /dev/null +++ b/cli/js/streams/readable-stream-byob-request.ts @@ -0,0 +1,60 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/readable-stream-byob-request - ReadableStreamBYOBRequest class implementation + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +import * as rs from "./readable-internals.ts"; + +export class ReadableStreamBYOBRequest { + [rs.associatedReadableByteStreamController_]: + | rs.SDReadableByteStreamController + | undefined; + [rs.view_]: ArrayBufferView | undefined; + + constructor() { + throw new TypeError(); + } + + get view(): ArrayBufferView { + if (!rs.isReadableStreamBYOBRequest(this)) { + throw new TypeError(); + } + return this[rs.view_]!; + } + + respond(bytesWritten: number) { + if (!rs.isReadableStreamBYOBRequest(this)) { + throw new TypeError(); + } + if (this[rs.associatedReadableByteStreamController_] === undefined) { + throw new TypeError(); + } + // If! IsDetachedBuffer(this.[[view]].[[ViewedArrayBuffer]]) is true, throw a TypeError exception. + return rs.readableByteStreamControllerRespond( + this[rs.associatedReadableByteStreamController_]!, + bytesWritten + ); + } + + respondWithNewView(view: ArrayBufferView) { + if (!rs.isReadableStreamBYOBRequest(this)) { + throw new TypeError(); + } + if (this[rs.associatedReadableByteStreamController_] === undefined) { + throw new TypeError(); + } + if (!ArrayBuffer.isView(view)) { + throw new TypeError("view parameter must be a TypedArray"); + } + // If! IsDetachedBuffer(view.[[ViewedArrayBuffer]]) is true, throw a TypeError exception. 
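+ // Delegate to the controller, which checks the view against the pending
+ // pull-into descriptor before committing it.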
+ return rs.readableByteStreamControllerRespondWithNewView( + this[rs.associatedReadableByteStreamController_]!, + view + ); + } +} diff --git a/cli/js/streams/readable-stream-default-controller.ts b/cli/js/streams/readable-stream-default-controller.ts new file mode 100644 index 00000000000000..dd8b98a9d7ff85 --- /dev/null +++ b/cli/js/streams/readable-stream-default-controller.ts @@ -0,0 +1,134 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/readable-stream-default-controller - ReadableStreamDefaultController class implementation + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +import * as rs from "./readable-internals.ts"; +import * as shared from "./shared-internals.ts"; +import * as q from "./queue-mixin.ts"; +import { Queue } from "./queue.ts"; +import { QueuingStrategySizeCallback, UnderlyingSource } from "../dom_types.ts"; + +export class ReadableStreamDefaultController + implements rs.SDReadableStreamDefaultController { + [rs.cancelAlgorithm_]: rs.CancelAlgorithm; + [rs.closeRequested_]: boolean; + [rs.controlledReadableStream_]: rs.SDReadableStream; + [rs.pullAgain_]: boolean; + [rs.pullAlgorithm_]: rs.PullAlgorithm; + [rs.pulling_]: boolean; + [rs.strategyHWM_]: number; + [rs.strategySizeAlgorithm_]: QueuingStrategySizeCallback; + [rs.started_]: boolean; + + [q.queue_]: Queue>; + [q.queueTotalSize_]: number; + + constructor() { + throw new TypeError(); + } + + get desiredSize(): number | null { + return rs.readableStreamDefaultControllerGetDesiredSize(this); + } + + close() { + if (!rs.isReadableStreamDefaultController(this)) { + throw new TypeError(); + } + if (!rs.readableStreamDefaultControllerCanCloseOrEnqueue(this)) { + throw new TypeError( + "Cannot close, the stream is already closing or not readable" + ); + } + rs.readableStreamDefaultControllerClose(this); + } + + enqueue(chunk?: OutputType) { + if (!rs.isReadableStreamDefaultController(this)) { + throw new TypeError(); + } + if (!rs.readableStreamDefaultControllerCanCloseOrEnqueue(this)) { + throw new TypeError( + "Cannot enqueue, the stream is closing or not readable" + ); + } + rs.readableStreamDefaultControllerEnqueue(this, chunk!); + } + + error(e?: shared.ErrorResult) { + if (!rs.isReadableStreamDefaultController(this)) { + throw new TypeError(); + } + rs.readableStreamDefaultControllerError(this, e); + } + + [rs.cancelSteps_](reason: shared.ErrorResult) { + q.resetQueue(this); + const result = this[rs.cancelAlgorithm_](reason); + rs.readableStreamDefaultControllerClearAlgorithms(this); + return result; + } + + [rs.pullSteps_](forAuthorCode: boolean) { + const stream = this[rs.controlledReadableStream_]; + if (this[q.queue_].length > 0) { + const chunk = q.dequeueValue(this); + if (this[rs.closeRequested_] && this[q.queue_].length === 0) { + rs.readableStreamDefaultControllerClearAlgorithms(this); + rs.readableStreamClose(stream); + } else { + rs.readableStreamDefaultControllerCallPullIfNeeded(this); + } + return Promise.resolve( + rs.readableStreamCreateReadResult(chunk, false, forAuthorCode) + ); + } + + const pendingPromise = rs.readableStreamAddReadRequest( + stream, + forAuthorCode + ); + rs.readableStreamDefaultControllerCallPullIfNeeded(this); + return pendingPromise; + } +} + +export function setUpReadableStreamDefaultControllerFromUnderlyingSource< + OutputType +>( + 
stream: rs.SDReadableStream, + underlyingSource: UnderlyingSource, + highWaterMark: number, + sizeAlgorithm: QueuingStrategySizeCallback +) { + // Assert: underlyingSource is not undefined. + const controller = Object.create(ReadableStreamDefaultController.prototype); + const startAlgorithm = () => { + return shared.invokeOrNoop(underlyingSource, "start", [controller]); + }; + const pullAlgorithm = shared.createAlgorithmFromUnderlyingMethod( + underlyingSource, + "pull", + [controller] + ); + const cancelAlgorithm = shared.createAlgorithmFromUnderlyingMethod( + underlyingSource, + "cancel", + [] + ); + rs.setUpReadableStreamDefaultController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + sizeAlgorithm + ); +} diff --git a/cli/js/streams/readable-stream-default-reader.ts b/cli/js/streams/readable-stream-default-reader.ts new file mode 100644 index 00000000000000..578f35c4625bde --- /dev/null +++ b/cli/js/streams/readable-stream-default-reader.ts @@ -0,0 +1,75 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/readable-stream-default-reader - ReadableStreamDefaultReader class implementation + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +import * as rs from "./readable-internals.ts"; +import * as shared from "./shared-internals.ts"; + +export class ReadableStreamDefaultReader + implements rs.SDReadableStreamReader { + [rs.closedPromise_]: shared.ControlledPromise; + [rs.ownerReadableStream_]: rs.SDReadableStream | undefined; + [rs.readRequests_]: rs.ReadRequest>[]; + + constructor(stream: rs.SDReadableStream) { + if (!rs.isReadableStream(stream)) { + throw new TypeError(); + } + if (rs.isReadableStreamLocked(stream)) { + throw new TypeError("The stream is locked."); + } + rs.readableStreamReaderGenericInitialize(this, stream); + this[rs.readRequests_] = []; + } + + get closed(): Promise { + if (!rs.isReadableStreamDefaultReader(this)) { + return Promise.reject(new TypeError()); + } + return this[rs.closedPromise_].promise; + } + + cancel(reason: shared.ErrorResult): Promise { + if (!rs.isReadableStreamDefaultReader(this)) { + return Promise.reject(new TypeError()); + } + const stream = this[rs.ownerReadableStream_]; + if (stream === undefined) { + return Promise.reject( + new TypeError("Reader is not associated with a stream") + ); + } + return rs.readableStreamCancel(stream, reason); + } + + read(): Promise> { + if (!rs.isReadableStreamDefaultReader(this)) { + return Promise.reject(new TypeError()); + } + if (this[rs.ownerReadableStream_] === undefined) { + return Promise.reject( + new TypeError("Reader is not associated with a stream") + ); + } + return rs.readableStreamDefaultReaderRead(this, true); + } + + releaseLock() { + if (!rs.isReadableStreamDefaultReader(this)) { + throw new TypeError(); + } + if (this[rs.ownerReadableStream_] === undefined) { + return; + } + if (this[rs.readRequests_].length !== 0) { + throw new TypeError("Cannot release a stream with pending read requests"); + } + rs.readableStreamReaderGenericRelease(this); + } +} diff --git a/cli/js/streams/readable-stream.ts b/cli/js/streams/readable-stream.ts new file mode 100644 index 00000000000000..0b5114978aed6c --- /dev/null +++ b/cli/js/streams/readable-stream.ts @@ -0,0 +1,384 @@ +// Forked from 
https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/readable-stream - ReadableStream class implementation + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +import * as rs from "./readable-internals.ts"; +import * as ws from "./writable-internals.ts"; +import * as shared from "./shared-internals.ts"; +import { pipeTo } from "./pipe-to.ts"; +import { + PipeOptions, + QueuingStrategy, + QueuingStrategySizeCallback, + UnderlyingSource, + UnderlyingByteSource +} from "../dom_types.ts"; + +import { + ReadableStreamDefaultController, + setUpReadableStreamDefaultControllerFromUnderlyingSource +} from "./readable-stream-default-controller.ts"; +import { ReadableStreamDefaultReader } from "./readable-stream-default-reader.ts"; + +import { + ReadableByteStreamController, + setUpReadableByteStreamControllerFromUnderlyingSource +} from "./readable-byte-stream-controller.ts"; +import { SDReadableStreamBYOBReader } from "./readable-stream-byob-reader.ts"; + +export class SDReadableStream + implements rs.SDReadableStream { + [shared.state_]: rs.ReadableStreamState; + [shared.storedError_]: shared.ErrorResult; + [rs.reader_]: rs.SDReadableStreamReader | undefined; + [rs.readableStreamController_]: rs.SDReadableStreamControllerBase; + + constructor( + underlyingSource: UnderlyingByteSource, + strategy?: { highWaterMark?: number; size?: undefined } + ); + constructor( + underlyingSource?: UnderlyingSource, + strategy?: QueuingStrategy + ); + constructor( + underlyingSource: UnderlyingSource | UnderlyingByteSource = {}, + strategy: + | QueuingStrategy + | { highWaterMark?: number; size?: undefined } = {} + ) { + rs.initializeReadableStream(this); + + const sizeFunc = strategy.size; + const stratHWM = strategy.highWaterMark; + const sourceType = underlyingSource.type; + + if (sourceType === undefined) { + const sizeAlgorithm = shared.makeSizeAlgorithmFromSizeFunction(sizeFunc); + const highWaterMark = shared.validateAndNormalizeHighWaterMark( + stratHWM === undefined ? 1 : stratHWM + ); + setUpReadableStreamDefaultControllerFromUnderlyingSource( + this, + underlyingSource as UnderlyingSource, + highWaterMark, + sizeAlgorithm + ); + } else if (String(sourceType) === "bytes") { + if (sizeFunc !== undefined) { + throw new RangeError( + "bytes streams cannot have a strategy with a `size` field" + ); + } + const highWaterMark = shared.validateAndNormalizeHighWaterMark( + stratHWM === undefined ? 
0 : stratHWM + ); + setUpReadableByteStreamControllerFromUnderlyingSource( + (this as unknown) as rs.SDReadableStream, + underlyingSource as UnderlyingByteSource, + highWaterMark + ); + } else { + throw new RangeError( + "The underlying source's `type` field must be undefined or 'bytes'" + ); + } + } + + get locked(): boolean { + return rs.isReadableStreamLocked(this); + } + + getReader(): rs.SDReadableStreamDefaultReader; + getReader(options: { mode?: "byob" }): rs.SDReadableStreamBYOBReader; + getReader(options?: { + mode?: "byob"; + }): + | rs.SDReadableStreamDefaultReader + | rs.SDReadableStreamBYOBReader { + if (!rs.isReadableStream(this)) { + throw new TypeError(); + } + if (options === undefined) { + options = {}; + } + const { mode } = options; + if (mode === undefined) { + return new ReadableStreamDefaultReader(this); + } else if (String(mode) === "byob") { + return new SDReadableStreamBYOBReader( + (this as unknown) as rs.SDReadableStream + ); + } + throw RangeError("mode option must be undefined or `byob`"); + } + + cancel(reason: shared.ErrorResult): Promise { + if (!rs.isReadableStream(this)) { + return Promise.reject(new TypeError()); + } + if (rs.isReadableStreamLocked(this)) { + return Promise.reject(new TypeError("Cannot cancel a locked stream")); + } + return rs.readableStreamCancel(this, reason); + } + + tee(): SDReadableStream[] { + return readableStreamTee(this, false); + } + + pipeThrough( + transform: rs.GenericTransformStream, + options: PipeOptions = {} + ): rs.SDReadableStream { + const { readable, writable } = transform; + if (!rs.isReadableStream(this)) { + throw new TypeError(); + } + if (!ws.isWritableStream(writable)) { + throw new TypeError("writable must be a WritableStream"); + } + if (!rs.isReadableStream(readable)) { + throw new TypeError("readable must be a ReadableStream"); + } + if (options.signal !== undefined && !shared.isAbortSignal(options.signal)) { + throw new TypeError("options.signal must be an AbortSignal instance"); + } + if (rs.isReadableStreamLocked(this)) { + throw new TypeError("Cannot pipeThrough on a locked stream"); + } + if (ws.isWritableStreamLocked(writable)) { + throw new TypeError("Cannot pipeThrough to a locked stream"); + } + + const pipeResult = pipeTo(this, writable, options); + pipeResult.catch(() => {}); + + return readable; + } + + pipeTo( + dest: ws.WritableStream, + options: PipeOptions = {} + ): Promise { + if (!rs.isReadableStream(this)) { + return Promise.reject(new TypeError()); + } + if (!ws.isWritableStream(dest)) { + return Promise.reject( + new TypeError("destination must be a WritableStream") + ); + } + if (options.signal !== undefined && !shared.isAbortSignal(options.signal)) { + return Promise.reject( + new TypeError("options.signal must be an AbortSignal instance") + ); + } + if (rs.isReadableStreamLocked(this)) { + return Promise.reject(new TypeError("Cannot pipe from a locked stream")); + } + if (ws.isWritableStreamLocked(dest)) { + return Promise.reject(new TypeError("Cannot pipe to a locked stream")); + } + + return pipeTo(this, dest, options); + } +} + +export function createReadableStream( + startAlgorithm: rs.StartAlgorithm, + pullAlgorithm: rs.PullAlgorithm, + cancelAlgorithm: rs.CancelAlgorithm, + highWaterMark?: number, + sizeAlgorithm?: QueuingStrategySizeCallback +) { + if (highWaterMark === undefined) { + highWaterMark = 1; + } + if (sizeAlgorithm === undefined) { + sizeAlgorithm = () => 1; + } + // Assert: ! IsNonNegativeNumber(highWaterMark) is true. 
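+ // With the defaults applied (highWaterMark 1, a size of 1 per chunk, i.e.
+ // CountQueuingStrategy semantics), wire a stream and default controller
+ // directly from the supplied algorithms. This path is used internally by
+ // readableStreamTee() and the transform-stream machinery rather than by
+ // the public ReadableStream constructor.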
+ + const stream = Object.create(SDReadableStream.prototype) as SDReadableStream< + OutputType + >; + rs.initializeReadableStream(stream); + const controller = Object.create( + ReadableStreamDefaultController.prototype + ) as ReadableStreamDefaultController; + rs.setUpReadableStreamDefaultController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + sizeAlgorithm + ); + return stream; +} + +export function createReadableByteStream( + startAlgorithm: rs.StartAlgorithm, + pullAlgorithm: rs.PullAlgorithm, + cancelAlgorithm: rs.CancelAlgorithm, + highWaterMark?: number, + autoAllocateChunkSize?: number +) { + if (highWaterMark === undefined) { + highWaterMark = 0; + } + // Assert: ! IsNonNegativeNumber(highWaterMark) is true. + if (autoAllocateChunkSize !== undefined) { + if ( + !shared.isInteger(autoAllocateChunkSize) || + autoAllocateChunkSize <= 0 + ) { + throw new RangeError( + "autoAllocateChunkSize must be a positive, finite integer" + ); + } + } + + const stream = Object.create(SDReadableStream.prototype) as SDReadableStream< + OutputType + >; + rs.initializeReadableStream(stream); + const controller = Object.create( + ReadableByteStreamController.prototype + ) as ReadableByteStreamController; + rs.setUpReadableByteStreamController( + (stream as unknown) as SDReadableStream, + controller, + startAlgorithm, + (pullAlgorithm as unknown) as rs.PullAlgorithm, + cancelAlgorithm, + highWaterMark, + autoAllocateChunkSize + ); + return stream; +} + +export function readableStreamTee( + stream: SDReadableStream, + cloneForBranch2: boolean +) { + if (!rs.isReadableStream(stream)) { + throw new TypeError(); + } + + const reader = new ReadableStreamDefaultReader(stream); + let closedOrErrored = false; + let canceled1 = false; + let canceled2 = false; + let reason1: shared.ErrorResult; + let reason2: shared.ErrorResult; + let branch1: SDReadableStream; + let branch2: SDReadableStream; + + let cancelResolve: (reason: shared.ErrorResult) => void; + const cancelPromise = new Promise(resolve => (cancelResolve = resolve)); + + const pullAlgorithm = () => { + return rs + .readableStreamDefaultReaderRead(reader) + .then(({ value, done }) => { + if (done && !closedOrErrored) { + if (!canceled1) { + rs.readableStreamDefaultControllerClose(branch1![ + rs.readableStreamController_ + ] as ReadableStreamDefaultController); + } + if (!canceled2) { + rs.readableStreamDefaultControllerClose(branch2![ + rs.readableStreamController_ + ] as ReadableStreamDefaultController); + } + closedOrErrored = true; + } + if (closedOrErrored) { + return; + } + const value1 = value; + let value2 = value; + if (!canceled1) { + rs.readableStreamDefaultControllerEnqueue( + branch1![ + rs.readableStreamController_ + ] as ReadableStreamDefaultController, + value1! + ); + } + if (!canceled2) { + if (cloneForBranch2) { + value2 = shared.cloneValue(value2); + } + rs.readableStreamDefaultControllerEnqueue( + branch2![ + rs.readableStreamController_ + ] as ReadableStreamDefaultController, + value2! 
+ ); + } + }); + }; + + const cancel1Algorithm = (reason: shared.ErrorResult) => { + canceled1 = true; + reason1 = reason; + if (canceled2) { + const cancelResult = rs.readableStreamCancel(stream, [reason1, reason2]); + cancelResolve(cancelResult); + } + return cancelPromise; + }; + + const cancel2Algorithm = (reason: shared.ErrorResult) => { + canceled2 = true; + reason2 = reason; + if (canceled1) { + const cancelResult = rs.readableStreamCancel(stream, [reason1, reason2]); + cancelResolve(cancelResult); + } + return cancelPromise; + }; + + const startAlgorithm = () => undefined; + branch1 = createReadableStream( + startAlgorithm, + pullAlgorithm, + cancel1Algorithm + ); + branch2 = createReadableStream( + startAlgorithm, + pullAlgorithm, + cancel2Algorithm + ); + + reader[rs.closedPromise_].promise.catch(error => { + if (!closedOrErrored) { + rs.readableStreamDefaultControllerError( + branch1![ + rs.readableStreamController_ + ] as ReadableStreamDefaultController, + error + ); + rs.readableStreamDefaultControllerError( + branch2![ + rs.readableStreamController_ + ] as ReadableStreamDefaultController, + error + ); + closedOrErrored = true; + } + }); + + return [branch1, branch2]; +} diff --git a/cli/js/streams/shared-internals.ts b/cli/js/streams/shared-internals.ts new file mode 100644 index 00000000000000..3442e5323dfe80 --- /dev/null +++ b/cli/js/streams/shared-internals.ts @@ -0,0 +1,307 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/shared-internals - common types and methods for streams + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +import { AbortSignal, QueuingStrategySizeCallback } from "../dom_types.ts"; +import { DenoError, ErrorKind } from "../errors.ts"; + +// common stream fields + +export const state_ = Symbol("state_"); +export const storedError_ = Symbol("storedError_"); + +// --------- + +/** An error reason / result can be anything */ +export type ErrorResult = any; + +// --------- + +export function isInteger(value: number) { + if (!isFinite(value)) { + // covers NaN, +Infinity and -Infinity + return false; + } + const absValue = Math.abs(value); + return Math.floor(absValue) === absValue; +} + +export function isFiniteNonNegativeNumber(value: unknown) { + if (!(typeof value === "number" && isFinite(value))) { + // covers NaN, +Infinity and -Infinity + return false; + } + return value >= 0; +} + +export function isAbortSignal(signal: any): signal is AbortSignal { + if (typeof signal !== "object" || signal === null) { + return false; + } + try { + // TODO + // calling signal.aborted() probably isn't the right way to perform this test + // https://github.com/stardazed/sd-streams/blob/master/packages/streams/src/shared-internals.ts#L41 + signal.aborted(); + return true; + } catch (err) { + return false; + } +} + +export function invokeOrNoop( + o: O, + p: P, + args: any[] +) { + // Assert: O is not undefined. + // Assert: IsPropertyKey(P) is true. + // Assert: args is a List. 
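+ // Look up o[p]; when the property is absent the call is a no-op that
+ // yields undefined, otherwise the method is invoked with the given
+ // arguments. This is how optional hooks such as an underlying source's
+ // start() are called.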
+ const method: Function | undefined = (o as any)[p]; // tslint:disable-line:ban-types + if (method === undefined) { + return undefined; + } + return Function.prototype.apply.call(method, o, args); +} + +export function cloneArrayBuffer( + srcBuffer: ArrayBufferLike, + srcByteOffset: number, + srcLength: number, + cloneConstructor: ArrayBufferConstructor | SharedArrayBufferConstructor +): InstanceType { + // this function fudges the return type but SharedArrayBuffer is disabled for a while anyway + return srcBuffer.slice( + srcByteOffset, + srcByteOffset + srcLength + ) as InstanceType; +} + +export function transferArrayBuffer(buffer: ArrayBufferLike) { + // This would in a JS engine context detach the buffer's backing store and return + // a new ArrayBuffer with the same backing store, invalidating `buffer`, + // i.e. a move operation in C++ parlance. + // Sadly ArrayBuffer.transfer is yet to be implemented by a single browser vendor. + return buffer.slice(0); // copies instead of moves +} + +export function copyDataBlockBytes( + toBlock: ArrayBufferLike, + toIndex: number, + fromBlock: ArrayBufferLike, + fromIndex: number, + count: number +) { + new Uint8Array(toBlock, toIndex, count).set( + new Uint8Array(fromBlock, fromIndex, count) + ); +} + +// helper memoisation map for object values +// weak so it doesn't keep memoized versions of old objects indefinitely. +const objectCloneMemo = new WeakMap(); + +let sharedArrayBufferSupported_: boolean | undefined; +function supportsSharedArrayBuffer(): boolean { + if (sharedArrayBufferSupported_ === undefined) { + try { + new SharedArrayBuffer(16); + sharedArrayBufferSupported_ = true; + } catch (e) { + sharedArrayBufferSupported_ = false; + } + } + return sharedArrayBufferSupported_; +} + +/** + * Implement a method of value cloning that is reasonably close to performing `StructuredSerialize(StructuredDeserialize(value))` + * from the HTML standard. Used by the internal `readableStreamTee` method to clone values for connected implementations. + * @see https://html.spec.whatwg.org/multipage/structured-data.html#structuredserializeinternal + */ +export function cloneValue(value: any): any { + const valueType = typeof value; + switch (valueType) { + case "number": + case "string": + case "boolean": + case "undefined": + // @ts-ignore + case "bigint": + return value; + case "object": { + if (objectCloneMemo.has(value)) { + return objectCloneMemo.get(value); + } + if (value === null) { + return value; + } + if (value instanceof Date) { + return new Date(value.valueOf()); + } + if (value instanceof RegExp) { + return new RegExp(value); + } + if (supportsSharedArrayBuffer() && value instanceof SharedArrayBuffer) { + return value; + } + if (value instanceof ArrayBuffer) { + const cloned = cloneArrayBuffer( + value, + 0, + value.byteLength, + ArrayBuffer + ); + objectCloneMemo.set(value, cloned); + return cloned; + } + if (ArrayBuffer.isView(value)) { + const clonedBuffer = cloneValue(value.buffer) as ArrayBufferLike; + // Use DataViewConstructor type purely for type-checking, can be a DataView or TypedArray. + // They use the same constructor signature, only DataView has a length in bytes and TypedArrays + // use a length in terms of elements, so we adjust for that. 
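+ // e.g. a DataView over 8 bytes reports byteLength 8, while a Uint16Array
+ // over the same 8 bytes reports length 4 (elements), so the reconstructed
+ // view is created with whichever count its constructor expects.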
+ let length: number;
+ if (value instanceof DataView) {
+ length = value.byteLength;
+ } else {
+ length = (value as Uint8Array).length;
+ }
+ return new (value.constructor as DataViewConstructor)(
+ clonedBuffer,
+ value.byteOffset,
+ length
+ );
+ }
+ if (value instanceof Map) {
+ const clonedMap = new Map();
+ objectCloneMemo.set(value, clonedMap);
+ value.forEach((v, k) => clonedMap.set(k, cloneValue(v)));
+ return clonedMap;
+ }
+ if (value instanceof Set) {
+ // Clone a Set as a Set (not a Map), cloning each member individually.
+ const clonedSet = new Set();
+ objectCloneMemo.set(value, clonedSet);
+ value.forEach(v => clonedSet.add(cloneValue(v)));
+ return clonedSet;
+ }
+
+ // generic object
+ const clonedObj = {} as any;
+ objectCloneMemo.set(value, clonedObj);
+ const sourceKeys = Object.getOwnPropertyNames(value);
+ for (const key of sourceKeys) {
+ clonedObj[key] = cloneValue(value[key]);
+ }
+ return clonedObj;
+ }
+ case "symbol":
+ case "function":
+ default:
+ // TODO this should be a DOMException,
+ // https://github.com/stardazed/sd-streams/blob/master/packages/streams/src/shared-internals.ts#L171
+ throw new DenoError(
+ ErrorKind.DataCloneError,
+ "Uncloneable value in stream"
+ );
+ }
+}
+
+export function promiseCall<F extends Function>( // tslint:disable-line:ban-types
+ f: F,
+ v: object | undefined,
+ args: any[]
+) {
+ try {
+ const result = Function.prototype.apply.call(f, v, args);
+ return Promise.resolve(result);
+ } catch (err) {
+ return Promise.reject(err);
+ }
+}
+
+export function createAlgorithmFromUnderlyingMethod<
+ O extends object,
+ K extends keyof O
+>(obj: O, methodName: K, extraArgs: any[]) {
+ const method = obj[methodName];
+ if (method === undefined) {
+ return () => Promise.resolve(undefined);
+ }
+ if (typeof method !== "function") {
+ throw new TypeError(`Field "${methodName}" is not a function.`);
+ }
+ return function(...fnArgs: any[]) {
+ return promiseCall(method, obj, fnArgs.concat(extraArgs));
+ };
+}
+
+/*
+Deprecated for now, all usages replaced by readableStreamCreateReadResult
+
+function createIterResultObject<T>(value: T, done: boolean): IteratorResult<T> {
+ return { value, done };
+}
+*/
+
+export function validateAndNormalizeHighWaterMark(hwm: unknown) {
+ const highWaterMark = Number(hwm);
+ if (isNaN(highWaterMark) || highWaterMark < 0) {
+ throw new RangeError(
+ "highWaterMark must be a valid, non-negative number."
+ ); + } + return highWaterMark; +} + +export function makeSizeAlgorithmFromSizeFunction( + sizeFn: undefined | ((chunk: T) => number) +): QueuingStrategySizeCallback { + if (typeof sizeFn !== "function" && typeof sizeFn !== "undefined") { + throw new TypeError("size function must be undefined or a function"); + } + return function(chunk: T) { + if (typeof sizeFn === "function") { + return sizeFn(chunk); + } + return 1; + }; +} + +// ---- + +export const enum ControlledPromiseState { + Pending, + Resolved, + Rejected +} + +export interface ControlledPromise { + resolve(value?: V): void; + reject(error: ErrorResult): void; + promise: Promise; + state: ControlledPromiseState; +} + +export function createControlledPromise(): ControlledPromise { + const conProm = { + state: ControlledPromiseState.Pending + } as ControlledPromise; + conProm.promise = new Promise(function(resolve, reject) { + conProm.resolve = function(v?: V) { + conProm.state = ControlledPromiseState.Resolved; + resolve(v); + }; + conProm.reject = function(e?: ErrorResult) { + conProm.state = ControlledPromiseState.Rejected; + reject(e); + }; + }); + return conProm; +} diff --git a/cli/js/streams/strategies.ts b/cli/js/streams/strategies.ts new file mode 100644 index 00000000000000..84c09d84583a1e --- /dev/null +++ b/cli/js/streams/strategies.ts @@ -0,0 +1,36 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/strategies - implementation of the built-in stream strategies + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +import { QueuingStrategy } from "../dom_types.ts"; + +export class ByteLengthQueuingStrategy + implements QueuingStrategy { + highWaterMark: number; + + constructor(options: { highWaterMark: number }) { + this.highWaterMark = options.highWaterMark; + } + + size(chunk: ArrayBufferView) { + return chunk.byteLength; + } +} + +export class CountQueuingStrategy implements QueuingStrategy { + highWaterMark: number; + + constructor(options: { highWaterMark: number }) { + this.highWaterMark = options.highWaterMark; + } + + size() { + return 1; + } +} diff --git a/cli/js/streams/transform-internals.ts b/cli/js/streams/transform-internals.ts new file mode 100644 index 00000000000000..dd481dc8bd65e6 --- /dev/null +++ b/cli/js/streams/transform-internals.ts @@ -0,0 +1,362 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/transform-internals - internal types and functions for transform streams + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +import * as rs from "./readable-internals.ts"; +import * as ws from "./writable-internals.ts"; +import * as shared from "./shared-internals.ts"; + +import { createReadableStream } from "./readable-stream.ts"; +import { createWritableStream } from "./writable-stream.ts"; + +import { QueuingStrategy, QueuingStrategySizeCallback } from "../dom_types.ts"; + +export const state_ = Symbol("transformState_"); +export const backpressure_ = Symbol("backpressure_"); +export const backpressureChangePromise_ = Symbol("backpressureChangePromise_"); +export const readable_ = Symbol("readable_"); +export const transformStreamController_ 
= Symbol("transformStreamController_"); +export const writable_ = Symbol("writable_"); + +export const controlledTransformStream_ = Symbol("controlledTransformStream_"); +export const flushAlgorithm_ = Symbol("flushAlgorithm_"); +export const transformAlgorithm_ = Symbol("transformAlgorithm_"); + +// ---- + +export type TransformFunction = ( + chunk: InputType, + controller: TransformStreamDefaultController +) => void | PromiseLike; +export type TransformAlgorithm = (chunk: InputType) => Promise; +export type FlushFunction = ( + controller: TransformStreamDefaultController +) => void | PromiseLike; +export type FlushAlgorithm = () => Promise; + +// ---- + +export interface TransformStreamDefaultController { + readonly desiredSize: number | null; + enqueue(chunk: OutputType): void; + error(reason: shared.ErrorResult): void; + terminate(): void; + + [controlledTransformStream_]: TransformStream; // The TransformStream instance controlled; also used for the IsTransformStreamDefaultController brand check + [flushAlgorithm_]: FlushAlgorithm; // A promise - returning algorithm which communicates a requested close to the transformer + [transformAlgorithm_]: TransformAlgorithm; // A promise - returning algorithm, taking one argument(the chunk to transform), which requests the transformer perform its transformation +} + +export interface Transformer { + start?( + controller: TransformStreamDefaultController + ): void | PromiseLike; + transform?: TransformFunction; + flush?: FlushFunction; + + readableType?: undefined; // for future spec changes + writableType?: undefined; // for future spec changes +} + +export declare class TransformStream { + constructor( + transformer: Transformer, + writableStrategy: QueuingStrategy, + readableStrategy: QueuingStrategy + ); + + readonly readable: rs.SDReadableStream; + readonly writable: ws.WritableStream; + + [backpressure_]: boolean | undefined; // Whether there was backpressure on [[readable]] the last time it was observed + [backpressureChangePromise_]: shared.ControlledPromise | undefined; // A promise which is fulfilled and replaced every time the value of[[backpressure]] changes + [readable_]: rs.SDReadableStream; // The ReadableStream instance controlled by this object + [transformStreamController_]: TransformStreamDefaultController< + InputType, + OutputType + >; // A TransformStreamDefaultController created with the ability to control[[readable]] and[[writable]]; also used for the IsTransformStream brand check + [writable_]: ws.WritableStream; // The WritableStream instance controlled by this object +} + +// ---- TransformStream + +export function isTransformStream( + value: unknown +): value is TransformStream { + if (typeof value !== "object" || value === null) { + return false; + } + return transformStreamController_ in value; +} + +export function initializeTransformStream( + stream: TransformStream, + startPromise: Promise, + writableHighWaterMark: number, + writableSizeAlgorithm: QueuingStrategySizeCallback, + readableHighWaterMark: number, + readableSizeAlgorithm: QueuingStrategySizeCallback +) { + const startAlgorithm = function() { + return startPromise; + }; + const writeAlgorithm = function(chunk: InputType) { + return transformStreamDefaultSinkWriteAlgorithm(stream, chunk); + }; + const abortAlgorithm = function(reason: shared.ErrorResult) { + return transformStreamDefaultSinkAbortAlgorithm(stream, reason); + }; + const closeAlgorithm = function() { + return transformStreamDefaultSinkCloseAlgorithm(stream); + }; + stream[writable_] = 
createWritableStream( + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + writableHighWaterMark, + writableSizeAlgorithm + ); + + const pullAlgorithm = function() { + return transformStreamDefaultSourcePullAlgorithm(stream); + }; + const cancelAlgorithm = function(reason: shared.ErrorResult) { + transformStreamErrorWritableAndUnblockWrite(stream, reason); + return Promise.resolve(undefined); + }; + stream[readable_] = createReadableStream( + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + readableHighWaterMark, + readableSizeAlgorithm + ); + + stream[backpressure_] = undefined; + stream[backpressureChangePromise_] = undefined; + transformStreamSetBackpressure(stream, true); + stream[transformStreamController_] = undefined!; // initialize slot for brand-check +} + +export function transformStreamError( + stream: TransformStream, + error: shared.ErrorResult +) { + rs.readableStreamDefaultControllerError( + stream[readable_][ + rs.readableStreamController_ + ] as rs.SDReadableStreamDefaultController, + error + ); + transformStreamErrorWritableAndUnblockWrite(stream, error); +} + +export function transformStreamErrorWritableAndUnblockWrite< + InputType, + OutputType +>(stream: TransformStream, error: shared.ErrorResult) { + transformStreamDefaultControllerClearAlgorithms( + stream[transformStreamController_] + ); + ws.writableStreamDefaultControllerErrorIfNeeded( + stream[writable_][ws.writableStreamController_]!, + error + ); + if (stream[backpressure_]) { + transformStreamSetBackpressure(stream, false); + } +} + +export function transformStreamSetBackpressure( + stream: TransformStream, + backpressure: boolean +) { + // Assert: stream.[[backpressure]] is not backpressure. + if (stream[backpressure_] !== undefined) { + stream[backpressureChangePromise_]!.resolve(undefined); + } + stream[backpressureChangePromise_] = shared.createControlledPromise(); + stream[backpressure_] = backpressure; +} + +// ---- TransformStreamDefaultController + +export function isTransformStreamDefaultController( + value: unknown +): value is TransformStreamDefaultController { + if (typeof value !== "object" || value === null) { + return false; + } + return controlledTransformStream_ in value; +} + +export function setUpTransformStreamDefaultController( + stream: TransformStream, + controller: TransformStreamDefaultController, + transformAlgorithm: TransformAlgorithm, + flushAlgorithm: FlushAlgorithm +) { + // Assert: ! IsTransformStream(stream) is true. + // Assert: stream.[[transformStreamController]] is undefined. + controller[controlledTransformStream_] = stream; + stream[transformStreamController_] = controller; + controller[transformAlgorithm_] = transformAlgorithm; + controller[flushAlgorithm_] = flushAlgorithm; +} + +export function transformStreamDefaultControllerClearAlgorithms< + InputType, + OutputType +>(controller: TransformStreamDefaultController) { + // Use ! assertions to override type check here, this way we don't + // have to perform type checks/assertions everywhere else. 
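+ // Dropping these references once they are no longer needed lets the
+ // transformer object and anything it captured be garbage collected even
+ // while the TransformStream itself remains reachable.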
+ controller[transformAlgorithm_] = undefined!; + controller[flushAlgorithm_] = undefined!; +} + +export function transformStreamDefaultControllerEnqueue( + controller: TransformStreamDefaultController, + chunk: OutputType +) { + const stream = controller[controlledTransformStream_]; + const readableController = stream[readable_][ + rs.readableStreamController_ + ] as rs.SDReadableStreamDefaultController; + if ( + !rs.readableStreamDefaultControllerCanCloseOrEnqueue(readableController) + ) { + throw new TypeError(); + } + try { + rs.readableStreamDefaultControllerEnqueue(readableController, chunk); + } catch (error) { + transformStreamErrorWritableAndUnblockWrite(stream, error); + throw stream[readable_][shared.storedError_]; + } + const backpressure = rs.readableStreamDefaultControllerHasBackpressure( + readableController + ); + if (backpressure !== stream[backpressure_]) { + // Assert: backpressure is true. + transformStreamSetBackpressure(stream, true); + } +} + +export function transformStreamDefaultControllerError( + controller: TransformStreamDefaultController, + error: shared.ErrorResult +) { + transformStreamError(controller[controlledTransformStream_], error); +} + +export function transformStreamDefaultControllerPerformTransform< + InputType, + OutputType +>( + controller: TransformStreamDefaultController, + chunk: InputType +) { + const transformPromise = controller[transformAlgorithm_](chunk); + return transformPromise.catch(error => { + transformStreamError(controller[controlledTransformStream_], error); + throw error; + }); +} + +export function transformStreamDefaultControllerTerminate< + InputType, + OutputType +>(controller: TransformStreamDefaultController) { + const stream = controller[controlledTransformStream_]; + const readableController = stream[readable_][ + rs.readableStreamController_ + ] as rs.SDReadableStreamDefaultController; + if (rs.readableStreamDefaultControllerCanCloseOrEnqueue(readableController)) { + rs.readableStreamDefaultControllerClose(readableController); + } + const error = new TypeError("The transform stream has been terminated"); + transformStreamErrorWritableAndUnblockWrite(stream, error); +} + +// ---- Transform Sinks + +export function transformStreamDefaultSinkWriteAlgorithm( + stream: TransformStream, + chunk: InputType +) { + // Assert: stream.[[writable]].[[state]] is "writable". + const controller = stream[transformStreamController_]; + if (stream[backpressure_]) { + const backpressureChangePromise = stream[backpressureChangePromise_]!; + // Assert: backpressureChangePromise is not undefined. + return backpressureChangePromise.promise.then(_ => { + const writable = stream[writable_]; + const state = writable[shared.state_]; + if (state === "erroring") { + throw writable[shared.storedError_]; + } + // Assert: state is "writable". 
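+ // Backpressure has lifted and the writable side is still writable, so the
+ // buffered chunk can now be handed to the transformer.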
+ return transformStreamDefaultControllerPerformTransform( + controller, + chunk + ); + }); + } + return transformStreamDefaultControllerPerformTransform(controller, chunk); +} + +export function transformStreamDefaultSinkAbortAlgorithm( + stream: TransformStream, + reason: shared.ErrorResult +) { + transformStreamError(stream, reason); + return Promise.resolve(undefined); +} + +export function transformStreamDefaultSinkCloseAlgorithm( + stream: TransformStream +) { + const readable = stream[readable_]; + const controller = stream[transformStreamController_]; + const flushPromise = controller[flushAlgorithm_](); + transformStreamDefaultControllerClearAlgorithms(controller); + + return flushPromise.then( + _ => { + if (readable[shared.state_] === "errored") { + throw readable[shared.storedError_]; + } + const readableController = readable[ + rs.readableStreamController_ + ] as rs.SDReadableStreamDefaultController; + if ( + rs.readableStreamDefaultControllerCanCloseOrEnqueue(readableController) + ) { + rs.readableStreamDefaultControllerClose(readableController); + } + }, + error => { + transformStreamError(stream, error); + throw readable[shared.storedError_]; + } + ); +} + +// ---- Transform Sources + +export function transformStreamDefaultSourcePullAlgorithm< + InputType, + OutputType +>(stream: TransformStream) { + // Assert: stream.[[backpressure]] is true. + // Assert: stream.[[backpressureChangePromise]] is not undefined. + transformStreamSetBackpressure(stream, false); + return stream[backpressureChangePromise_]!.promise; +} diff --git a/cli/js/streams/transform-stream-default-controller.ts b/cli/js/streams/transform-stream-default-controller.ts new file mode 100644 index 00000000000000..575823628e079f --- /dev/null +++ b/cli/js/streams/transform-stream-default-controller.ts @@ -0,0 +1,57 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/transform-stream-default-controller - TransformStreamDefaultController class implementation + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +import * as rs from "./readable-internals.ts"; +import * as ts from "./transform-internals.ts"; +import { ErrorResult } from "./shared-internals.ts"; + +export class TransformStreamDefaultController + implements ts.TransformStreamDefaultController { + [ts.controlledTransformStream_]: ts.TransformStream; + [ts.flushAlgorithm_]: ts.FlushAlgorithm; + [ts.transformAlgorithm_]: ts.TransformAlgorithm; + + constructor() { + throw new TypeError(); + } + + get desiredSize(): number | null { + if (!ts.isTransformStreamDefaultController(this)) { + throw new TypeError(); + } + const readableController = this[ts.controlledTransformStream_][ + ts.readable_ + ][rs.readableStreamController_] as rs.SDReadableStreamDefaultController< + OutputType + >; + return rs.readableStreamDefaultControllerGetDesiredSize(readableController); + } + + enqueue(chunk: OutputType): void { + if (!ts.isTransformStreamDefaultController(this)) { + throw new TypeError(); + } + ts.transformStreamDefaultControllerEnqueue(this, chunk); + } + + error(reason: ErrorResult): void { + if (!ts.isTransformStreamDefaultController(this)) { + throw new TypeError(); + } + ts.transformStreamDefaultControllerError(this, reason); + } + + terminate(): void { + if (!ts.isTransformStreamDefaultController(this)) { + throw new TypeError(); 
+ } + ts.transformStreamDefaultControllerTerminate(this); + } +} diff --git a/cli/js/streams/transform-stream.ts b/cli/js/streams/transform-stream.ts new file mode 100644 index 00000000000000..73ebaf50dddf46 --- /dev/null +++ b/cli/js/streams/transform-stream.ts @@ -0,0 +1,143 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/transform-stream - TransformStream class implementation + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +import * as rs from "./readable-internals.ts"; +import * as ws from "./writable-internals.ts"; +import * as ts from "./transform-internals.ts"; +import * as shared from "./shared-internals.ts"; +import { TransformStreamDefaultController } from "./transform-stream-default-controller.ts"; +import { QueuingStrategy } from "../dom_types.ts"; + +export class TransformStream { + [ts.backpressure_]: boolean | undefined; // Whether there was backpressure on [[readable]] the last time it was observed + [ts.backpressureChangePromise_]: shared.ControlledPromise; // A promise which is fulfilled and replaced every time the value of[[backpressure]] changes + [ts.readable_]: rs.SDReadableStream; // The ReadableStream instance controlled by this object + [ts.transformStreamController_]: TransformStreamDefaultController< + InputType, + OutputType + >; // A TransformStreamDefaultController created with the ability to control[[readable]] and[[writable]]; also used for the IsTransformStream brand check + [ts.writable_]: ws.WritableStream; // The WritableStream instance controlled by this object + + constructor( + transformer: ts.Transformer = {}, + writableStrategy: QueuingStrategy = {}, + readableStrategy: QueuingStrategy = {} + ) { + const writableSizeFunction = writableStrategy.size; + const writableHighWaterMark = writableStrategy.highWaterMark; + const readableSizeFunction = readableStrategy.size; + const readableHighWaterMark = readableStrategy.highWaterMark; + + const writableType = transformer.writableType; + if (writableType !== undefined) { + throw new RangeError( + "The transformer's `writableType` field must be undefined" + ); + } + const writableSizeAlgorithm = shared.makeSizeAlgorithmFromSizeFunction( + writableSizeFunction + ); + const writableHWM = shared.validateAndNormalizeHighWaterMark( + writableHighWaterMark === undefined ? 1 : writableHighWaterMark + ); + + const readableType = transformer.readableType; + if (readableType !== undefined) { + throw new RangeError( + "The transformer's `readableType` field must be undefined" + ); + } + const readableSizeAlgorithm = shared.makeSizeAlgorithmFromSizeFunction( + readableSizeFunction + ); + const readableHWM = shared.validateAndNormalizeHighWaterMark( + readableHighWaterMark === undefined ? 
0 : readableHighWaterMark + ); + + const startPromise = shared.createControlledPromise(); + ts.initializeTransformStream( + this, + startPromise.promise, + writableHWM, + writableSizeAlgorithm, + readableHWM, + readableSizeAlgorithm + ); + setUpTransformStreamDefaultControllerFromTransformer(this, transformer); + + const startResult = shared.invokeOrNoop(transformer, "start", [ + this[ts.transformStreamController_] + ]); + startPromise.resolve(startResult); + } + + get readable(): rs.SDReadableStream { + if (!ts.isTransformStream(this)) { + throw new TypeError(); + } + return this[ts.readable_]; + } + + get writable(): ws.WritableStream { + if (!ts.isTransformStream(this)) { + throw new TypeError(); + } + return this[ts.writable_]; + } +} + +function setUpTransformStreamDefaultControllerFromTransformer< + InputType, + OutputType +>( + stream: TransformStream, + transformer: ts.Transformer +) { + const controller = Object.create( + TransformStreamDefaultController.prototype + ) as TransformStreamDefaultController; + let transformAlgorithm: ts.TransformAlgorithm; + + const transformMethod = transformer.transform; + if (transformMethod !== undefined) { + if (typeof transformMethod !== "function") { + throw new TypeError( + "`transform` field of the transformer must be a function" + ); + } + transformAlgorithm = (chunk: InputType) => + shared.promiseCall(transformMethod, transformer, [chunk, controller]); + } else { + // use identity transform + transformAlgorithm = function(chunk: InputType) { + try { + // OutputType and InputType are the same here + ts.transformStreamDefaultControllerEnqueue( + controller, + (chunk as unknown) as OutputType + ); + } catch (error) { + return Promise.reject(error); + } + return Promise.resolve(undefined); + }; + } + const flushAlgorithm = shared.createAlgorithmFromUnderlyingMethod( + transformer, + "flush", + [controller] + ); + ts.setUpTransformStreamDefaultController( + stream, + controller, + transformAlgorithm, + flushAlgorithm + ); +} diff --git a/cli/js/streams/writable-internals.ts b/cli/js/streams/writable-internals.ts new file mode 100644 index 00000000000000..f59bc949487f37 --- /dev/null +++ b/cli/js/streams/writable-internals.ts @@ -0,0 +1,790 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/writable-internals - internal types and functions for writable streams + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +import * as shared from "./shared-internals.ts"; +import * as q from "./queue-mixin.ts"; + +import { QueuingStrategy, QueuingStrategySizeCallback } from "../dom_types.ts"; + +export const backpressure_ = Symbol("backpressure_"); +export const closeRequest_ = Symbol("closeRequest_"); +export const inFlightWriteRequest_ = Symbol("inFlightWriteRequest_"); +export const inFlightCloseRequest_ = Symbol("inFlightCloseRequest_"); +export const pendingAbortRequest_ = Symbol("pendingAbortRequest_"); +export const writableStreamController_ = Symbol("writableStreamController_"); +export const writer_ = Symbol("writer_"); +export const writeRequests_ = Symbol("writeRequests_"); + +export const abortAlgorithm_ = Symbol("abortAlgorithm_"); +export const closeAlgorithm_ = Symbol("closeAlgorithm_"); +export const controlledWritableStream_ = Symbol("controlledWritableStream_"); +export const started_ = Symbol("started_"); 
+export const strategyHWM_ = Symbol("strategyHWM_"); +export const strategySizeAlgorithm_ = Symbol("strategySizeAlgorithm_"); +export const writeAlgorithm_ = Symbol("writeAlgorithm_"); + +export const ownerWritableStream_ = Symbol("ownerWritableStream_"); +export const closedPromise_ = Symbol("closedPromise_"); +export const readyPromise_ = Symbol("readyPromise_"); + +export const errorSteps_ = Symbol("errorSteps_"); +export const abortSteps_ = Symbol("abortSteps_"); + +export type StartFunction = ( + controller: WritableStreamController +) => void | PromiseLike; +export type StartAlgorithm = () => Promise | void; +export type WriteFunction = ( + chunk: InputType, + controller: WritableStreamController +) => void | PromiseLike; +export type WriteAlgorithm = (chunk: InputType) => Promise; +export type CloseAlgorithm = () => Promise; +export type AbortAlgorithm = (reason?: shared.ErrorResult) => Promise; + +// ---- + +export interface WritableStreamController { + error(e?: shared.ErrorResult): void; + + [errorSteps_](): void; + [abortSteps_](reason: shared.ErrorResult): Promise; +} + +export interface WriteRecord { + chunk: InputType; +} + +export interface WritableStreamDefaultController + extends WritableStreamController, + q.QueueContainer | "close"> { + [abortAlgorithm_]: AbortAlgorithm; // A promise - returning algorithm, taking one argument(the abort reason), which communicates a requested abort to the underlying sink + [closeAlgorithm_]: CloseAlgorithm; // A promise - returning algorithm which communicates a requested close to the underlying sink + [controlledWritableStream_]: WritableStream; // The WritableStream instance controlled + [started_]: boolean; // A boolean flag indicating whether the underlying sink has finished starting + [strategyHWM_]: number; // A number supplied by the creator of the stream as part of the stream’s queuing strategy, indicating the point at which the stream will apply backpressure to its underlying sink + [strategySizeAlgorithm_]: QueuingStrategySizeCallback; // An algorithm to calculate the size of enqueued chunks, as part of the stream’s queuing strategy + [writeAlgorithm_]: WriteAlgorithm; // A promise-returning algorithm, taking one argument (the chunk to write), which writes data to the underlying sink +} + +// ---- + +export interface WritableStreamWriter { + readonly closed: Promise; + readonly desiredSize: number | null; + readonly ready: Promise; + + abort(reason: shared.ErrorResult): Promise; + close(): Promise; + releaseLock(): void; + write(chunk: InputType): Promise; +} + +export interface WritableStreamDefaultWriter + extends WritableStreamWriter { + [ownerWritableStream_]: WritableStream | undefined; + [closedPromise_]: shared.ControlledPromise; + [readyPromise_]: shared.ControlledPromise; +} + +// ---- + +export type WritableStreamState = + | "writable" + | "closed" + | "erroring" + | "errored"; + +export interface WritableStreamSink { + start?: StartFunction; + write?: WriteFunction; + close?(): void | PromiseLike; + abort?(reason?: shared.ErrorResult): void; + + type?: undefined; // unused, for future revisions +} + +export interface AbortRequest { + reason: shared.ErrorResult; + wasAlreadyErroring: boolean; + promise: Promise; + resolve(): void; + reject(error: shared.ErrorResult): void; +} + +export declare class WritableStream { + constructor( + underlyingSink?: WritableStreamSink, + strategy?: QueuingStrategy + ); + + readonly locked: boolean; + abort(reason?: shared.ErrorResult): Promise; + getWriter(): WritableStreamWriter; + + 
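+ // Internal slots from the Streams spec, stored as symbol-keyed properties: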
[shared.state_]: WritableStreamState; + [backpressure_]: boolean; + [closeRequest_]: shared.ControlledPromise | undefined; + [inFlightWriteRequest_]: shared.ControlledPromise | undefined; + [inFlightCloseRequest_]: shared.ControlledPromise | undefined; + [pendingAbortRequest_]: AbortRequest | undefined; + [shared.storedError_]: shared.ErrorResult; + [writableStreamController_]: + | WritableStreamDefaultController + | undefined; + [writer_]: WritableStreamDefaultWriter | undefined; + [writeRequests_]: shared.ControlledPromise[]; +} + +// ---- Stream + +export function initializeWritableStream( + stream: WritableStream +) { + stream[shared.state_] = "writable"; + stream[shared.storedError_] = undefined; + stream[writer_] = undefined; + stream[writableStreamController_] = undefined; + stream[inFlightWriteRequest_] = undefined; + stream[closeRequest_] = undefined; + stream[inFlightCloseRequest_] = undefined; + stream[pendingAbortRequest_] = undefined; + stream[writeRequests_] = []; + stream[backpressure_] = false; +} + +export function isWritableStream(value: unknown): value is WritableStream { + if (typeof value !== "object" || value === null) { + return false; + } + return writableStreamController_ in value; +} + +export function isWritableStreamLocked( + stream: WritableStream +) { + return stream[writer_] !== undefined; +} + +export function writableStreamAbort( + stream: WritableStream, + reason: shared.ErrorResult +) { + const state = stream[shared.state_]; + if (state === "closed" || state === "errored") { + return Promise.resolve(undefined); + } + let pending = stream[pendingAbortRequest_]; + if (pending !== undefined) { + return pending.promise; + } + // Assert: state is "writable" or "erroring". + let wasAlreadyErroring = false; + if (state === "erroring") { + wasAlreadyErroring = true; + reason = undefined; + } + + pending = { + reason, + wasAlreadyErroring + } as AbortRequest; + const promise = new Promise((resolve, reject) => { + pending!.resolve = resolve; + pending!.reject = reject; + }); + pending.promise = promise; + stream[pendingAbortRequest_] = pending; + if (!wasAlreadyErroring) { + writableStreamStartErroring(stream, reason); + } + return promise; +} + +export function writableStreamAddWriteRequest( + stream: WritableStream +) { + // Assert: !IsWritableStreamLocked(stream) is true. + // Assert: stream.[[state]] is "writable". + const writePromise = shared.createControlledPromise(); + stream[writeRequests_].push(writePromise); + return writePromise.promise; +} + +export function writableStreamDealWithRejection( + stream: WritableStream, + error: shared.ErrorResult +) { + const state = stream[shared.state_]; + if (state === "writable") { + writableStreamStartErroring(stream, error); + return; + } + // Assert: state is "erroring" + writableStreamFinishErroring(stream); +} + +export function writableStreamStartErroring( + stream: WritableStream, + reason: shared.ErrorResult +) { + // Assert: stream.[[storedError]] is undefined. + // Assert: stream.[[state]] is "writable". + const controller = stream[writableStreamController_]!; + // Assert: controller is not undefined. 
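+ // Enter the intermediate "erroring" state; the stream only becomes "errored"
+ // once no write/close is marked in flight (see writableStreamFinishErroring).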
+ stream[shared.state_] = "erroring"; + stream[shared.storedError_] = reason; + const writer = stream[writer_]; + if (writer !== undefined) { + writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason); + } + if ( + !writableStreamHasOperationMarkedInFlight(stream) && + controller[started_] + ) { + writableStreamFinishErroring(stream); + } +} + +export function writableStreamFinishErroring( + stream: WritableStream +) { + // Assert: stream.[[state]] is "erroring". + // Assert: writableStreamHasOperationMarkedInFlight(stream) is false. + stream[shared.state_] = "errored"; + const controller = stream[writableStreamController_]!; + controller[errorSteps_](); + const storedError = stream[shared.storedError_]; + for (const writeRequest of stream[writeRequests_]) { + writeRequest.reject(storedError); + } + stream[writeRequests_] = []; + + const abortRequest = stream[pendingAbortRequest_]; + if (abortRequest === undefined) { + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + return; + } + stream[pendingAbortRequest_] = undefined; + if (abortRequest.wasAlreadyErroring) { + abortRequest.reject(storedError); + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + return; + } + const promise = controller[abortSteps_](abortRequest.reason); + promise.then( + _ => { + abortRequest.resolve(); + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + }, + error => { + abortRequest.reject(error); + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + } + ); +} + +export function writableStreamFinishInFlightWrite( + stream: WritableStream +) { + // Assert: stream.[[inFlightWriteRequest]] is not undefined. + stream[inFlightWriteRequest_]!.resolve(undefined); + stream[inFlightWriteRequest_] = undefined; +} + +export function writableStreamFinishInFlightWriteWithError( + stream: WritableStream, + error: shared.ErrorResult +) { + // Assert: stream.[[inFlightWriteRequest]] is not undefined. + stream[inFlightWriteRequest_]!.reject(error); + stream[inFlightWriteRequest_] = undefined; + // Assert: stream.[[state]] is "writable" or "erroring". + writableStreamDealWithRejection(stream, error); +} + +export function writableStreamFinishInFlightClose( + stream: WritableStream +) { + // Assert: stream.[[inFlightCloseRequest]] is not undefined. + stream[inFlightCloseRequest_]!.resolve(undefined); + stream[inFlightCloseRequest_] = undefined; + const state = stream[shared.state_]; + // Assert: stream.[[state]] is "writable" or "erroring". + if (state === "erroring") { + stream[shared.storedError_] = undefined; + if (stream[pendingAbortRequest_] !== undefined) { + stream[pendingAbortRequest_]!.resolve(); + stream[pendingAbortRequest_] = undefined; + } + } + stream[shared.state_] = "closed"; + const writer = stream[writer_]; + if (writer !== undefined) { + writer[closedPromise_].resolve(undefined); + } + // Assert: stream.[[pendingAbortRequest]] is undefined. + // Assert: stream.[[storedError]] is undefined. +} + +export function writableStreamFinishInFlightCloseWithError( + stream: WritableStream, + error: shared.ErrorResult +) { + // Assert: stream.[[inFlightCloseRequest]] is not undefined. + stream[inFlightCloseRequest_]!.reject(error); + stream[inFlightCloseRequest_] = undefined; + // Assert: stream.[[state]] is "writable" or "erroring". 
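+ // A close that fails also rejects any pending abort that was waiting on it.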
+ if (stream[pendingAbortRequest_] !== undefined) { + stream[pendingAbortRequest_]!.reject(error); + stream[pendingAbortRequest_] = undefined; + } + writableStreamDealWithRejection(stream, error); +} + +export function writableStreamCloseQueuedOrInFlight( + stream: WritableStream +) { + return ( + stream[closeRequest_] !== undefined || + stream[inFlightCloseRequest_] !== undefined + ); +} + +export function writableStreamHasOperationMarkedInFlight( + stream: WritableStream +) { + return ( + stream[inFlightWriteRequest_] !== undefined || + stream[inFlightCloseRequest_] !== undefined + ); +} + +export function writableStreamMarkCloseRequestInFlight( + stream: WritableStream +) { + // Assert: stream.[[inFlightCloseRequest]] is undefined. + // Assert: stream.[[closeRequest]] is not undefined. + stream[inFlightCloseRequest_] = stream[closeRequest_]; + stream[closeRequest_] = undefined; +} + +export function writableStreamMarkFirstWriteRequestInFlight( + stream: WritableStream +) { + // Assert: stream.[[inFlightWriteRequest]] is undefined. + // Assert: stream.[[writeRequests]] is not empty. + const writeRequest = stream[writeRequests_].shift()!; + stream[inFlightWriteRequest_] = writeRequest; +} + +export function writableStreamRejectCloseAndClosedPromiseIfNeeded( + stream: WritableStream +) { + // Assert: stream.[[state]] is "errored". + const closeRequest = stream[closeRequest_]; + if (closeRequest !== undefined) { + // Assert: stream.[[inFlightCloseRequest]] is undefined. + closeRequest.reject(stream[shared.storedError_]); + stream[closeRequest_] = undefined; + } + const writer = stream[writer_]; + if (writer !== undefined) { + writer[closedPromise_].reject(stream[shared.storedError_]); + writer[closedPromise_].promise.catch(() => {}); + } +} + +export function writableStreamUpdateBackpressure( + stream: WritableStream, + backpressure: boolean +) { + // Assert: stream.[[state]] is "writable". + // Assert: !WritableStreamCloseQueuedOrInFlight(stream) is false. + const writer = stream[writer_]; + if (writer !== undefined && backpressure !== stream[backpressure_]) { + if (backpressure) { + writer[readyPromise_] = shared.createControlledPromise(); + } else { + writer[readyPromise_].resolve(undefined); + } + } + stream[backpressure_] = backpressure; +} + +// ---- Writers + +export function isWritableStreamDefaultWriter( + value: unknown +): value is WritableStreamDefaultWriter { + if (typeof value !== "object" || value === null) { + return false; + } + return ownerWritableStream_ in value; +} + +export function writableStreamDefaultWriterAbort( + writer: WritableStreamDefaultWriter, + reason: shared.ErrorResult +) { + const stream = writer[ownerWritableStream_]!; + // Assert: stream is not undefined. + return writableStreamAbort(stream, reason); +} + +export function writableStreamDefaultWriterClose( + writer: WritableStreamDefaultWriter +) { + const stream = writer[ownerWritableStream_]!; + // Assert: stream is not undefined. + const state = stream[shared.state_]; + if (state === "closed" || state === "errored") { + return Promise.reject( + new TypeError("Writer stream is already closed or errored") + ); + } + // Assert: state is "writable" or "erroring". + // Assert: writableStreamCloseQueuedOrInFlight(stream) is false. 
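+ // Closing is requested by queueing a "close" marker behind any pending
+ // writes; the returned promise settles once that marker is processed.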
+ const closePromise = shared.createControlledPromise(); + stream[closeRequest_] = closePromise; + if (stream[backpressure_] && state === "writable") { + writer[readyPromise_].resolve(undefined); + } + writableStreamDefaultControllerClose(stream[writableStreamController_]!); + return closePromise.promise; +} + +export function writableStreamDefaultWriterCloseWithErrorPropagation( + writer: WritableStreamDefaultWriter +) { + const stream = writer[ownerWritableStream_]!; + // Assert: stream is not undefined. + const state = stream[shared.state_]; + if (writableStreamCloseQueuedOrInFlight(stream) || state === "closed") { + return Promise.resolve(undefined); + } + if (state === "errored") { + return Promise.reject(stream[shared.storedError_]); + } + // Assert: state is "writable" or "erroring". + return writableStreamDefaultWriterClose(writer); +} + +export function writableStreamDefaultWriterEnsureClosedPromiseRejected< + InputType +>(writer: WritableStreamDefaultWriter, error: shared.ErrorResult) { + const closedPromise = writer[closedPromise_]; + if (closedPromise.state === shared.ControlledPromiseState.Pending) { + closedPromise.reject(error); + } else { + writer[closedPromise_] = shared.createControlledPromise(); + writer[closedPromise_].reject(error); + } + writer[closedPromise_].promise.catch(() => {}); +} + +export function writableStreamDefaultWriterEnsureReadyPromiseRejected< + InputType +>(writer: WritableStreamDefaultWriter, error: shared.ErrorResult) { + const readyPromise = writer[readyPromise_]; + if (readyPromise.state === shared.ControlledPromiseState.Pending) { + readyPromise.reject(error); + } else { + writer[readyPromise_] = shared.createControlledPromise(); + writer[readyPromise_].reject(error); + } + writer[readyPromise_].promise.catch(() => {}); +} + +export function writableStreamDefaultWriterGetDesiredSize( + writer: WritableStreamDefaultWriter +) { + const stream = writer[ownerWritableStream_]!; + const state = stream[shared.state_]; + if (state === "errored" || state === "erroring") { + return null; + } + if (state === "closed") { + return 0; + } + return writableStreamDefaultControllerGetDesiredSize( + stream[writableStreamController_]! + ); +} + +export function writableStreamDefaultWriterRelease( + writer: WritableStreamDefaultWriter +) { + const stream = writer[ownerWritableStream_]!; + // Assert: stream is not undefined. + // Assert: stream.[[writer]] is writer. + const releasedError = new TypeError(); + writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, releasedError); + writableStreamDefaultWriterEnsureClosedPromiseRejected(writer, releasedError); + stream[writer_] = undefined; + writer[ownerWritableStream_] = undefined; +} + +export function writableStreamDefaultWriterWrite( + writer: WritableStreamDefaultWriter, + chunk: InputType +) { + const stream = writer[ownerWritableStream_]!; + // Assert: stream is not undefined. 
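+ // The size algorithm below may run user code, so the writer's lock and the
+ // stream's state are re-checked afterwards before the chunk is queued.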
+ const controller = stream[writableStreamController_]!; + const chunkSize = writableStreamDefaultControllerGetChunkSize( + controller, + chunk + ); + if (writer[ownerWritableStream_] !== stream) { + return Promise.reject(new TypeError()); + } + const state = stream[shared.state_]; + if (state === "errored") { + return Promise.reject(stream[shared.storedError_]); + } + if (writableStreamCloseQueuedOrInFlight(stream) || state === "closed") { + return Promise.reject( + new TypeError("Cannot write to a closing or closed stream") + ); + } + if (state === "erroring") { + return Promise.reject(stream[shared.storedError_]); + } + // Assert: state is "writable". + const promise = writableStreamAddWriteRequest(stream); + writableStreamDefaultControllerWrite(controller, chunk, chunkSize); + return promise; +} + +// ---- Controller + +export function setUpWritableStreamDefaultController( + stream: WritableStream, + controller: WritableStreamDefaultController, + startAlgorithm: StartAlgorithm, + writeAlgorithm: WriteAlgorithm, + closeAlgorithm: CloseAlgorithm, + abortAlgorithm: AbortAlgorithm, + highWaterMark: number, + sizeAlgorithm: QueuingStrategySizeCallback +) { + if (!isWritableStream(stream)) { + throw new TypeError(); + } + if (stream[writableStreamController_] !== undefined) { + throw new TypeError(); + } + + controller[controlledWritableStream_] = stream; + stream[writableStreamController_] = controller; + q.resetQueue(controller); + controller[started_] = false; + controller[strategySizeAlgorithm_] = sizeAlgorithm; + controller[strategyHWM_] = highWaterMark; + controller[writeAlgorithm_] = writeAlgorithm; + controller[closeAlgorithm_] = closeAlgorithm; + controller[abortAlgorithm_] = abortAlgorithm; + const backpressure = writableStreamDefaultControllerGetBackpressure( + controller + ); + writableStreamUpdateBackpressure(stream, backpressure); + + const startResult = startAlgorithm(); + Promise.resolve(startResult).then( + _ => { + // Assert: stream.[[state]] is "writable" or "erroring". + controller[started_] = true; + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + }, + error => { + // Assert: stream.[[state]] is "writable" or "erroring". + controller[started_] = true; + writableStreamDealWithRejection(stream, error); + } + ); +} + +export function isWritableStreamDefaultController( + value: unknown +): value is WritableStreamDefaultController { + if (typeof value !== "object" || value === null) { + return false; + } + return controlledWritableStream_ in value; +} + +export function writableStreamDefaultControllerClearAlgorithms( + controller: WritableStreamDefaultController +) { + // Use ! assertions to override type check here, this way we don't + // have to perform type checks/assertions everywhere else. 
+ controller[writeAlgorithm_] = undefined!; + controller[closeAlgorithm_] = undefined!; + controller[abortAlgorithm_] = undefined!; + controller[strategySizeAlgorithm_] = undefined!; +} + +export function writableStreamDefaultControllerClose( + controller: WritableStreamDefaultController +) { + q.enqueueValueWithSize(controller, "close", 0); + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); +} + +export function writableStreamDefaultControllerGetChunkSize( + controller: WritableStreamDefaultController, + chunk: InputType +) { + let chunkSize: number; + try { + chunkSize = controller[strategySizeAlgorithm_](chunk); + } catch (error) { + writableStreamDefaultControllerErrorIfNeeded(controller, error); + chunkSize = 1; + } + return chunkSize; +} + +export function writableStreamDefaultControllerGetDesiredSize( + controller: WritableStreamDefaultController +) { + return controller[strategyHWM_] - controller[q.queueTotalSize_]; +} + +export function writableStreamDefaultControllerWrite( + controller: WritableStreamDefaultController, + chunk: InputType, + chunkSize: number +) { + try { + q.enqueueValueWithSize(controller, { chunk }, chunkSize); + } catch (error) { + writableStreamDefaultControllerErrorIfNeeded(controller, error); + return; + } + const stream = controller[controlledWritableStream_]; + if ( + !writableStreamCloseQueuedOrInFlight(stream) && + stream[shared.state_] === "writable" + ) { + const backpressure = writableStreamDefaultControllerGetBackpressure( + controller + ); + writableStreamUpdateBackpressure(stream, backpressure); + } + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); +} + +export function writableStreamDefaultControllerAdvanceQueueIfNeeded( + controller: WritableStreamDefaultController +) { + if (!controller[started_]) { + return; + } + const stream = controller[controlledWritableStream_]; + if (stream[inFlightWriteRequest_] !== undefined) { + return; + } + const state = stream[shared.state_]; + if (state === "closed" || state === "errored") { + return; + } + if (state === "erroring") { + writableStreamFinishErroring(stream); + return; + } + if (controller[q.queue_].length === 0) { + return; + } + const writeRecord = q.peekQueueValue(controller); + if (writeRecord === "close") { + writableStreamDefaultControllerProcessClose(controller); + } else { + writableStreamDefaultControllerProcessWrite(controller, writeRecord.chunk); + } +} + +export function writableStreamDefaultControllerErrorIfNeeded( + controller: WritableStreamDefaultController, + error: shared.ErrorResult +) { + if (controller[controlledWritableStream_][shared.state_] === "writable") { + writableStreamDefaultControllerError(controller, error); + } +} + +export function writableStreamDefaultControllerProcessClose( + controller: WritableStreamDefaultController +) { + const stream = controller[controlledWritableStream_]; + writableStreamMarkCloseRequestInFlight(stream); + q.dequeueValue(controller); + // Assert: controller.[[queue]] is empty. 
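+ // Every queued write has been processed by now, so the sink's close() can run.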
+ const sinkClosePromise = controller[closeAlgorithm_](); + writableStreamDefaultControllerClearAlgorithms(controller); + sinkClosePromise.then( + _ => { + writableStreamFinishInFlightClose(stream); + }, + error => { + writableStreamFinishInFlightCloseWithError(stream, error); + } + ); +} + +export function writableStreamDefaultControllerProcessWrite( + controller: WritableStreamDefaultController, + chunk: InputType +) { + const stream = controller[controlledWritableStream_]; + writableStreamMarkFirstWriteRequestInFlight(stream); + controller[writeAlgorithm_](chunk).then( + _ => { + writableStreamFinishInFlightWrite(stream); + const state = stream[shared.state_]; + // Assert: state is "writable" or "erroring". + q.dequeueValue(controller); + if ( + !writableStreamCloseQueuedOrInFlight(stream) && + state === "writable" + ) { + const backpressure = writableStreamDefaultControllerGetBackpressure( + controller + ); + writableStreamUpdateBackpressure(stream, backpressure); + } + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + }, + error => { + if (stream[shared.state_] === "writable") { + writableStreamDefaultControllerClearAlgorithms(controller); + } + writableStreamFinishInFlightWriteWithError(stream, error); + } + ); +} + +export function writableStreamDefaultControllerGetBackpressure( + controller: WritableStreamDefaultController +) { + const desiredSize = writableStreamDefaultControllerGetDesiredSize(controller); + return desiredSize <= 0; +} + +export function writableStreamDefaultControllerError( + controller: WritableStreamDefaultController, + error: shared.ErrorResult +) { + const stream = controller[controlledWritableStream_]; + // Assert: stream.[[state]] is "writable". + writableStreamDefaultControllerClearAlgorithms(controller); + writableStreamStartErroring(stream, error); +} diff --git a/cli/js/streams/writable-stream-default-controller.ts b/cli/js/streams/writable-stream-default-controller.ts new file mode 100644 index 00000000000000..c2f065bc0b1232 --- /dev/null +++ b/cli/js/streams/writable-stream-default-controller.ts @@ -0,0 +1,97 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/writable-stream-default-controller - WritableStreamDefaultController class implementation + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +import * as ws from "./writable-internals.ts"; +import * as shared from "./shared-internals.ts"; +import * as q from "./queue-mixin.ts"; +import { Queue } from "./queue.ts"; +import { QueuingStrategySizeCallback } from "../dom_types.ts"; + +export class WritableStreamDefaultController + implements ws.WritableStreamDefaultController { + [ws.abortAlgorithm_]: ws.AbortAlgorithm; + [ws.closeAlgorithm_]: ws.CloseAlgorithm; + [ws.controlledWritableStream_]: ws.WritableStream; + [ws.started_]: boolean; + [ws.strategyHWM_]: number; + [ws.strategySizeAlgorithm_]: QueuingStrategySizeCallback; + [ws.writeAlgorithm_]: ws.WriteAlgorithm; + + [q.queue_]: Queue | "close">>; + [q.queueTotalSize_]: number; + + constructor() { + throw new TypeError(); + } + + error(e?: shared.ErrorResult) { + if (!ws.isWritableStreamDefaultController(this)) { + throw new TypeError(); + } + const state = this[ws.controlledWritableStream_][shared.state_]; + if (state !== "writable") { + return; + } + ws.writableStreamDefaultControllerError(this, 
e); + } + + [ws.abortSteps_](reason: shared.ErrorResult) { + const result = this[ws.abortAlgorithm_](reason); + ws.writableStreamDefaultControllerClearAlgorithms(this); + return result; + } + + [ws.errorSteps_]() { + q.resetQueue(this); + } +} + +export function setUpWritableStreamDefaultControllerFromUnderlyingSink< + InputType +>( + stream: ws.WritableStream, + underlyingSink: ws.WritableStreamSink, + highWaterMark: number, + sizeAlgorithm: QueuingStrategySizeCallback +) { + // Assert: underlyingSink is not undefined. + const controller = Object.create( + WritableStreamDefaultController.prototype + ) as WritableStreamDefaultController; + + const startAlgorithm = function() { + return shared.invokeOrNoop(underlyingSink, "start", [controller]); + }; + const writeAlgorithm = shared.createAlgorithmFromUnderlyingMethod( + underlyingSink, + "write", + [controller] + ); + const closeAlgorithm = shared.createAlgorithmFromUnderlyingMethod( + underlyingSink, + "close", + [] + ); + const abortAlgorithm = shared.createAlgorithmFromUnderlyingMethod( + underlyingSink, + "abort", + [] + ); + ws.setUpWritableStreamDefaultController( + stream, + controller, + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + highWaterMark, + sizeAlgorithm + ); +} diff --git a/cli/js/streams/writable-stream-default-writer.ts b/cli/js/streams/writable-stream-default-writer.ts new file mode 100644 index 00000000000000..d05832389047c4 --- /dev/null +++ b/cli/js/streams/writable-stream-default-writer.ts @@ -0,0 +1,135 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/writable-stream-default-writer - WritableStreamDefaultWriter class implementation + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +import * as ws from "./writable-internals.ts"; +import * as shared from "./shared-internals.ts"; + +export class WritableStreamDefaultWriter + implements ws.WritableStreamDefaultWriter { + [ws.ownerWritableStream_]: ws.WritableStream | undefined; + [ws.readyPromise_]: shared.ControlledPromise; + [ws.closedPromise_]: shared.ControlledPromise; + + constructor(stream: ws.WritableStream) { + if (!ws.isWritableStream(stream)) { + throw new TypeError(); + } + if (ws.isWritableStreamLocked(stream)) { + throw new TypeError("Stream is already locked"); + } + this[ws.ownerWritableStream_] = stream; + stream[ws.writer_] = this; + + const readyPromise = shared.createControlledPromise(); + const closedPromise = shared.createControlledPromise(); + this[ws.readyPromise_] = readyPromise; + this[ws.closedPromise_] = closedPromise; + + const state = stream[shared.state_]; + if (state === "writable") { + if ( + !ws.writableStreamCloseQueuedOrInFlight(stream) && + stream[ws.backpressure_] + ) { + // OK Set this.[[readyPromise]] to a new promise. + } else { + readyPromise.resolve(undefined); + } + // OK Set this.[[closedPromise]] to a new promise. + } else if (state === "erroring") { + readyPromise.reject(stream[shared.storedError_]); + readyPromise.promise.catch(() => {}); + // OK Set this.[[closedPromise]] to a new promise. + } else if (state === "closed") { + readyPromise.resolve(undefined); + closedPromise.resolve(undefined); + } else { + // Assert: state is "errored". 
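+ // Both writer promises reject immediately with the stored error; the
+ // catch() handlers avoid spurious unhandled-rejection reports.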
+ const storedError = stream[shared.storedError_]; + readyPromise.reject(storedError); + readyPromise.promise.catch(() => {}); + closedPromise.reject(storedError); + closedPromise.promise.catch(() => {}); + } + } + + abort(reason: shared.ErrorResult): Promise { + if (!ws.isWritableStreamDefaultWriter(this)) { + return Promise.reject(new TypeError()); + } + if (this[ws.ownerWritableStream_] === undefined) { + return Promise.reject( + new TypeError("Writer is not connected to a stream") + ); + } + return ws.writableStreamDefaultWriterAbort(this, reason); + } + + close(): Promise { + if (!ws.isWritableStreamDefaultWriter(this)) { + return Promise.reject(new TypeError()); + } + const stream = this[ws.ownerWritableStream_]; + if (stream === undefined) { + return Promise.reject( + new TypeError("Writer is not connected to a stream") + ); + } + if (ws.writableStreamCloseQueuedOrInFlight(stream)) { + return Promise.reject(new TypeError()); + } + return ws.writableStreamDefaultWriterClose(this); + } + + releaseLock(): void { + const stream = this[ws.ownerWritableStream_]; + if (stream === undefined) { + return; + } + // Assert: stream.[[writer]] is not undefined. + ws.writableStreamDefaultWriterRelease(this); + } + + write(chunk: InputType): Promise { + if (!ws.isWritableStreamDefaultWriter(this)) { + return Promise.reject(new TypeError()); + } + if (this[ws.ownerWritableStream_] === undefined) { + return Promise.reject( + new TypeError("Writer is not connected to a stream") + ); + } + return ws.writableStreamDefaultWriterWrite(this, chunk); + } + + get closed(): Promise { + if (!ws.isWritableStreamDefaultWriter(this)) { + return Promise.reject(new TypeError()); + } + return this[ws.closedPromise_].promise; + } + + get desiredSize(): number | null { + if (!ws.isWritableStreamDefaultWriter(this)) { + throw new TypeError(); + } + if (this[ws.ownerWritableStream_] === undefined) { + throw new TypeError("Writer is not connected to stream"); + } + return ws.writableStreamDefaultWriterGetDesiredSize(this); + } + + get ready(): Promise { + if (!ws.isWritableStreamDefaultWriter(this)) { + return Promise.reject(new TypeError()); + } + return this[ws.readyPromise_].promise; + } +} diff --git a/cli/js/streams/writable-stream.ts b/cli/js/streams/writable-stream.ts new file mode 100644 index 00000000000000..703c7196be465b --- /dev/null +++ b/cli/js/streams/writable-stream.ts @@ -0,0 +1,117 @@ +// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +/** + * streams/writable-stream - WritableStream class implementation + * Part of Stardazed + * (c) 2018-Present by Arthur Langereis - @zenmumbler + * https://github.com/stardazed/sd-streams + */ + +import * as ws from "./writable-internals.ts"; +import * as shared from "./shared-internals.ts"; +import { + WritableStreamDefaultController, + setUpWritableStreamDefaultControllerFromUnderlyingSink +} from "./writable-stream-default-controller.ts"; +import { WritableStreamDefaultWriter } from "./writable-stream-default-writer.ts"; +import { QueuingStrategy, QueuingStrategySizeCallback } from "../dom_types.ts"; + +export class WritableStream { + [shared.state_]: ws.WritableStreamState; + [shared.storedError_]: shared.ErrorResult; + [ws.backpressure_]: boolean; + [ws.closeRequest_]: shared.ControlledPromise | undefined; + [ws.inFlightWriteRequest_]: shared.ControlledPromise | undefined; + [ws.inFlightCloseRequest_]: shared.ControlledPromise | 
undefined; + [ws.pendingAbortRequest_]: ws.AbortRequest | undefined; + [ws.writableStreamController_]: + | ws.WritableStreamDefaultController + | undefined; + [ws.writer_]: ws.WritableStreamDefaultWriter | undefined; + [ws.writeRequests_]: shared.ControlledPromise[]; + + constructor( + sink: ws.WritableStreamSink = {}, + strategy: QueuingStrategy = {} + ) { + ws.initializeWritableStream(this); + const sizeFunc = strategy.size; + const stratHWM = strategy.highWaterMark; + if (sink.type !== undefined) { + throw new RangeError("The type of an underlying sink must be undefined"); + } + + const sizeAlgorithm = shared.makeSizeAlgorithmFromSizeFunction(sizeFunc); + const highWaterMark = shared.validateAndNormalizeHighWaterMark( + stratHWM === undefined ? 1 : stratHWM + ); + + setUpWritableStreamDefaultControllerFromUnderlyingSink( + this, + sink, + highWaterMark, + sizeAlgorithm + ); + } + + get locked(): boolean { + if (!ws.isWritableStream(this)) { + throw new TypeError(); + } + return ws.isWritableStreamLocked(this); + } + + abort(reason?: shared.ErrorResult): Promise { + if (!ws.isWritableStream(this)) { + return Promise.reject(new TypeError()); + } + if (ws.isWritableStreamLocked(this)) { + return Promise.reject(new TypeError("Cannot abort a locked stream")); + } + return ws.writableStreamAbort(this, reason); + } + + getWriter(): ws.WritableStreamWriter { + if (!ws.isWritableStream(this)) { + throw new TypeError(); + } + return new WritableStreamDefaultWriter(this); + } +} + +export function createWritableStream( + startAlgorithm: ws.StartAlgorithm, + writeAlgorithm: ws.WriteAlgorithm, + closeAlgorithm: ws.CloseAlgorithm, + abortAlgorithm: ws.AbortAlgorithm, + highWaterMark?: number, + sizeAlgorithm?: QueuingStrategySizeCallback +) { + if (highWaterMark === undefined) { + highWaterMark = 1; + } + if (sizeAlgorithm === undefined) { + sizeAlgorithm = () => 1; + } + // Assert: ! IsNonNegativeNumber(highWaterMark) is true. 
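+ // createWritableStream builds a stream directly from algorithms (used
+ // internally, e.g. by TransformStream) without going through the public
+ // constructor.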
+ + const stream = Object.create(WritableStream.prototype) as WritableStream< + InputType + >; + ws.initializeWritableStream(stream); + const controller = Object.create( + WritableStreamDefaultController.prototype + ) as WritableStreamDefaultController; + ws.setUpWritableStreamDefaultController( + stream, + controller, + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + highWaterMark, + sizeAlgorithm + ); + return stream; +} From b1bc3a9d0a18aada6e2b82faecc538bfc87950da Mon Sep 17 00:00:00 2001 From: Nick Stott Date: Wed, 23 Oct 2019 12:48:16 -0400 Subject: [PATCH 02/10] change the interfaces in dom_types to match what sd-streams expects --- cli/js/body.ts | 10 +++++----- cli/js/dom_types.ts | 6 +++--- cli/js/globals.ts | 6 ------ 3 files changed, 8 insertions(+), 14 deletions(-) diff --git a/cli/js/body.ts b/cli/js/body.ts index f495720678740c..e00cd30b94afad 100644 --- a/cli/js/body.ts +++ b/cli/js/body.ts @@ -3,7 +3,7 @@ import * as blob from "./blob.ts"; import * as encoding from "./text_encoding.ts"; import * as headers from "./headers.ts"; import * as domTypes from "./dom_types.ts"; -import { window } from "./window.ts"; +import { ReadableStream } from "./streams/mod.ts"; const { Headers } = headers; @@ -45,7 +45,7 @@ function validateBodyType(owner: Body, bodySource: BodySource): boolean { return true; } else if (typeof bodySource === "string") { return true; - } else if (bodySource instanceof window.ReadableStream) { + } else if (bodySource instanceof ReadableStream) { return true; } else if (bodySource instanceof FormData) { return true; @@ -144,12 +144,12 @@ export class Body implements domTypes.Body { return this._stream; } - if (this._bodySource instanceof window.ReadableStream) { + if (this._bodySource instanceof ReadableStream) { // @ts-ignore this._stream = this._bodySource; } if (typeof this._bodySource === "string") { - this._stream = new window.ReadableStream({ + this._stream = new ReadableStream({ start(controller: ReadableStreamController): void { controller.enqueue(this._bodySource); controller.close(); @@ -331,7 +331,7 @@ export class Body implements domTypes.Body { } else if (typeof this._bodySource === "string") { const enc = new TextEncoder(); return enc.encode(this._bodySource).buffer as ArrayBuffer; - } else if (this._bodySource instanceof window.ReadableStream) { + } else if (this._bodySource instanceof ReadableStream) { // @ts-ignore return bufferFromStream(this._bodySource.getReader()); } else if (this._bodySource instanceof FormData) { diff --git a/cli/js/dom_types.ts b/cli/js/dom_types.ts index aeba2771f6075c..991251eda8595e 100644 --- a/cli/js/dom_types.ts +++ b/cli/js/dom_types.ts @@ -332,9 +332,9 @@ export interface Body { export interface ReadableStream { readonly locked: boolean; - cancel(): Promise; + cancel(reason?: any): Promise; getReader(): ReadableStreamReader; - tee(): [ReadableStream, ReadableStream]; + tee(): ReadableStream[]; } export interface WritableStream { @@ -374,7 +374,7 @@ export interface UnderlyingSink { } export interface ReadableStreamReader { - cancel(): Promise; + cancel(reason?: any): Promise; read(): Promise; releaseLock(): void; } diff --git a/cli/js/globals.ts b/cli/js/globals.ts index f54cb5fa0eaf34..93f47ff1be8e6c 100644 --- a/cli/js/globals.ts +++ b/cli/js/globals.ts @@ -27,7 +27,6 @@ import * as urlSearchParams from "./url_search_params.ts"; import * as workers from "./workers.ts"; import * as performanceUtil from "./performance.ts"; import * as request from "./request.ts"; -import * as streams from 
"./streams/mod.ts"; // These imports are not exposed and therefore are fine to just import the // symbols required. @@ -138,11 +137,6 @@ export type Request = domTypes.Request; window.Response = fetchTypes.Response; export type Response = domTypes.Response; -window.ReadableStream = streams.ReadableStream; -export type ReadableStream = domTypes.ReadableStream; -window.WritableStream = streams.WritableStream; -export type WritableStream = domTypes.WritableStream; - window.performance = new performanceUtil.Performance(); // This variable functioning correctly depends on `declareAsLet` From 0867fd84630461d716499eb6c5f578a5ac7a484e Mon Sep 17 00:00:00 2001 From: Nick Stott Date: Wed, 23 Oct 2019 13:45:37 -0400 Subject: [PATCH 03/10] use eslint --fix on some lint errors --- cli/js/streams/pipe-to.ts | 2 +- cli/js/streams/readable-internals.ts | 6 +++--- cli/js/streams/readable-stream-byob-reader.ts | 2 +- cli/js/streams/readable-stream-default-reader.ts | 2 +- cli/js/streams/readable-stream.ts | 2 +- cli/js/streams/writable-internals.ts | 2 +- cli/js/streams/writable-stream.ts | 2 +- 7 files changed, 9 insertions(+), 9 deletions(-) diff --git a/cli/js/streams/pipe-to.ts b/cli/js/streams/pipe-to.ts index 2dee4cdb9de407..8a66bc942f8e22 100644 --- a/cli/js/streams/pipe-to.ts +++ b/cli/js/streams/pipe-to.ts @@ -47,7 +47,7 @@ export function pipeTo( // TODO this should be a DOMException, // https://github.com/stardazed/sd-streams/blob/master/packages/streams/src/pipe-to.ts#L38 const error = new DenoError(ErrorKind.AbortError, "Aborted"); - const actions: (() => Promise)[] = []; + const actions: Array<() => Promise> = []; if (preventAbort === false) { actions.push(() => { if (dest[shared.state_] === "writable") { diff --git a/cli/js/streams/readable-internals.ts b/cli/js/streams/readable-internals.ts index 0ec8375ab36fd4..a0e3eb0fbabeff 100644 --- a/cli/js/streams/readable-internals.ts +++ b/cli/js/streams/readable-internals.ts @@ -172,7 +172,7 @@ export declare class SDReadableStreamDefaultReader [ownerReadableStream_]: SDReadableStream | undefined; [closedPromise_]: shared.ControlledPromise; - [readRequests_]: ReadRequest>[]; + [readRequests_]: Array>>; } export declare class SDReadableStreamBYOBReader @@ -186,7 +186,7 @@ export declare class SDReadableStreamBYOBReader [ownerReadableStream_]: SDReadableStream | undefined; [closedPromise_]: shared.ControlledPromise; - [readIntoRequests_]: ReadRequest>[]; + [readIntoRequests_]: Array>>; } // ---- @@ -212,7 +212,7 @@ export declare class SDReadableStream { cancel(reason?: shared.ErrorResult): Promise; getReader(): SDReadableStreamReader; getReader(options: { mode: "byob" }): SDReadableStreamBYOBReader; - tee(): SDReadableStream[]; + tee(): Array>; pipeThrough( transform: GenericTransformStream, diff --git a/cli/js/streams/readable-stream-byob-reader.ts b/cli/js/streams/readable-stream-byob-reader.ts index 20d564c0c68d7c..18bbb3802bf2c5 100644 --- a/cli/js/streams/readable-stream-byob-reader.ts +++ b/cli/js/streams/readable-stream-byob-reader.ts @@ -15,7 +15,7 @@ export class SDReadableStreamBYOBReader implements rs.SDReadableStreamBYOBReader { [rs.closedPromise_]: shared.ControlledPromise; [rs.ownerReadableStream_]: rs.SDReadableStream | undefined; - [rs.readIntoRequests_]: rs.ReadRequest>[]; + [rs.readIntoRequests_]: Array>>; constructor(stream: rs.SDReadableStream) { if (!rs.isReadableStream(stream)) { diff --git a/cli/js/streams/readable-stream-default-reader.ts b/cli/js/streams/readable-stream-default-reader.ts index 
578f35c4625bde..74453af527273b 100644 --- a/cli/js/streams/readable-stream-default-reader.ts +++ b/cli/js/streams/readable-stream-default-reader.ts @@ -15,7 +15,7 @@ export class ReadableStreamDefaultReader implements rs.SDReadableStreamReader { [rs.closedPromise_]: shared.ControlledPromise; [rs.ownerReadableStream_]: rs.SDReadableStream | undefined; - [rs.readRequests_]: rs.ReadRequest>[]; + [rs.readRequests_]: Array>>; constructor(stream: rs.SDReadableStream) { if (!rs.isReadableStream(stream)) { diff --git a/cli/js/streams/readable-stream.ts b/cli/js/streams/readable-stream.ts index 0b5114978aed6c..b9d0dad67e4534 100644 --- a/cli/js/streams/readable-stream.ts +++ b/cli/js/streams/readable-stream.ts @@ -129,7 +129,7 @@ export class SDReadableStream return rs.readableStreamCancel(this, reason); } - tee(): SDReadableStream[] { + tee(): Array> { return readableStreamTee(this, false); } diff --git a/cli/js/streams/writable-internals.ts b/cli/js/streams/writable-internals.ts index f59bc949487f37..e796a9d16be928 100644 --- a/cli/js/streams/writable-internals.ts +++ b/cli/js/streams/writable-internals.ts @@ -140,7 +140,7 @@ export declare class WritableStream { | WritableStreamDefaultController | undefined; [writer_]: WritableStreamDefaultWriter | undefined; - [writeRequests_]: shared.ControlledPromise[]; + [writeRequests_]: Array>; } // ---- Stream diff --git a/cli/js/streams/writable-stream.ts b/cli/js/streams/writable-stream.ts index 703c7196be465b..42de9b9681311f 100644 --- a/cli/js/streams/writable-stream.ts +++ b/cli/js/streams/writable-stream.ts @@ -29,7 +29,7 @@ export class WritableStream { | ws.WritableStreamDefaultController | undefined; [ws.writer_]: ws.WritableStreamDefaultWriter | undefined; - [ws.writeRequests_]: shared.ControlledPromise[]; + [ws.writeRequests_]: Array>; constructor( sink: ws.WritableStreamSink = {}, From 50890a660be76573b166a78d9fe395d9e5be674b Mon Sep 17 00:00:00 2001 From: Nick Stott Date: Wed, 23 Oct 2019 13:46:39 -0400 Subject: [PATCH 04/10] remove unused exports --- cli/js/streams/mod.ts | 6 ------ 1 file changed, 6 deletions(-) diff --git a/cli/js/streams/mod.ts b/cli/js/streams/mod.ts index a6d7b4cceab783..bec6658c53757f 100644 --- a/cli/js/streams/mod.ts +++ b/cli/js/streams/mod.ts @@ -17,9 +17,3 @@ export { CountQueuingStrategy } from "./strategies.ts"; -// only for linked web standard implementations -export { - createReadableStream as internal_createReadableStream, - createReadableByteStream as internal_createReadableByteStream, - readableStreamTee as internal_readableStreamTee -} from "./readable-stream.ts"; From c74334d527e00301f5a4f1fea3fe065ba2c6c818 Mon Sep 17 00:00:00 2001 From: Nick Stott Date: Wed, 23 Oct 2019 14:07:12 -0400 Subject: [PATCH 05/10] supress the prefer-const eslint for now --- cli/js/streams/readable-stream.ts | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/cli/js/streams/readable-stream.ts b/cli/js/streams/readable-stream.ts index b9d0dad67e4534..49da4250153204 100644 --- a/cli/js/streams/readable-stream.ts +++ b/cli/js/streams/readable-stream.ts @@ -8,6 +8,11 @@ * https://github.com/stardazed/sd-streams */ +/* eslint prefer-const: "warn" */ +// TODO remove this, surpressed because of +// 284:7 error 'branch1' is never reassigned. Use 'const' instead prefer-const +// 285:7 error 'branch2' is never reassigned. 
Use 'const' instead prefer-const + import * as rs from "./readable-internals.ts"; import * as ws from "./writable-internals.ts"; import * as shared from "./shared-internals.ts"; From d195cd466b1f18ebbfdc524fe43a31b1245526f3 Mon Sep 17 00:00:00 2001 From: Nick Stott Date: Wed, 23 Oct 2019 14:31:06 -0400 Subject: [PATCH 06/10] s/Error/Exception/ --- cli/js/errors.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cli/js/errors.ts b/cli/js/errors.ts index 57297f67269f74..02c9d33b3ec1f6 100644 --- a/cli/js/errors.ts +++ b/cli/js/errors.ts @@ -77,7 +77,7 @@ export enum ErrorKind { Diagnostic = 49, JSError = 50, - /** TODO These are DomError Types, and should be moved there when it exists */ + /** TODO These are DomException Types, and should be moved there when it exists */ DataCloneError = 51, AbortError = 52 } From 1cc6eb3a06c0800608bdb75ba572a5fea01bc4a6 Mon Sep 17 00:00:00 2001 From: Nick Stott Date: Wed, 23 Oct 2019 15:56:53 -0400 Subject: [PATCH 07/10] resolve eslint warnings --- cli/js/streams/mod.ts | 1 - cli/js/streams/pipe-to.ts | 23 ++-- cli/js/streams/queue-mixin.ts | 11 +- cli/js/streams/queue.ts | 2 +- .../readable-byte-stream-controller.ts | 19 ++-- cli/js/streams/readable-internals.ts | 105 +++++++++--------- cli/js/streams/readable-stream-byob-reader.ts | 4 +- .../streams/readable-stream-byob-request.ts | 4 +- .../readable-stream-default-controller.ts | 19 ++-- .../streams/readable-stream-default-reader.ts | 2 +- cli/js/streams/readable-stream.ts | 19 ++-- cli/js/streams/shared-internals.ts | 29 ++--- cli/js/streams/strategies.ts | 7 +- cli/js/streams/transform-internals.ts | 48 ++++---- cli/js/streams/transform-stream.ts | 9 +- cli/js/streams/writable-internals.ts | 83 ++++++++------ .../writable-stream-default-controller.ts | 13 ++- cli/js/streams/writable-stream.ts | 4 +- 18 files changed, 225 insertions(+), 177 deletions(-) diff --git a/cli/js/streams/mod.ts b/cli/js/streams/mod.ts index bec6658c53757f..97386b1d2ee3fd 100644 --- a/cli/js/streams/mod.ts +++ b/cli/js/streams/mod.ts @@ -16,4 +16,3 @@ export { ByteLengthQueuingStrategy, CountQueuingStrategy } from "./strategies.ts"; - diff --git a/cli/js/streams/pipe-to.ts b/cli/js/streams/pipe-to.ts index 8a66bc942f8e22..18d2bd4f82f872 100644 --- a/cli/js/streams/pipe-to.ts +++ b/cli/js/streams/pipe-to.ts @@ -8,6 +8,9 @@ * https://github.com/stardazed/sd-streams */ +/* eslint-disable @typescript-eslint/no-explicit-any */ +// TODO reenable this lint here + import * as rs from "./readable-internals.ts"; import * as ws from "./writable-internals.ts"; import * as shared from "./shared-internals.ts"; @@ -26,7 +29,7 @@ export function pipeTo( source: rs.SDReadableStream, dest: ws.WritableStream, options: PipeOptions -) { +): Promise { const preventClose = !!options.preventClose; const preventAbort = !!options.preventAbort; const preventCancel = !!options.preventCancel; @@ -43,7 +46,7 @@ export function pipeTo( let abortAlgorithm: () => any; if (signal !== undefined) { - abortAlgorithm = () => { + abortAlgorithm = (): void => { // TODO this should be a DOMException, // https://github.com/stardazed/sd-streams/blob/master/packages/streams/src/pipe-to.ts#L38 const error = new DenoError(ErrorKind.AbortError, "Aborted"); @@ -83,7 +86,7 @@ export function pipeTo( stream: rs.SDReadableStream | ws.WritableStream, promise: Promise, action: (error: shared.ErrorResult) => void - ) { + ): void { if (stream[shared.state_] === "errored") { action(stream[shared.storedError_]); } else { @@ -95,7 +98,7 @@ export 
function pipeTo( stream: rs.SDReadableStream | ws.WritableStream, promise: Promise, action: () => void - ) { + ): void { if (stream[shared.state_] === "closed") { action(); } else { @@ -155,7 +158,7 @@ export function pipeTo( ); } - function flushRemainder() { + function flushRemainder(): Promise | undefined { if ( dest[shared.state_] === "writable" && !ws.writableStreamCloseQueuedOrInFlight(dest) @@ -166,17 +169,17 @@ export function pipeTo( } } - function shutDown(action?: () => Promise, error?: ErrorWrapper) { + function shutDown(action?: () => Promise, error?: ErrorWrapper): void { if (shuttingDown) { return; } shuttingDown = true; if (action === undefined) { - action = () => Promise.resolve(); + action = (): Promise => Promise.resolve(); } - function finishShutDown() { + function finishShutDown(): void { action!().then( _ => finalize(error), newError => finalize({ actualError: newError }) @@ -191,7 +194,7 @@ export function pipeTo( } } - function finalize(error?: ErrorWrapper) { + function finalize(error?: ErrorWrapper): void { ws.writableStreamDefaultWriterRelease(writer); rs.readableStreamReaderGenericRelease(reader); if (signal && abortAlgorithm) { @@ -204,7 +207,7 @@ export function pipeTo( } } - function next() { + function next(): Promise | undefined { if (shuttingDown) { return; } diff --git a/cli/js/streams/queue-mixin.ts b/cli/js/streams/queue-mixin.ts index 0fa47893368c50..23c57d75ffde2f 100644 --- a/cli/js/streams/queue-mixin.ts +++ b/cli/js/streams/queue-mixin.ts @@ -8,6 +8,9 @@ * https://github.com/stardazed/sd-streams */ +/* eslint-disable @typescript-eslint/no-explicit-any */ +// TODO reenable this lint here + import { Queue, QueueImpl } from "./queue.ts"; import { isFiniteNonNegativeNumber } from "./shared-internals.ts"; @@ -33,7 +36,7 @@ export interface ByteQueueContainer { [queueTotalSize_]: number; } -export function dequeueValue(container: QueueContainer) { +export function dequeueValue(container: QueueContainer): V { // Assert: container has[[queue]] and[[queueTotalSize]] internal slots. // Assert: container.[[queue]] is not empty. const pair = container[queue_].shift()!; @@ -46,7 +49,7 @@ export function enqueueValueWithSize( container: QueueContainer, value: V, size: number -) { +): void { // Assert: container has[[queue]] and[[queueTotalSize]] internal slots. if (!isFiniteNonNegativeNumber(size)) { throw new RangeError("Chunk size must be a non-negative, finite numbers"); @@ -55,7 +58,7 @@ export function enqueueValueWithSize( container[queueTotalSize_] += size; } -export function peekQueueValue(container: QueueContainer) { +export function peekQueueValue(container: QueueContainer): V { // Assert: container has[[queue]] and[[queueTotalSize]] internal slots. // Assert: container.[[queue]] is not empty. return container[queue_].front()!.value; @@ -63,7 +66,7 @@ export function peekQueueValue(container: QueueContainer) { export function resetQueue( container: ByteQueueContainer | QueueContainer -) { +): void { // Chrome (as of v67) has a steep performance cliff with large arrays // and shift(), around about 50k elements. 
While this is an unusual case // we use a simple wrapper around shift and push that is chunked to diff --git a/cli/js/streams/queue.ts b/cli/js/streams/queue.ts index e243ba169b4e08..264851baf4b092 100644 --- a/cli/js/streams/queue.ts +++ b/cli/js/streams/queue.ts @@ -59,7 +59,7 @@ export class QueueImpl implements Queue { return t; } - get length() { + get length(): number { return this.length_; } } diff --git a/cli/js/streams/readable-byte-stream-controller.ts b/cli/js/streams/readable-byte-stream-controller.ts index 0ac1659feff764..86efd416c2aa92 100644 --- a/cli/js/streams/readable-byte-stream-controller.ts +++ b/cli/js/streams/readable-byte-stream-controller.ts @@ -8,6 +8,9 @@ * https://github.com/stardazed/sd-streams */ +/* eslint-disable @typescript-eslint/no-explicit-any */ +// TODO reenable this lint here + import * as rs from "./readable-internals.ts"; import * as q from "./queue-mixin.ts"; import * as shared from "./shared-internals.ts"; @@ -70,7 +73,7 @@ export class ReadableByteStreamController return rs.readableByteStreamControllerGetDesiredSize(this); } - close() { + close(): void { if (!rs.isReadableByteStreamController(this)) { throw new TypeError(); } @@ -83,7 +86,7 @@ export class ReadableByteStreamController rs.readableByteStreamControllerClose(this); } - enqueue(chunk: ArrayBufferView) { + enqueue(chunk: ArrayBufferView): void { if (!rs.isReadableByteStreamController(this)) { throw new TypeError(); } @@ -100,14 +103,14 @@ export class ReadableByteStreamController return rs.readableByteStreamControllerEnqueue(this, chunk); } - error(error?: shared.ErrorResult) { + error(error?: shared.ErrorResult): void { if (!rs.isReadableByteStreamController(this)) { throw new TypeError(); } rs.readableByteStreamControllerError(this, error); } - [rs.cancelSteps_](reason: shared.ErrorResult) { + [rs.cancelSteps_](reason: shared.ErrorResult): Promise { if (this[rs.pendingPullIntos_].length > 0) { const firstDescriptor = this[rs.pendingPullIntos_][0]; firstDescriptor.bytesFilled = 0; @@ -118,7 +121,9 @@ export class ReadableByteStreamController return result; } - [rs.pullSteps_](forAuthorCode: boolean) { + [rs.pullSteps_]( + forAuthorCode: boolean + ): Promise> { const stream = this[rs.controlledReadableByteStream_]; // Assert: ! ReadableStreamHasDefaultReader(stream) is true. if (this[q.queueTotalSize_] > 0) { @@ -165,13 +170,13 @@ export function setUpReadableByteStreamControllerFromUnderlyingSource( stream: rs.SDReadableStream, underlyingByteSource: UnderlyingByteSource, highWaterMark: number -) { +): void { // Assert: underlyingByteSource is not undefined. 
const controller = Object.create( ReadableByteStreamController.prototype ) as ReadableByteStreamController; - const startAlgorithm = () => { + const startAlgorithm = (): any => { return shared.invokeOrNoop(underlyingByteSource, "start", [controller]); }; const pullAlgorithm = shared.createAlgorithmFromUnderlyingMethod( diff --git a/cli/js/streams/readable-internals.ts b/cli/js/streams/readable-internals.ts index a0e3eb0fbabeff..6f892a4c8d7e09 100644 --- a/cli/js/streams/readable-internals.ts +++ b/cli/js/streams/readable-internals.ts @@ -8,6 +8,9 @@ * https://github.com/stardazed/sd-streams */ +/* eslint-disable @typescript-eslint/no-explicit-any */ +// TODO reenable this lint here + import * as ws from "./writable-internals.ts"; import * as shared from "./shared-internals.ts"; import * as q from "./queue-mixin.ts"; @@ -233,7 +236,7 @@ export declare class SDReadableStream { export function initializeReadableStream( stream: SDReadableStream -) { +): void { stream[shared.state_] = "readable"; stream[reader_] = undefined; stream[shared.storedError_] = undefined; @@ -251,13 +254,13 @@ export function isReadableStream( export function isReadableStreamLocked( stream: SDReadableStream -) { +): boolean { return stream[reader_] !== undefined; } export function readableStreamGetNumReadIntoRequests( stream: SDReadableStream -) { +): number | undefined { // TODO remove the "as unknown" cast // This is in to workaround a compiler error // error TS2352: Conversion of type 'SDReadableStreamReader' to type 'SDReadableStreamBYOBReader' may be a mistake because neither type sufficiently overlaps with the other. If this was intentional, convert the expression to 'unknown' first. @@ -271,7 +274,7 @@ export function readableStreamGetNumReadIntoRequests( export function readableStreamGetNumReadRequests( stream: SDReadableStream -) { +): number { const reader = stream[reader_] as SDReadableStreamDefaultReader; if (reader === undefined) { return 0; @@ -294,7 +297,7 @@ export function readableStreamCreateReadResult( export function readableStreamAddReadIntoRequest( stream: SDReadableStream, forAuthorCode: boolean -) { +): Promise> { // Assert: ! IsReadableStreamBYOBReader(stream.[[reader]]) is true. // Assert: stream.[[state]] is "readable" or "closed". const reader = stream[reader_] as SDReadableStreamBYOBReader; @@ -309,7 +312,7 @@ export function readableStreamAddReadIntoRequest( export function readableStreamAddReadRequest( stream: SDReadableStream, forAuthorCode: boolean -) { +): Promise> { // Assert: ! IsReadableStreamDefaultReader(stream.[[reader]]) is true. // Assert: stream.[[state]] is "readable". const reader = stream[reader_] as SDReadableStreamDefaultReader; @@ -323,14 +326,14 @@ export function readableStreamAddReadRequest( export function readableStreamHasBYOBReader( stream: SDReadableStream -) { +): boolean { const reader = stream[reader_]; return isReadableStreamBYOBReader(reader); } export function readableStreamHasDefaultReader( stream: SDReadableStream -) { +): boolean { const reader = stream[reader_]; return isReadableStreamDefaultReader(reader); } @@ -338,7 +341,7 @@ export function readableStreamHasDefaultReader( export function readableStreamCancel( stream: SDReadableStream, reason: shared.ErrorResult -) { +): Promise { if (stream[shared.state_] === "closed") { return Promise.resolve(undefined); } @@ -355,7 +358,7 @@ export function readableStreamCancel( export function readableStreamClose( stream: SDReadableStream -) { +): void { // Assert: stream.[[state]] is "readable". 
stream[shared.state_] = "closed"; const reader = stream[reader_]; @@ -382,7 +385,7 @@ export function readableStreamClose( export function readableStreamError( stream: SDReadableStream, error: shared.ErrorResult -) { +): void { if (stream[shared.state_] !== "readable") { throw new RangeError("Stream is in an invalid state"); } @@ -437,7 +440,7 @@ export function isReadableStreamBYOBReader( export function readableStreamReaderGenericInitialize( reader: SDReadableStreamReader, stream: SDReadableStream -) { +): void { reader[ownerReadableStream_] = stream; stream[reader_] = reader; const streamState = stream[shared.state_]; @@ -455,7 +458,7 @@ export function readableStreamReaderGenericInitialize( export function readableStreamReaderGenericRelease( reader: SDReadableStreamReader -) { +): void { // Assert: reader.[[ownerReadableStream]] is not undefined. // Assert: reader.[[ownerReadableStream]].[[reader]] is reader. const stream = reader[ownerReadableStream_]; @@ -479,7 +482,7 @@ export function readableStreamBYOBReaderRead( reader: SDReadableStreamBYOBReader, view: ArrayBufferView, forAuthorCode = false -) { +): Promise> { const stream = reader[ownerReadableStream_]!; // Assert: stream is not undefined. @@ -516,7 +519,7 @@ export function readableStreamFulfillReadIntoRequest( stream: SDReadableStream, chunk: ArrayBufferView, done: boolean -) { +): void { // TODO remove the "as unknown" cast const reader = (stream[reader_] as unknown) as SDReadableStreamBYOBReader; const readIntoRequest = reader[readIntoRequests_].shift()!; // <-- length check done in caller @@ -529,7 +532,7 @@ export function readableStreamFulfillReadRequest( stream: SDReadableStream, chunk: OutputType, done: boolean -) { +): void { const reader = stream[reader_] as SDReadableStreamDefaultReader; const readRequest = reader[readRequests_].shift()!; // <-- length check done in caller readRequest.resolve( @@ -547,7 +550,7 @@ export function setUpReadableStreamDefaultController( cancelAlgorithm: CancelAlgorithm, highWaterMark: number, sizeAlgorithm: QueuingStrategySizeCallback -) { +): void { // Assert: stream.[[readableStreamController]] is undefined. controller[controlledReadableStream_] = stream; q.resetQueue(controller); @@ -586,20 +589,20 @@ export function isReadableStreamDefaultController( export function readableStreamDefaultControllerHasBackpressure( controller: SDReadableStreamDefaultController -) { +): boolean { return !readableStreamDefaultControllerShouldCallPull(controller); } export function readableStreamDefaultControllerCanCloseOrEnqueue( controller: SDReadableStreamDefaultController -) { +): boolean { const state = controller[controlledReadableStream_][shared.state_]; return controller[closeRequested_] === false && state === "readable"; } export function readableStreamDefaultControllerGetDesiredSize( controller: SDReadableStreamDefaultController -) { +): number | null { const state = controller[controlledReadableStream_][shared.state_]; if (state === "errored") { return null; @@ -612,7 +615,7 @@ export function readableStreamDefaultControllerGetDesiredSize( export function readableStreamDefaultControllerClose( controller: SDReadableStreamDefaultController -) { +): void { // Assert: !ReadableStreamDefaultControllerCanCloseOrEnqueue(controller) is true. 
controller[closeRequested_] = true; const stream = controller[controlledReadableStream_]; @@ -625,7 +628,7 @@ export function readableStreamDefaultControllerClose( export function readableStreamDefaultControllerEnqueue( controller: SDReadableStreamDefaultController, chunk: OutputType -) { +): void { const stream = controller[controlledReadableStream_]; // Assert: !ReadableStreamDefaultControllerCanCloseOrEnqueue(controller) is true. if ( @@ -657,7 +660,7 @@ export function readableStreamDefaultControllerEnqueue( export function readableStreamDefaultControllerError( controller: SDReadableStreamDefaultController, error: shared.ErrorResult -) { +): void { const stream = controller[controlledReadableStream_]; if (stream[shared.state_] !== "readable") { return; @@ -669,7 +672,7 @@ export function readableStreamDefaultControllerError( export function readableStreamDefaultControllerCallPullIfNeeded( controller: SDReadableStreamDefaultController -) { +): void { if (!readableStreamDefaultControllerShouldCallPull(controller)) { return; } @@ -698,7 +701,7 @@ export function readableStreamDefaultControllerCallPullIfNeeded( export function readableStreamDefaultControllerShouldCallPull( controller: SDReadableStreamDefaultController -) { +): boolean { const stream = controller[controlledReadableStream_]; if (!readableStreamDefaultControllerCanCloseOrEnqueue(controller)) { return false; @@ -721,7 +724,7 @@ export function readableStreamDefaultControllerShouldCallPull( export function readableStreamDefaultControllerClearAlgorithms( controller: SDReadableStreamDefaultController -) { +): void { controller[pullAlgorithm_] = undefined!; controller[cancelAlgorithm_] = undefined!; controller[strategySizeAlgorithm_] = undefined!; @@ -737,7 +740,7 @@ export function setUpReadableByteStreamController( cancelAlgorithm: CancelAlgorithm, highWaterMark: number, autoAllocateChunkSize: number | undefined -) { +): void { // Assert: stream.[[readableStreamController]] is undefined. if (stream[readableStreamController_] !== undefined) { throw new TypeError("Cannot reuse streams"); @@ -805,7 +808,7 @@ export function isReadableByteStreamController( export function readableByteStreamControllerCallPullIfNeeded( controller: SDReadableByteStreamController -) { +): void { if (!readableByteStreamControllerShouldCallPull(controller)) { return; } @@ -831,21 +834,21 @@ export function readableByteStreamControllerCallPullIfNeeded( export function readableByteStreamControllerClearAlgorithms( controller: SDReadableByteStreamController -) { +): void { controller[pullAlgorithm_] = undefined!; controller[cancelAlgorithm_] = undefined!; } export function readableByteStreamControllerClearPendingPullIntos( controller: SDReadableByteStreamController -) { +): void { readableByteStreamControllerInvalidateBYOBRequest(controller); controller[pendingPullIntos_] = []; } export function readableByteStreamControllerClose( controller: SDReadableByteStreamController -) { +): void { const stream = controller[controlledReadableByteStream_]; // Assert: controller.[[closeRequested]] is false. // Assert: stream.[[state]] is "readable". @@ -868,7 +871,7 @@ export function readableByteStreamControllerClose( export function readableByteStreamControllerCommitPullIntoDescriptor( stream: SDReadableStream, pullIntoDescriptor: PullIntoDescriptor -) { +): void { // Assert: stream.[[state]] is not "errored". 
let done = false; if (stream[shared.state_] === "closed") { @@ -888,7 +891,7 @@ export function readableByteStreamControllerCommitPullIntoDescriptor( export function readableByteStreamControllerConvertPullIntoDescriptor( pullIntoDescriptor: PullIntoDescriptor -) { +): ArrayBufferView { const { bytesFilled, elementSize } = pullIntoDescriptor; // Assert: bytesFilled <= pullIntoDescriptor.byteLength // Assert: bytesFilled mod elementSize is 0 @@ -902,7 +905,7 @@ export function readableByteStreamControllerConvertPullIntoDescriptor( export function readableByteStreamControllerEnqueue( controller: SDReadableByteStreamController, chunk: ArrayBufferView -) { +): void { const stream = controller[controlledReadableByteStream_]; // Assert: controller.[[closeRequested]] is false. // Assert: stream.[[state]] is "readable". @@ -954,7 +957,7 @@ export function readableByteStreamControllerEnqueueChunkToQueue( buffer: ArrayBufferLike, byteOffset: number, byteLength: number -) { +): void { controller[q.queue_].push({ buffer, byteOffset, byteLength }); controller[q.queueTotalSize_] += byteLength; } @@ -962,7 +965,7 @@ export function readableByteStreamControllerEnqueueChunkToQueue( export function readableByteStreamControllerError( controller: SDReadableByteStreamController, error: shared.ErrorResult -) { +): void { const stream = controller[controlledReadableByteStream_]; if (stream[shared.state_] !== "readable") { return; @@ -977,7 +980,7 @@ export function readableByteStreamControllerFillHeadPullIntoDescriptor( controller: SDReadableByteStreamController, size: number, pullIntoDescriptor: PullIntoDescriptor -) { +): void { // Assert: either controller.[[pendingPullIntos]] is empty, or the first element of controller.[[pendingPullIntos]] is pullIntoDescriptor. readableByteStreamControllerInvalidateBYOBRequest(controller); pullIntoDescriptor.bytesFilled += size; @@ -986,7 +989,7 @@ export function readableByteStreamControllerFillHeadPullIntoDescriptor( export function readableByteStreamControllerFillPullIntoDescriptorFromQueue( controller: SDReadableByteStreamController, pullIntoDescriptor: PullIntoDescriptor -) { +): boolean { const elementSize = pullIntoDescriptor.elementSize; const currentAlignedBytes = pullIntoDescriptor.bytesFilled - @@ -1046,7 +1049,7 @@ export function readableByteStreamControllerFillPullIntoDescriptorFromQueue( export function readableByteStreamControllerGetDesiredSize( controller: SDReadableByteStreamController -) { +): number | null { const stream = controller[controlledReadableByteStream_]; const state = stream[shared.state_]; if (state === "errored") { @@ -1060,7 +1063,7 @@ export function readableByteStreamControllerGetDesiredSize( export function readableByteStreamControllerHandleQueueDrain( controller: SDReadableByteStreamController -) { +): void { // Assert: controller.[[controlledReadableByteStream]].[[state]] is "readable". 
if (controller[q.queueTotalSize_] === 0 && controller[closeRequested_]) { readableByteStreamControllerClearAlgorithms(controller); @@ -1072,7 +1075,7 @@ export function readableByteStreamControllerHandleQueueDrain( export function readableByteStreamControllerInvalidateBYOBRequest( controller: SDReadableByteStreamController -) { +): void { const byobRequest = controller[byobRequest_]; if (byobRequest === undefined) { return; @@ -1084,7 +1087,7 @@ export function readableByteStreamControllerInvalidateBYOBRequest( export function readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue( controller: SDReadableByteStreamController -) { +): void { // Assert: controller.[[closeRequested]] is false. const pendingPullIntos = controller[pendingPullIntos_]; while (pendingPullIntos.length > 0) { @@ -1111,7 +1114,7 @@ export function readableByteStreamControllerPullInto( controller: SDReadableByteStreamController, view: ArrayBufferView, forAuthorCode: boolean -) { +): Promise> { const stream = controller[controlledReadableByteStream_]; const elementSize = (view as Uint8Array).BYTES_PER_ELEMENT || 1; // DataView exposes this in Webkit as 1, is not present in FF or Blink @@ -1176,7 +1179,7 @@ export function readableByteStreamControllerPullInto( export function readableByteStreamControllerRespond( controller: SDReadableByteStreamController, bytesWritten: number -) { +): void { bytesWritten = Number(bytesWritten); if (!shared.isFiniteNonNegativeNumber(bytesWritten)) { throw new RangeError("bytesWritten must be a finite, non-negative number"); @@ -1188,7 +1191,7 @@ export function readableByteStreamControllerRespond( export function readableByteStreamControllerRespondInClosedState( controller: SDReadableByteStreamController, firstDescriptor: PullIntoDescriptor -) { +): void { firstDescriptor.buffer = shared.transferArrayBuffer(firstDescriptor.buffer); // Assert: firstDescriptor.[[bytesFilled]] is 0. const stream = controller[controlledReadableByteStream_]; @@ -1209,7 +1212,7 @@ export function readableByteStreamControllerRespondInReadableState( controller: SDReadableByteStreamController, bytesWritten: number, pullIntoDescriptor: PullIntoDescriptor -) { +): void { if ( pullIntoDescriptor.bytesFilled + bytesWritten > pullIntoDescriptor.byteLength @@ -1257,7 +1260,7 @@ export function readableByteStreamControllerRespondInReadableState( export function readableByteStreamControllerRespondInternal( controller: SDReadableByteStreamController, bytesWritten: number -) { +): void { const firstDescriptor = controller[pendingPullIntos_][0]; const stream = controller[controlledReadableByteStream_]; if (stream[shared.state_] === "closed") { @@ -1282,7 +1285,7 @@ export function readableByteStreamControllerRespondInternal( export function readableByteStreamControllerRespondWithNewView( controller: SDReadableByteStreamController, view: ArrayBufferView -) { +): void { // Assert: controller.[[pendingPullIntos]] is not empty. 
const firstDescriptor = controller[pendingPullIntos_][0]; if ( @@ -1300,7 +1303,7 @@ export function readableByteStreamControllerRespondWithNewView( export function readableByteStreamControllerShiftPendingPullInto( controller: SDReadableByteStreamController -) { +): PullIntoDescriptor | undefined { const descriptor = controller[pendingPullIntos_].shift(); readableByteStreamControllerInvalidateBYOBRequest(controller); return descriptor; @@ -1308,7 +1311,7 @@ export function readableByteStreamControllerShiftPendingPullInto( export function readableByteStreamControllerShouldCallPull( controller: SDReadableByteStreamController -) { +): boolean { // Let stream be controller.[[controlledReadableByteStream]]. const stream = controller[controlledReadableByteStream_]; if (stream[shared.state_] !== "readable") { @@ -1341,7 +1344,7 @@ export function setUpReadableStreamBYOBRequest( request: SDReadableStreamBYOBRequest, controller: SDReadableByteStreamController, view: ArrayBufferView -) { +): void { if (!isReadableByteStreamController(controller)) { throw new TypeError(); } diff --git a/cli/js/streams/readable-stream-byob-reader.ts b/cli/js/streams/readable-stream-byob-reader.ts index 18bbb3802bf2c5..0f9bfb037848d3 100644 --- a/cli/js/streams/readable-stream-byob-reader.ts +++ b/cli/js/streams/readable-stream-byob-reader.ts @@ -15,7 +15,9 @@ export class SDReadableStreamBYOBReader implements rs.SDReadableStreamBYOBReader { [rs.closedPromise_]: shared.ControlledPromise; [rs.ownerReadableStream_]: rs.SDReadableStream | undefined; - [rs.readIntoRequests_]: Array>>; + [rs.readIntoRequests_]: Array< + rs.ReadRequest> + >; constructor(stream: rs.SDReadableStream) { if (!rs.isReadableStream(stream)) { diff --git a/cli/js/streams/readable-stream-byob-request.ts b/cli/js/streams/readable-stream-byob-request.ts index ff96613084fb9c..25b937f1088535 100644 --- a/cli/js/streams/readable-stream-byob-request.ts +++ b/cli/js/streams/readable-stream-byob-request.ts @@ -27,7 +27,7 @@ export class ReadableStreamBYOBRequest { return this[rs.view_]!; } - respond(bytesWritten: number) { + respond(bytesWritten: number): void { if (!rs.isReadableStreamBYOBRequest(this)) { throw new TypeError(); } @@ -41,7 +41,7 @@ export class ReadableStreamBYOBRequest { ); } - respondWithNewView(view: ArrayBufferView) { + respondWithNewView(view: ArrayBufferView): void { if (!rs.isReadableStreamBYOBRequest(this)) { throw new TypeError(); } diff --git a/cli/js/streams/readable-stream-default-controller.ts b/cli/js/streams/readable-stream-default-controller.ts index dd8b98a9d7ff85..e9ddce1bcba7ab 100644 --- a/cli/js/streams/readable-stream-default-controller.ts +++ b/cli/js/streams/readable-stream-default-controller.ts @@ -8,6 +8,9 @@ * https://github.com/stardazed/sd-streams */ +/* eslint-disable @typescript-eslint/no-explicit-any */ +// TODO reenable this lint here + import * as rs from "./readable-internals.ts"; import * as shared from "./shared-internals.ts"; import * as q from "./queue-mixin.ts"; @@ -37,7 +40,7 @@ export class ReadableStreamDefaultController return rs.readableStreamDefaultControllerGetDesiredSize(this); } - close() { + close(): void { if (!rs.isReadableStreamDefaultController(this)) { throw new TypeError(); } @@ -49,7 +52,7 @@ export class ReadableStreamDefaultController rs.readableStreamDefaultControllerClose(this); } - enqueue(chunk?: OutputType) { + enqueue(chunk?: OutputType): void { if (!rs.isReadableStreamDefaultController(this)) { throw new TypeError(); } @@ -61,21 +64,23 @@ export class 
ReadableStreamDefaultController rs.readableStreamDefaultControllerEnqueue(this, chunk!); } - error(e?: shared.ErrorResult) { + error(e?: shared.ErrorResult): void { if (!rs.isReadableStreamDefaultController(this)) { throw new TypeError(); } rs.readableStreamDefaultControllerError(this, e); } - [rs.cancelSteps_](reason: shared.ErrorResult) { + [rs.cancelSteps_](reason: shared.ErrorResult): Promise { q.resetQueue(this); const result = this[rs.cancelAlgorithm_](reason); rs.readableStreamDefaultControllerClearAlgorithms(this); return result; } - [rs.pullSteps_](forAuthorCode: boolean) { + [rs.pullSteps_]( + forAuthorCode: boolean + ): Promise> { const stream = this[rs.controlledReadableStream_]; if (this[q.queue_].length > 0) { const chunk = q.dequeueValue(this); @@ -106,10 +111,10 @@ export function setUpReadableStreamDefaultControllerFromUnderlyingSource< underlyingSource: UnderlyingSource, highWaterMark: number, sizeAlgorithm: QueuingStrategySizeCallback -) { +): void { // Assert: underlyingSource is not undefined. const controller = Object.create(ReadableStreamDefaultController.prototype); - const startAlgorithm = () => { + const startAlgorithm = (): any => { return shared.invokeOrNoop(underlyingSource, "start", [controller]); }; const pullAlgorithm = shared.createAlgorithmFromUnderlyingMethod( diff --git a/cli/js/streams/readable-stream-default-reader.ts b/cli/js/streams/readable-stream-default-reader.ts index 74453af527273b..eb1910a9d5263a 100644 --- a/cli/js/streams/readable-stream-default-reader.ts +++ b/cli/js/streams/readable-stream-default-reader.ts @@ -60,7 +60,7 @@ export class ReadableStreamDefaultReader return rs.readableStreamDefaultReaderRead(this, true); } - releaseLock() { + releaseLock(): void { if (!rs.isReadableStreamDefaultReader(this)) { throw new TypeError(); } diff --git a/cli/js/streams/readable-stream.ts b/cli/js/streams/readable-stream.ts index 49da4250153204..cadd858ddfa7af 100644 --- a/cli/js/streams/readable-stream.ts +++ b/cli/js/streams/readable-stream.ts @@ -8,10 +8,9 @@ * https://github.com/stardazed/sd-streams */ -/* eslint prefer-const: "warn" */ +/* eslint prefer-const: "off" */ // TODO remove this, suppressed because of // 284:7 error 'branch1' is never reassigned. Use 'const' instead prefer-const -// 285:7 error 'branch2' is never reassigned. Use 'const' instead prefer-const import * as rs from "./readable-internals.ts"; import * as ws from "./writable-internals.ts"; import * as shared from "./shared-internals.ts"; @@ -202,12 +201,12 @@ export function createReadableStream( cancelAlgorithm: rs.CancelAlgorithm, highWaterMark?: number, sizeAlgorithm?: QueuingStrategySizeCallback -) { +): SDReadableStream { if (highWaterMark === undefined) { highWaterMark = 1; } if (sizeAlgorithm === undefined) { - sizeAlgorithm = () => 1; + sizeAlgorithm = (): number => 1; } // Assert: ! IsNonNegativeNumber(highWaterMark) is true.
@@ -236,7 +235,7 @@ export function createReadableByteStream( cancelAlgorithm: rs.CancelAlgorithm, highWaterMark?: number, autoAllocateChunkSize?: number -) { +): SDReadableStream { if (highWaterMark === undefined) { highWaterMark = 0; } @@ -274,7 +273,7 @@ export function createReadableByteStream( export function readableStreamTee( stream: SDReadableStream, cloneForBranch2: boolean -) { +): [SDReadableStream, SDReadableStream] { if (!rs.isReadableStream(stream)) { throw new TypeError(); } @@ -291,7 +290,7 @@ export function readableStreamTee( let cancelResolve: (reason: shared.ErrorResult) => void; const cancelPromise = new Promise(resolve => (cancelResolve = resolve)); - const pullAlgorithm = () => { + const pullAlgorithm = (): Promise => { return rs .readableStreamDefaultReaderRead(reader) .then(({ value, done }) => { @@ -335,7 +334,7 @@ export function readableStreamTee( }); }; - const cancel1Algorithm = (reason: shared.ErrorResult) => { + const cancel1Algorithm = (reason: shared.ErrorResult): Promise => { canceled1 = true; reason1 = reason; if (canceled2) { @@ -345,7 +344,7 @@ export function readableStreamTee( return cancelPromise; }; - const cancel2Algorithm = (reason: shared.ErrorResult) => { + const cancel2Algorithm = (reason: shared.ErrorResult): Promise => { canceled2 = true; reason2 = reason; if (canceled1) { @@ -355,7 +354,7 @@ export function readableStreamTee( return cancelPromise; }; - const startAlgorithm = () => undefined; + const startAlgorithm = (): undefined => undefined; branch1 = createReadableStream( startAlgorithm, pullAlgorithm, diff --git a/cli/js/streams/shared-internals.ts b/cli/js/streams/shared-internals.ts index 3442e5323dfe80..90e66e59173148 100644 --- a/cli/js/streams/shared-internals.ts +++ b/cli/js/streams/shared-internals.ts @@ -8,6 +8,9 @@ * https://github.com/stardazed/sd-streams */ +/* eslint-disable @typescript-eslint/no-explicit-any */ +// TODO don't disable this warning + import { AbortSignal, QueuingStrategySizeCallback } from "../dom_types.ts"; import { DenoError, ErrorKind } from "../errors.ts"; @@ -23,7 +26,7 @@ export type ErrorResult = any; // --------- -export function isInteger(value: number) { +export function isInteger(value: number): boolean { if (!isFinite(value)) { // covers NaN, +Infinity and -Infinity return false; @@ -32,7 +35,7 @@ export function isInteger(value: number) { return Math.floor(absValue) === absValue; } -export function isFiniteNonNegativeNumber(value: unknown) { +export function isFiniteNonNegativeNumber(value: unknown): boolean { if (!(typeof value === "number" && isFinite(value))) { // covers NaN, +Infinity and -Infinity return false; @@ -59,7 +62,7 @@ export function invokeOrNoop( o: O, p: P, args: any[] -) { +): any { // Assert: O is not undefined. // Assert: IsPropertyKey(P) is true. // Assert: args is a List. @@ -83,7 +86,7 @@ export function cloneArrayBuffer( ) as InstanceType; } -export function transferArrayBuffer(buffer: ArrayBufferLike) { +export function transferArrayBuffer(buffer: ArrayBufferLike): ArrayBuffer { // This would in a JS engine context detach the buffer's backing store and return // a new ArrayBuffer with the same backing store, invalidating `buffer`, // i.e. a move operation in C++ parlance. 
@@ -97,7 +100,7 @@ export function copyDataBlockBytes( fromBlock: ArrayBufferLike, fromIndex: number, count: number -) { +): void { new Uint8Array(toBlock, toIndex, count).set( new Uint8Array(fromBlock, fromIndex, count) ); @@ -216,7 +219,7 @@ export function promiseCall( f: F, v: object | undefined, args: any[] -) { +): Promise { // tslint:disable-line:ban-types try { const result = Function.prototype.apply.call(f, v, args); @@ -229,15 +232,15 @@ export function promiseCall( export function createAlgorithmFromUnderlyingMethod< O extends object, K extends keyof O ->(obj: O, methodName: K, extraArgs: any[]) { +>(obj: O, methodName: K, extraArgs: any[]): any { const method = obj[methodName]; if (method === undefined) { - return () => Promise.resolve(undefined); + return (): any => Promise.resolve(undefined); } if (typeof method !== "function") { throw new TypeError(`Field "${methodName}" is not a function.`); } - return function(...fnArgs: any[]) { + return function(...fnArgs: any[]): any { return promiseCall(method, obj, fnArgs.concat(extraArgs)); }; } @@ -250,7 +253,7 @@ function createIterResultObject(value: T, done: boolean): IteratorResult { } */ -export function validateAndNormalizeHighWaterMark(hwm: unknown) { +export function validateAndNormalizeHighWaterMark(hwm: unknown): number { const highWaterMark = Number(hwm); if (isNaN(highWaterMark) || highWaterMark < 0) { throw new RangeError( @@ -266,7 +269,7 @@ export function makeSizeAlgorithmFromSizeFunction( if (typeof sizeFn !== "function" && typeof sizeFn !== "undefined") { throw new TypeError("size function must be undefined or a function"); } - return function(chunk: T) { + return function(chunk: T): number { if (typeof sizeFn === "function") { return sizeFn(chunk); } @@ -294,11 +297,11 @@ export function createControlledPromise(): ControlledPromise { state: ControlledPromiseState.Pending } as ControlledPromise; conProm.promise = new Promise(function(resolve, reject) { - conProm.resolve = function(v?: V) { + conProm.resolve = function(v?: V): void { conProm.state = ControlledPromiseState.Resolved; resolve(v); }; - conProm.reject = function(e?: ErrorResult) { + conProm.reject = function(e?: ErrorResult): void { conProm.state = ControlledPromiseState.Rejected; reject(e); }; diff --git a/cli/js/streams/strategies.ts b/cli/js/streams/strategies.ts index 84c09d84583a1e..5f7ffc6324f512 100644 --- a/cli/js/streams/strategies.ts +++ b/cli/js/streams/strategies.ts @@ -8,6 +8,9 @@ * https://github.com/stardazed/sd-streams */ +/* eslint-disable @typescript-eslint/no-explicit-any */ +// TODO reenable this lint here + import { QueuingStrategy } from "../dom_types.ts"; export class ByteLengthQueuingStrategy @@ -18,7 +21,7 @@ export class ByteLengthQueuingStrategy this.highWaterMark = options.highWaterMark; } - size(chunk: ArrayBufferView) { + size(chunk: ArrayBufferView): number { return chunk.byteLength; } } @@ -30,7 +33,7 @@ export class CountQueuingStrategy implements QueuingStrategy { this.highWaterMark = options.highWaterMark; } - size() { + size(): number { return 1; } } diff --git a/cli/js/streams/transform-internals.ts b/cli/js/streams/transform-internals.ts index dd481dc8bd65e6..e659a4965bbb7f 100644 --- a/cli/js/streams/transform-internals.ts +++ b/cli/js/streams/transform-internals.ts @@ -8,6 +8,9 @@ * https://github.com/stardazed/sd-streams */ +/* eslint-disable @typescript-eslint/no-explicit-any */ +// TODO reenable this lint here + import * as rs from "./readable-internals.ts"; import * as ws from 
"./writable-internals.ts"; import * as shared from "./shared-internals.ts"; @@ -102,17 +105,17 @@ export function initializeTransformStream( writableSizeAlgorithm: QueuingStrategySizeCallback, readableHighWaterMark: number, readableSizeAlgorithm: QueuingStrategySizeCallback -) { - const startAlgorithm = function() { +): void { + const startAlgorithm = function(): Promise { return startPromise; }; - const writeAlgorithm = function(chunk: InputType) { + const writeAlgorithm = function(chunk: InputType): Promise { return transformStreamDefaultSinkWriteAlgorithm(stream, chunk); }; - const abortAlgorithm = function(reason: shared.ErrorResult) { + const abortAlgorithm = function(reason: shared.ErrorResult): Promise { return transformStreamDefaultSinkAbortAlgorithm(stream, reason); }; - const closeAlgorithm = function() { + const closeAlgorithm = function(): Promise { return transformStreamDefaultSinkCloseAlgorithm(stream); }; stream[writable_] = createWritableStream( @@ -124,10 +127,12 @@ export function initializeTransformStream( writableSizeAlgorithm ); - const pullAlgorithm = function() { + const pullAlgorithm = function(): Promise { return transformStreamDefaultSourcePullAlgorithm(stream); }; - const cancelAlgorithm = function(reason: shared.ErrorResult) { + const cancelAlgorithm = function( + reason: shared.ErrorResult + ): Promise { transformStreamErrorWritableAndUnblockWrite(stream, reason); return Promise.resolve(undefined); }; @@ -148,7 +153,7 @@ export function initializeTransformStream( export function transformStreamError( stream: TransformStream, error: shared.ErrorResult -) { +): void { rs.readableStreamDefaultControllerError( stream[readable_][ rs.readableStreamController_ @@ -161,7 +166,10 @@ export function transformStreamError( export function transformStreamErrorWritableAndUnblockWrite< InputType, OutputType ->(stream: TransformStream, error: shared.ErrorResult) { +>( + stream: TransformStream, + error: shared.ErrorResult +): void { transformStreamDefaultControllerClearAlgorithms( stream[transformStreamController_] ); @@ -177,7 +185,7 @@ export function transformStreamErrorWritableAndUnblockWrite< export function transformStreamSetBackpressure( stream: TransformStream, backpressure: boolean -) { +): void { // Assert: stream.[[backpressure]] is not backpressure. if (stream[backpressure_] !== undefined) { stream[backpressureChangePromise_]!.resolve(undefined); @@ -202,7 +210,7 @@ export function setUpTransformStreamDefaultController( controller: TransformStreamDefaultController, transformAlgorithm: TransformAlgorithm, flushAlgorithm: FlushAlgorithm -) { +): void { // Assert: ! IsTransformStream(stream) is true. // Assert: stream.[[transformStreamController]] is undefined. controller[controlledTransformStream_] = stream; @@ -214,7 +222,7 @@ export function setUpTransformStreamDefaultController( export function transformStreamDefaultControllerClearAlgorithms< InputType, OutputType ->(controller: TransformStreamDefaultController) { +>(controller: TransformStreamDefaultController): void { // Use ! assertions to override type check here, this way we don't // have to perform type checks/assertions everywhere else. 
controller[transformAlgorithm_] = undefined!; @@ -224,7 +232,7 @@ export function transformStreamDefaultControllerClearAlgorithms< export function transformStreamDefaultControllerEnqueue( controller: TransformStreamDefaultController, chunk: OutputType -) { +): void { const stream = controller[controlledTransformStream_]; const readableController = stream[readable_][ rs.readableStreamController_ @@ -252,7 +260,7 @@ export function transformStreamDefaultControllerEnqueue( export function transformStreamDefaultControllerError( controller: TransformStreamDefaultController, error: shared.ErrorResult -) { +): void { transformStreamError(controller[controlledTransformStream_], error); } @@ -262,7 +270,7 @@ export function transformStreamDefaultControllerPerformTransform< >( controller: TransformStreamDefaultController, chunk: InputType -) { +): Promise { const transformPromise = controller[transformAlgorithm_](chunk); return transformPromise.catch(error => { transformStreamError(controller[controlledTransformStream_], error); @@ -273,7 +281,7 @@ export function transformStreamDefaultControllerPerformTransform< export function transformStreamDefaultControllerTerminate< InputType, OutputType ->(controller: TransformStreamDefaultController) { +>(controller: TransformStreamDefaultController): void { const stream = controller[controlledTransformStream_]; const readableController = stream[readable_][ rs.readableStreamController_ @@ -290,7 +298,7 @@ export function transformStreamDefaultControllerTerminate< export function transformStreamDefaultSinkWriteAlgorithm( stream: TransformStream, chunk: InputType -) { +): Promise { // Assert: stream.[[writable]].[[state]] is "writable". const controller = stream[transformStreamController_]; if (stream[backpressure_]) { @@ -315,14 +323,14 @@ export function transformStreamDefaultSinkWriteAlgorithm( export function transformStreamDefaultSinkAbortAlgorithm( stream: TransformStream, reason: shared.ErrorResult -) { +): Promise { transformStreamError(stream, reason); return Promise.resolve(undefined); } export function transformStreamDefaultSinkCloseAlgorithm( stream: TransformStream -) { +): Promise { const readable = stream[readable_]; const controller = stream[transformStreamController_]; const flushPromise = controller[flushAlgorithm_](); @@ -354,7 +362,7 @@ export function transformStreamDefaultSinkCloseAlgorithm( export function transformStreamDefaultSourcePullAlgorithm< InputType, OutputType ->(stream: TransformStream) { +>(stream: TransformStream): Promise { // Assert: stream.[[backpressure]] is true. // Assert: stream.[[backpressureChangePromise]] is not undefined. 
transformStreamSetBackpressure(stream, false); diff --git a/cli/js/streams/transform-stream.ts b/cli/js/streams/transform-stream.ts index 73ebaf50dddf46..dfd241e143abed 100644 --- a/cli/js/streams/transform-stream.ts +++ b/cli/js/streams/transform-stream.ts @@ -8,6 +8,9 @@ * https://github.com/stardazed/sd-streams */ +/* eslint-disable @typescript-eslint/no-explicit-any */ +// TODO reenable this lint here + import * as rs from "./readable-internals.ts"; import * as ws from "./writable-internals.ts"; import * as ts from "./transform-internals.ts"; @@ -99,7 +102,7 @@ function setUpTransformStreamDefaultControllerFromTransformer< >( stream: TransformStream, transformer: ts.Transformer -) { +): void { const controller = Object.create( TransformStreamDefaultController.prototype ) as TransformStreamDefaultController; @@ -112,11 +115,11 @@ function setUpTransformStreamDefaultControllerFromTransformer< "`transform` field of the transformer must be a function" ); } - transformAlgorithm = (chunk: InputType) => + transformAlgorithm = (chunk: InputType): Promise => shared.promiseCall(transformMethod, transformer, [chunk, controller]); } else { // use identity transform - transformAlgorithm = function(chunk: InputType) { + transformAlgorithm = function(chunk: InputType): Promise { try { // OutputType and InputType are the same here ts.transformStreamDefaultControllerEnqueue( diff --git a/cli/js/streams/writable-internals.ts b/cli/js/streams/writable-internals.ts index e796a9d16be928..df0f7a0b8e3b33 100644 --- a/cli/js/streams/writable-internals.ts +++ b/cli/js/streams/writable-internals.ts @@ -8,6 +8,9 @@ * https://github.com/stardazed/sd-streams */ +/* eslint-disable @typescript-eslint/no-explicit-any */ +// TODO reenable this lint here + import * as shared from "./shared-internals.ts"; import * as q from "./queue-mixin.ts"; @@ -147,7 +150,7 @@ export declare class WritableStream { export function initializeWritableStream( stream: WritableStream -) { +): void { stream[shared.state_] = "writable"; stream[shared.storedError_] = undefined; stream[writer_] = undefined; @@ -169,14 +172,14 @@ export function isWritableStream(value: unknown): value is WritableStream { export function isWritableStreamLocked( stream: WritableStream -) { +): boolean { return stream[writer_] !== undefined; } export function writableStreamAbort( stream: WritableStream, reason: shared.ErrorResult -) { +): Promise { const state = stream[shared.state_]; if (state === "closed" || state === "errored") { return Promise.resolve(undefined); @@ -210,7 +213,7 @@ export function writableStreamAbort( export function writableStreamAddWriteRequest( stream: WritableStream -) { +): Promise { // Assert: !IsWritableStreamLocked(stream) is true. // Assert: stream.[[state]] is "writable". const writePromise = shared.createControlledPromise(); @@ -221,7 +224,7 @@ export function writableStreamAddWriteRequest( export function writableStreamDealWithRejection( stream: WritableStream, error: shared.ErrorResult -) { +): void { const state = stream[shared.state_]; if (state === "writable") { writableStreamStartErroring(stream, error); @@ -234,7 +237,7 @@ export function writableStreamDealWithRejection( export function writableStreamStartErroring( stream: WritableStream, reason: shared.ErrorResult -) { +): void { // Assert: stream.[[storedError]] is undefined. // Assert: stream.[[state]] is "writable". 
const controller = stream[writableStreamController_]!; @@ -255,7 +258,7 @@ export function writableStreamStartErroring( export function writableStreamFinishErroring( stream: WritableStream -) { +): void { // Assert: stream.[[state]] is "erroring". // Assert: writableStreamHasOperationMarkedInFlight(stream) is false. stream[shared.state_] = "errored"; @@ -293,7 +296,7 @@ export function writableStreamFinishErroring( export function writableStreamFinishInFlightWrite( stream: WritableStream -) { +): void { // Assert: stream.[[inFlightWriteRequest]] is not undefined. stream[inFlightWriteRequest_]!.resolve(undefined); stream[inFlightWriteRequest_] = undefined; @@ -302,7 +305,7 @@ export function writableStreamFinishInFlightWrite( export function writableStreamFinishInFlightWriteWithError( stream: WritableStream, error: shared.ErrorResult -) { +): void { // Assert: stream.[[inFlightWriteRequest]] is not undefined. stream[inFlightWriteRequest_]!.reject(error); stream[inFlightWriteRequest_] = undefined; @@ -312,7 +315,7 @@ export function writableStreamFinishInFlightWriteWithError( export function writableStreamFinishInFlightClose( stream: WritableStream -) { +): void { // Assert: stream.[[inFlightCloseRequest]] is not undefined. stream[inFlightCloseRequest_]!.resolve(undefined); stream[inFlightCloseRequest_] = undefined; @@ -337,7 +340,7 @@ export function writableStreamFinishInFlightClose( export function writableStreamFinishInFlightCloseWithError( stream: WritableStream, error: shared.ErrorResult -) { +): void { // Assert: stream.[[inFlightCloseRequest]] is not undefined. stream[inFlightCloseRequest_]!.reject(error); stream[inFlightCloseRequest_] = undefined; @@ -351,7 +354,7 @@ export function writableStreamFinishInFlightCloseWithError( export function writableStreamCloseQueuedOrInFlight( stream: WritableStream -) { +): boolean { return ( stream[closeRequest_] !== undefined || stream[inFlightCloseRequest_] !== undefined @@ -360,7 +363,7 @@ export function writableStreamCloseQueuedOrInFlight( export function writableStreamHasOperationMarkedInFlight( stream: WritableStream -) { +): boolean { return ( stream[inFlightWriteRequest_] !== undefined || stream[inFlightCloseRequest_] !== undefined @@ -369,7 +372,7 @@ export function writableStreamHasOperationMarkedInFlight( export function writableStreamMarkCloseRequestInFlight( stream: WritableStream -) { +): void { // Assert: stream.[[inFlightCloseRequest]] is undefined. // Assert: stream.[[closeRequest]] is not undefined. stream[inFlightCloseRequest_] = stream[closeRequest_]; @@ -378,7 +381,7 @@ export function writableStreamMarkCloseRequestInFlight( export function writableStreamMarkFirstWriteRequestInFlight( stream: WritableStream -) { +): void { // Assert: stream.[[inFlightWriteRequest]] is undefined. // Assert: stream.[[writeRequests]] is not empty. const writeRequest = stream[writeRequests_].shift()!; @@ -387,7 +390,7 @@ export function writableStreamMarkFirstWriteRequestInFlight( export function writableStreamRejectCloseAndClosedPromiseIfNeeded( stream: WritableStream -) { +): void { // Assert: stream.[[state]] is "errored". const closeRequest = stream[closeRequest_]; if (closeRequest !== undefined) { @@ -405,7 +408,7 @@ export function writableStreamRejectCloseAndClosedPromiseIfNeeded( export function writableStreamUpdateBackpressure( stream: WritableStream, backpressure: boolean -) { +): void { // Assert: stream.[[state]] is "writable". // Assert: !WritableStreamCloseQueuedOrInFlight(stream) is false. 
const writer = stream[writer_]; @@ -433,7 +436,7 @@ export function isWritableStreamDefaultWriter( export function writableStreamDefaultWriterAbort( writer: WritableStreamDefaultWriter, reason: shared.ErrorResult -) { +): Promise { const stream = writer[ownerWritableStream_]!; // Assert: stream is not undefined. return writableStreamAbort(stream, reason); @@ -441,7 +444,7 @@ export function writableStreamDefaultWriterAbort( export function writableStreamDefaultWriterClose( writer: WritableStreamDefaultWriter -) { +): Promise { const stream = writer[ownerWritableStream_]!; // Assert: stream is not undefined. const state = stream[shared.state_]; @@ -463,7 +466,7 @@ export function writableStreamDefaultWriterClose( export function writableStreamDefaultWriterCloseWithErrorPropagation( writer: WritableStreamDefaultWriter -) { +): Promise { const stream = writer[ownerWritableStream_]!; // Assert: stream is not undefined. const state = stream[shared.state_]; @@ -479,7 +482,10 @@ export function writableStreamDefaultWriterCloseWithErrorPropagation( export function writableStreamDefaultWriterEnsureClosedPromiseRejected< InputType ->(writer: WritableStreamDefaultWriter, error: shared.ErrorResult) { +>( + writer: WritableStreamDefaultWriter, + error: shared.ErrorResult +): void { const closedPromise = writer[closedPromise_]; if (closedPromise.state === shared.ControlledPromiseState.Pending) { closedPromise.reject(error); @@ -492,7 +498,10 @@ export function writableStreamDefaultWriterEnsureClosedPromiseRejected< export function writableStreamDefaultWriterEnsureReadyPromiseRejected< InputType ->(writer: WritableStreamDefaultWriter, error: shared.ErrorResult) { +>( + writer: WritableStreamDefaultWriter, + error: shared.ErrorResult +): void { const readyPromise = writer[readyPromise_]; if (readyPromise.state === shared.ControlledPromiseState.Pending) { readyPromise.reject(error); @@ -505,7 +514,7 @@ export function writableStreamDefaultWriterEnsureReadyPromiseRejected< export function writableStreamDefaultWriterGetDesiredSize( writer: WritableStreamDefaultWriter -) { +): number | null { const stream = writer[ownerWritableStream_]!; const state = stream[shared.state_]; if (state === "errored" || state === "erroring") { @@ -521,7 +530,7 @@ export function writableStreamDefaultWriterGetDesiredSize( export function writableStreamDefaultWriterRelease( writer: WritableStreamDefaultWriter -) { +): void { const stream = writer[ownerWritableStream_]!; // Assert: stream is not undefined. // Assert: stream.[[writer]] is writer. @@ -535,7 +544,7 @@ export function writableStreamDefaultWriterRelease( export function writableStreamDefaultWriterWrite( writer: WritableStreamDefaultWriter, chunk: InputType -) { +): Promise { const stream = writer[ownerWritableStream_]!; // Assert: stream is not undefined. const controller = stream[writableStreamController_]!; @@ -575,7 +584,7 @@ export function setUpWritableStreamDefaultController( abortAlgorithm: AbortAlgorithm, highWaterMark: number, sizeAlgorithm: QueuingStrategySizeCallback -) { +): void { if (!isWritableStream(stream)) { throw new TypeError(); } @@ -623,7 +632,7 @@ export function isWritableStreamDefaultController( export function writableStreamDefaultControllerClearAlgorithms( controller: WritableStreamDefaultController -) { +): void { // Use ! assertions to override type check here, this way we don't // have to perform type checks/assertions everywhere else. 
controller[writeAlgorithm_] = undefined!; @@ -634,7 +643,7 @@ export function writableStreamDefaultControllerClearAlgorithms( export function writableStreamDefaultControllerClose( controller: WritableStreamDefaultController -) { +): void { q.enqueueValueWithSize(controller, "close", 0); writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); } @@ -642,7 +651,7 @@ export function writableStreamDefaultControllerClose( export function writableStreamDefaultControllerGetChunkSize( controller: WritableStreamDefaultController, chunk: InputType -) { +): number { let chunkSize: number; try { chunkSize = controller[strategySizeAlgorithm_](chunk); @@ -655,7 +664,7 @@ export function writableStreamDefaultControllerGetChunkSize( export function writableStreamDefaultControllerGetDesiredSize( controller: WritableStreamDefaultController -) { +): number { return controller[strategyHWM_] - controller[q.queueTotalSize_]; } @@ -663,7 +672,7 @@ export function writableStreamDefaultControllerWrite( controller: WritableStreamDefaultController, chunk: InputType, chunkSize: number -) { +): void { try { q.enqueueValueWithSize(controller, { chunk }, chunkSize); } catch (error) { @@ -685,7 +694,7 @@ export function writableStreamDefaultControllerWrite( export function writableStreamDefaultControllerAdvanceQueueIfNeeded( controller: WritableStreamDefaultController -) { +): void { if (!controller[started_]) { return; } @@ -715,7 +724,7 @@ export function writableStreamDefaultControllerAdvanceQueueIfNeeded( export function writableStreamDefaultControllerErrorIfNeeded( controller: WritableStreamDefaultController, error: shared.ErrorResult -) { +): void { if (controller[controlledWritableStream_][shared.state_] === "writable") { writableStreamDefaultControllerError(controller, error); } @@ -723,7 +732,7 @@ export function writableStreamDefaultControllerErrorIfNeeded( export function writableStreamDefaultControllerProcessClose( controller: WritableStreamDefaultController -) { +): void { const stream = controller[controlledWritableStream_]; writableStreamMarkCloseRequestInFlight(stream); q.dequeueValue(controller); @@ -743,7 +752,7 @@ export function writableStreamDefaultControllerProcessClose( export function writableStreamDefaultControllerProcessWrite( controller: WritableStreamDefaultController, chunk: InputType -) { +): void { const stream = controller[controlledWritableStream_]; writableStreamMarkFirstWriteRequestInFlight(stream); controller[writeAlgorithm_](chunk).then( @@ -774,7 +783,7 @@ export function writableStreamDefaultControllerProcessWrite( export function writableStreamDefaultControllerGetBackpressure( controller: WritableStreamDefaultController -) { +): boolean { const desiredSize = writableStreamDefaultControllerGetDesiredSize(controller); return desiredSize <= 0; } @@ -782,7 +791,7 @@ export function writableStreamDefaultControllerGetBackpressure( export function writableStreamDefaultControllerError( controller: WritableStreamDefaultController, error: shared.ErrorResult -) { +): void { const stream = controller[controlledWritableStream_]; // Assert: stream.[[state]] is "writable". 
writableStreamDefaultControllerClearAlgorithms(controller); diff --git a/cli/js/streams/writable-stream-default-controller.ts b/cli/js/streams/writable-stream-default-controller.ts index c2f065bc0b1232..9a3886d21c9e7b 100644 --- a/cli/js/streams/writable-stream-default-controller.ts +++ b/cli/js/streams/writable-stream-default-controller.ts @@ -8,6 +8,9 @@ * https://github.com/stardazed/sd-streams */ +/* eslint-disable @typescript-eslint/no-explicit-any */ +// TODO reenable this lint here + import * as ws from "./writable-internals.ts"; import * as shared from "./shared-internals.ts"; import * as q from "./queue-mixin.ts"; @@ -31,7 +34,7 @@ export class WritableStreamDefaultController throw new TypeError(); } - error(e?: shared.ErrorResult) { + error(e?: shared.ErrorResult): void { if (!ws.isWritableStreamDefaultController(this)) { throw new TypeError(); } @@ -42,13 +45,13 @@ export class WritableStreamDefaultController ws.writableStreamDefaultControllerError(this, e); } - [ws.abortSteps_](reason: shared.ErrorResult) { + [ws.abortSteps_](reason: shared.ErrorResult): Promise { const result = this[ws.abortAlgorithm_](reason); ws.writableStreamDefaultControllerClearAlgorithms(this); return result; } - [ws.errorSteps_]() { + [ws.errorSteps_](): void { q.resetQueue(this); } } @@ -60,13 +63,13 @@ export function setUpWritableStreamDefaultControllerFromUnderlyingSink< underlyingSink: ws.WritableStreamSink, highWaterMark: number, sizeAlgorithm: QueuingStrategySizeCallback -) { +): void { // Assert: underlyingSink is not undefined. const controller = Object.create( WritableStreamDefaultController.prototype ) as WritableStreamDefaultController; - const startAlgorithm = function() { + const startAlgorithm = function(): any { return shared.invokeOrNoop(underlyingSink, "start", [controller]); }; const writeAlgorithm = shared.createAlgorithmFromUnderlyingMethod( diff --git a/cli/js/streams/writable-stream.ts b/cli/js/streams/writable-stream.ts index 42de9b9681311f..b6e4dd4ad1c34e 100644 --- a/cli/js/streams/writable-stream.ts +++ b/cli/js/streams/writable-stream.ts @@ -87,12 +87,12 @@ export function createWritableStream( abortAlgorithm: ws.AbortAlgorithm, highWaterMark?: number, sizeAlgorithm?: QueuingStrategySizeCallback -) { +): WritableStream { if (highWaterMark === undefined) { highWaterMark = 1; } if (sizeAlgorithm === undefined) { - sizeAlgorithm = () => 1; + sizeAlgorithm = (): number => 1; } // Assert: ! IsNonNegativeNumber(highWaterMark) is true. From e9d92fe0f6d71baafb01ff8a2550c4bd214eb87d Mon Sep 17 00:00:00 2001 From: Nick Stott Date: Sun, 27 Oct 2019 16:38:23 -0400 Subject: [PATCH 08/10] make sure that error kinds are unique, and duplicated properly between msg.rs and errors.ts --- cli/js/errors.ts | 4 ++-- cli/msg.rs | 4 ++++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/cli/js/errors.ts b/cli/js/errors.ts index 6218934a8df2ee..921978de1f937a 100644 --- a/cli/js/errors.ts +++ b/cli/js/errors.ts @@ -79,6 +79,6 @@ export enum ErrorKind { TypeError = 51, /** TODO These are DomException Types, and should be moved there when it exists */ - DataCloneError = 51, - AbortError = 52 + DataCloneError = 52, + AbortError = 53 } diff --git a/cli/msg.rs b/cli/msg.rs index 206da76949beb5..2ddac3b8d3000b 100644 --- a/cli/msg.rs +++ b/cli/msg.rs @@ -58,6 +58,10 @@ pub enum ErrorKind { Diagnostic = 49, JSError = 50, TypeError = 51, + + /** TODO These are DomException Types, and should be moved there when it exists */ + DataCloneError = 52, + AbortError = 53, } // Warning!
The values in this enum are duplicated in js/compiler.ts From 94129404be0bb3a71e358e380c8c6752822e62be Mon Sep 17 00:00:00 2001 From: Ryan Dahl Date: Mon, 28 Oct 2019 11:25:52 -0400 Subject: [PATCH 09/10] Comment out unused bits of cli/js/streams/mod.ts --- cli/js/streams/mod.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/cli/js/streams/mod.ts b/cli/js/streams/mod.ts index 97386b1d2ee3fd..5389aaf6d9db5a 100644 --- a/cli/js/streams/mod.ts +++ b/cli/js/streams/mod.ts @@ -9,6 +9,7 @@ */ export { SDReadableStream as ReadableStream } from "./readable-stream.ts"; +/* TODO The following are currently unused so not exported for clarity. export { WritableStream } from "./writable-stream.ts"; export { TransformStream } from "./transform-stream.ts"; @@ -16,3 +17,4 @@ export { ByteLengthQueuingStrategy, CountQueuingStrategy } from "./strategies.ts"; +*/ From b75f3fa3063d2d3cc429f562ebb8996c772cf939 Mon Sep 17 00:00:00 2001 From: Nick Stott Date: Mon, 28 Oct 2019 12:15:20 -0400 Subject: [PATCH 10/10] comment out unused code from WritableStreams, and TransformStream --- cli/js/dom_types.ts | 46 +- cli/js/errors.ts | 5 +- cli/js/streams/pipe-to.ts | 473 ++--- cli/js/streams/readable-internals.ts | 9 +- cli/js/streams/readable-stream.ts | 5 +- cli/js/streams/transform-internals.ts | 741 ++++---- .../transform-stream-default-controller.ts | 115 +- cli/js/streams/transform-stream.ts | 267 +-- cli/js/streams/writable-internals.ts | 1599 +++++++++-------- .../writable-stream-default-controller.ts | 179 +- .../streams/writable-stream-default-writer.ts | 245 +-- cli/js/streams/writable-stream.ts | 213 +-- cli/msg.rs | 3 +- 13 files changed, 1953 insertions(+), 1947 deletions(-) diff --git a/cli/js/dom_types.ts b/cli/js/dom_types.ts index 991251eda8595e..0b654d750c1092 100644 --- a/cli/js/dom_types.ts +++ b/cli/js/dom_types.ts @@ -337,19 +337,6 @@ export interface ReadableStream { tee(): ReadableStream[]; } -export interface WritableStream { - readonly locked: boolean; - abort(reason?: any): Promise; - getWriter(): WritableStreamDefaultWriter; -} - -export interface PipeOptions { - preventAbort?: boolean; - preventCancel?: boolean; - preventClose?: boolean; - signal?: AbortSignal; -} - export interface UnderlyingSource { cancel?: ReadableStreamErrorCallback; pull?: ReadableStreamDefaultControllerCallback; @@ -365,14 +352,6 @@ export interface UnderlyingByteSource { type: "bytes"; } -export interface UnderlyingSink { - abort?: WritableStreamErrorCallback; - close?: WritableStreamDefaultControllerCloseCallback; - start?: WritableStreamDefaultControllerStartCallback; - type?: undefined; - write?: WritableStreamDefaultControllerWriteCallback; -} - export interface ReadableStreamReader { cancel(reason?: any): Promise; read(): Promise; @@ -411,6 +390,29 @@ export interface ReadableStreamBYOBRequest { respond(bytesWritten: number): void; respondWithNewView(view: ArrayBufferView): void; } +/* TODO reenable these interfaces. These are needed to enable WritableStreams in js/streams/ +export interface WritableStream { + readonly locked: boolean; + abort(reason?: any): Promise; + getWriter(): WritableStreamDefaultWriter; +} + +TODO reenable these interfaces. 
These are needed to enable WritableStreams in js/streams/ +export interface UnderlyingSink { + abort?: WritableStreamErrorCallback; + close?: WritableStreamDefaultControllerCloseCallback; + start?: WritableStreamDefaultControllerStartCallback; + type?: undefined; + write?: WritableStreamDefaultControllerWriteCallback; +} + +export interface PipeOptions { + preventAbort?: boolean; + preventCancel?: boolean; + preventClose?: boolean; + signal?: AbortSignal; +} + export interface WritableStreamDefaultWriter { readonly closed: Promise; @@ -443,7 +445,7 @@ export interface WritableStreamDefaultControllerWriteCallback { export interface WritableStreamDefaultController { error(error?: any): void; } - +*/ export interface QueuingStrategy { highWaterMark?: number; size?: QueuingStrategySizeCallback; diff --git a/cli/js/errors.ts b/cli/js/errors.ts index 921978de1f937a..286a004e419332 100644 --- a/cli/js/errors.ts +++ b/cli/js/errors.ts @@ -78,7 +78,6 @@ export enum ErrorKind { JSError = 50, TypeError = 51, - /** TODO These are DomException Types, and should be moved there when it exists */ - DataCloneError = 52, - AbortError = 53 + /** TODO this is a DomException type, and should be moved out of here when possible */ + DataCloneError = 52 } diff --git a/cli/js/streams/pipe-to.ts b/cli/js/streams/pipe-to.ts index 18d2bd4f82f872..3764e605b88e43 100644 --- a/cli/js/streams/pipe-to.ts +++ b/cli/js/streams/pipe-to.ts @@ -1,236 +1,237 @@ -// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 -// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT - -/** - * streams/pipe-to - pipeTo algorithm implementation - * Part of Stardazed - * (c) 2018-Present by Arthur Langereis - @zenmumbler - * https://github.com/stardazed/sd-streams - */ - -/* eslint-disable @typescript-eslint/no-explicit-any */ -// TODO reenable this lint here - -import * as rs from "./readable-internals.ts"; -import * as ws from "./writable-internals.ts"; -import * as shared from "./shared-internals.ts"; - -import { ReadableStreamDefaultReader } from "./readable-stream-default-reader.ts"; -import { WritableStreamDefaultWriter } from "./writable-stream-default-writer.ts"; -import { PipeOptions } from "../dom_types.ts"; -import { DenoError, ErrorKind } from "../errors.ts"; - -// add a wrapper to handle falsy rejections -interface ErrorWrapper { - actualError: shared.ErrorResult; -} - -export function pipeTo( - source: rs.SDReadableStream, - dest: ws.WritableStream, - options: PipeOptions -): Promise { - const preventClose = !!options.preventClose; - const preventAbort = !!options.preventAbort; - const preventCancel = !!options.preventCancel; - const signal = options.signal; - - let shuttingDown = false; - let latestWrite = Promise.resolve(); - const promise = shared.createControlledPromise(); - - // If IsReadableByteStreamController(this.[[readableStreamController]]) is true, let reader be either ! AcquireReadableStreamBYOBReader(this) or ! AcquireReadableStreamDefaultReader(this), at the user agent’s discretion. - // Otherwise, let reader be ! AcquireReadableStreamDefaultReader(this). 
- const reader = new ReadableStreamDefaultReader(source); - const writer = new WritableStreamDefaultWriter(dest); - - let abortAlgorithm: () => any; - if (signal !== undefined) { - abortAlgorithm = (): void => { - // TODO this should be a DOMException, - // https://github.com/stardazed/sd-streams/blob/master/packages/streams/src/pipe-to.ts#L38 - const error = new DenoError(ErrorKind.AbortError, "Aborted"); - const actions: Array<() => Promise> = []; - if (preventAbort === false) { - actions.push(() => { - if (dest[shared.state_] === "writable") { - return ws.writableStreamAbort(dest, error); - } - return Promise.resolve(); - }); - } - if (preventCancel === false) { - actions.push(() => { - if (source[shared.state_] === "readable") { - return rs.readableStreamCancel(source, error); - } - return Promise.resolve(); - }); - } - shutDown( - () => { - return Promise.all(actions.map(a => a())).then(_ => undefined); - }, - { actualError: error } - ); - }; - - if (signal.aborted === true) { - abortAlgorithm(); - } else { - signal.addEventListener("abort", abortAlgorithm); - } - } - - function onStreamErrored( - stream: rs.SDReadableStream | ws.WritableStream, - promise: Promise, - action: (error: shared.ErrorResult) => void - ): void { - if (stream[shared.state_] === "errored") { - action(stream[shared.storedError_]); - } else { - promise.catch(action); - } - } - - function onStreamClosed( - stream: rs.SDReadableStream | ws.WritableStream, - promise: Promise, - action: () => void - ): void { - if (stream[shared.state_] === "closed") { - action(); - } else { - promise.then(action); - } - } - - onStreamErrored(source, reader[rs.closedPromise_].promise, error => { - if (!preventAbort) { - shutDown(() => ws.writableStreamAbort(dest, error), { - actualError: error - }); - } else { - shutDown(undefined, { actualError: error }); - } - }); - - onStreamErrored(dest, writer[ws.closedPromise_].promise, error => { - if (!preventCancel) { - shutDown(() => rs.readableStreamCancel(source, error), { - actualError: error - }); - } else { - shutDown(undefined, { actualError: error }); - } - }); - - onStreamClosed(source, reader[rs.closedPromise_].promise, () => { - if (!preventClose) { - shutDown(() => - ws.writableStreamDefaultWriterCloseWithErrorPropagation(writer) - ); - } else { - shutDown(); - } - }); - - if ( - ws.writableStreamCloseQueuedOrInFlight(dest) || - dest[shared.state_] === "closed" - ) { - // Assert: no chunks have been read or written. - const destClosed = new TypeError(); - if (!preventCancel) { - shutDown(() => rs.readableStreamCancel(source, destClosed), { - actualError: destClosed - }); - } else { - shutDown(undefined, { actualError: destClosed }); - } - } - - function awaitLatestWrite(): Promise { - const curLatestWrite = latestWrite; - return latestWrite.then(() => - curLatestWrite === latestWrite ? 
undefined : awaitLatestWrite() - ); - } - - function flushRemainder(): Promise | undefined { - if ( - dest[shared.state_] === "writable" && - !ws.writableStreamCloseQueuedOrInFlight(dest) - ) { - return awaitLatestWrite(); - } else { - return undefined; - } - } - - function shutDown(action?: () => Promise, error?: ErrorWrapper): void { - if (shuttingDown) { - return; - } - shuttingDown = true; - - if (action === undefined) { - action = (): Promise => Promise.resolve(); - } - - function finishShutDown(): void { - action!().then( - _ => finalize(error), - newError => finalize({ actualError: newError }) - ); - } - - const flushWait = flushRemainder(); - if (flushWait) { - flushWait.then(finishShutDown); - } else { - finishShutDown(); - } - } - - function finalize(error?: ErrorWrapper): void { - ws.writableStreamDefaultWriterRelease(writer); - rs.readableStreamReaderGenericRelease(reader); - if (signal && abortAlgorithm) { - signal.removeEventListener("abort", abortAlgorithm); - } - if (error) { - promise.reject(error.actualError); - } else { - promise.resolve(undefined); - } - } - - function next(): Promise | undefined { - if (shuttingDown) { - return; - } - - writer[ws.readyPromise_].promise.then(() => { - rs.readableStreamDefaultReaderRead(reader).then( - ({ value, done }) => { - if (done) { - return; - } - latestWrite = ws - .writableStreamDefaultWriterWrite(writer, value!) - .catch(() => {}); - next(); - }, - _error => { - latestWrite = Promise.resolve(); - } - ); - }); - } - - next(); - - return promise.promise; -} +// TODO reenable this code when we enable writableStreams and transport types +// // Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// // Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +// /** +// * streams/pipe-to - pipeTo algorithm implementation +// * Part of Stardazed +// * (c) 2018-Present by Arthur Langereis - @zenmumbler +// * https://github.com/stardazed/sd-streams +// */ + +// /* eslint-disable @typescript-eslint/no-explicit-any */ +// // TODO reenable this lint here + +// import * as rs from "./readable-internals.ts"; +// import * as ws from "./writable-internals.ts"; +// import * as shared from "./shared-internals.ts"; + +// import { ReadableStreamDefaultReader } from "./readable-stream-default-reader.ts"; +// import { WritableStreamDefaultWriter } from "./writable-stream-default-writer.ts"; +// import { PipeOptions } from "../dom_types.ts"; +// import { DenoError, ErrorKind } from "../errors.ts"; + +// // add a wrapper to handle falsy rejections +// interface ErrorWrapper { +// actualError: shared.ErrorResult; +// } + +// export function pipeTo( +// source: rs.SDReadableStream, +// dest: ws.WritableStream, +// options: PipeOptions +// ): Promise { +// const preventClose = !!options.preventClose; +// const preventAbort = !!options.preventAbort; +// const preventCancel = !!options.preventCancel; +// const signal = options.signal; + +// let shuttingDown = false; +// let latestWrite = Promise.resolve(); +// const promise = shared.createControlledPromise(); + +// // If IsReadableByteStreamController(this.[[readableStreamController]]) is true, let reader be either ! AcquireReadableStreamBYOBReader(this) or ! AcquireReadableStreamDefaultReader(this), at the user agent’s discretion. +// // Otherwise, let reader be ! AcquireReadableStreamDefaultReader(this). 
+// const reader = new ReadableStreamDefaultReader(source); +// const writer = new WritableStreamDefaultWriter(dest); + +// let abortAlgorithm: () => any; +// if (signal !== undefined) { +// abortAlgorithm = (): void => { +// // TODO this should be a DOMException, +// // https://github.com/stardazed/sd-streams/blob/master/packages/streams/src/pipe-to.ts#L38 +// const error = new DenoError(ErrorKind.AbortError, "Aborted"); +// const actions: Array<() => Promise> = []; +// if (preventAbort === false) { +// actions.push(() => { +// if (dest[shared.state_] === "writable") { +// return ws.writableStreamAbort(dest, error); +// } +// return Promise.resolve(); +// }); +// } +// if (preventCancel === false) { +// actions.push(() => { +// if (source[shared.state_] === "readable") { +// return rs.readableStreamCancel(source, error); +// } +// return Promise.resolve(); +// }); +// } +// shutDown( +// () => { +// return Promise.all(actions.map(a => a())).then(_ => undefined); +// }, +// { actualError: error } +// ); +// }; + +// if (signal.aborted === true) { +// abortAlgorithm(); +// } else { +// signal.addEventListener("abort", abortAlgorithm); +// } +// } + +// function onStreamErrored( +// stream: rs.SDReadableStream | ws.WritableStream, +// promise: Promise, +// action: (error: shared.ErrorResult) => void +// ): void { +// if (stream[shared.state_] === "errored") { +// action(stream[shared.storedError_]); +// } else { +// promise.catch(action); +// } +// } + +// function onStreamClosed( +// stream: rs.SDReadableStream | ws.WritableStream, +// promise: Promise, +// action: () => void +// ): void { +// if (stream[shared.state_] === "closed") { +// action(); +// } else { +// promise.then(action); +// } +// } + +// onStreamErrored(source, reader[rs.closedPromise_].promise, error => { +// if (!preventAbort) { +// shutDown(() => ws.writableStreamAbort(dest, error), { +// actualError: error +// }); +// } else { +// shutDown(undefined, { actualError: error }); +// } +// }); + +// onStreamErrored(dest, writer[ws.closedPromise_].promise, error => { +// if (!preventCancel) { +// shutDown(() => rs.readableStreamCancel(source, error), { +// actualError: error +// }); +// } else { +// shutDown(undefined, { actualError: error }); +// } +// }); + +// onStreamClosed(source, reader[rs.closedPromise_].promise, () => { +// if (!preventClose) { +// shutDown(() => +// ws.writableStreamDefaultWriterCloseWithErrorPropagation(writer) +// ); +// } else { +// shutDown(); +// } +// }); + +// if ( +// ws.writableStreamCloseQueuedOrInFlight(dest) || +// dest[shared.state_] === "closed" +// ) { +// // Assert: no chunks have been read or written. +// const destClosed = new TypeError(); +// if (!preventCancel) { +// shutDown(() => rs.readableStreamCancel(source, destClosed), { +// actualError: destClosed +// }); +// } else { +// shutDown(undefined, { actualError: destClosed }); +// } +// } + +// function awaitLatestWrite(): Promise { +// const curLatestWrite = latestWrite; +// return latestWrite.then(() => +// curLatestWrite === latestWrite ? 
undefined : awaitLatestWrite() +// ); +// } + +// function flushRemainder(): Promise | undefined { +// if ( +// dest[shared.state_] === "writable" && +// !ws.writableStreamCloseQueuedOrInFlight(dest) +// ) { +// return awaitLatestWrite(); +// } else { +// return undefined; +// } +// } + +// function shutDown(action?: () => Promise, error?: ErrorWrapper): void { +// if (shuttingDown) { +// return; +// } +// shuttingDown = true; + +// if (action === undefined) { +// action = (): Promise => Promise.resolve(); +// } + +// function finishShutDown(): void { +// action!().then( +// _ => finalize(error), +// newError => finalize({ actualError: newError }) +// ); +// } + +// const flushWait = flushRemainder(); +// if (flushWait) { +// flushWait.then(finishShutDown); +// } else { +// finishShutDown(); +// } +// } + +// function finalize(error?: ErrorWrapper): void { +// ws.writableStreamDefaultWriterRelease(writer); +// rs.readableStreamReaderGenericRelease(reader); +// if (signal && abortAlgorithm) { +// signal.removeEventListener("abort", abortAlgorithm); +// } +// if (error) { +// promise.reject(error.actualError); +// } else { +// promise.resolve(undefined); +// } +// } + +// function next(): Promise | undefined { +// if (shuttingDown) { +// return; +// } + +// writer[ws.readyPromise_].promise.then(() => { +// rs.readableStreamDefaultReaderRead(reader).then( +// ({ value, done }) => { +// if (done) { +// return; +// } +// latestWrite = ws +// .writableStreamDefaultWriterWrite(writer, value!) +// .catch(() => {}); +// next(); +// }, +// _error => { +// latestWrite = Promise.resolve(); +// } +// ); +// }); +// } + +// next(); + +// return promise.promise; +// } diff --git a/cli/js/streams/readable-internals.ts b/cli/js/streams/readable-internals.ts index 6f892a4c8d7e09..36f4223d79f5bf 100644 --- a/cli/js/streams/readable-internals.ts +++ b/cli/js/streams/readable-internals.ts @@ -11,11 +11,9 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ // TODO reenable this lint here -import * as ws from "./writable-internals.ts"; import * as shared from "./shared-internals.ts"; import * as q from "./queue-mixin.ts"; import { - PipeOptions, QueuingStrategy, QueuingStrategySizeCallback, UnderlyingSource, @@ -192,12 +190,12 @@ export declare class SDReadableStreamBYOBReader [readIntoRequests_]: Array>>; } -// ---- - +/* TODO reenable this when we add WritableStreams and Transforms export interface GenericTransformStream { readable: SDReadableStream; writable: ws.WritableStream; } +*/ export type ReadableStreamState = "readable" | "closed" | "errored"; @@ -217,6 +215,7 @@ export declare class SDReadableStream { getReader(options: { mode: "byob" }): SDReadableStreamBYOBReader; tee(): Array>; + /* TODO reenable these methods when we bring in writableStreams and transport types pipeThrough( transform: GenericTransformStream, options?: PipeOptions @@ -225,7 +224,7 @@ export declare class SDReadableStream { dest: ws.WritableStream, options?: PipeOptions ): Promise; - + */ [shared.state_]: ReadableStreamState; [shared.storedError_]: shared.ErrorResult; [reader_]: SDReadableStreamReader | undefined; diff --git a/cli/js/streams/readable-stream.ts b/cli/js/streams/readable-stream.ts index cadd858ddfa7af..0c06a1041774b8 100644 --- a/cli/js/streams/readable-stream.ts +++ b/cli/js/streams/readable-stream.ts @@ -13,11 +13,8 @@ // 284:7 error 'branch1' is never reassigned. 
Use 'const' instead prefer-const import * as rs from "./readable-internals.ts"; -import * as ws from "./writable-internals.ts"; import * as shared from "./shared-internals.ts"; -import { pipeTo } from "./pipe-to.ts"; import { - PipeOptions, QueuingStrategy, QueuingStrategySizeCallback, UnderlyingSource, @@ -137,6 +134,7 @@ export class SDReadableStream return readableStreamTee(this, false); } + /* TODO reenable these methods when we bring in writableStreams and transport types pipeThrough( transform: rs.GenericTransformStream, options: PipeOptions = {} @@ -193,6 +191,7 @@ export class SDReadableStream return pipeTo(this, dest, options); } + */ } export function createReadableStream( diff --git a/cli/js/streams/transform-internals.ts b/cli/js/streams/transform-internals.ts index e659a4965bbb7f..4c5e3657d6686a 100644 --- a/cli/js/streams/transform-internals.ts +++ b/cli/js/streams/transform-internals.ts @@ -1,370 +1,371 @@ -// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 -// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT - -/** - * streams/transform-internals - internal types and functions for transform streams - * Part of Stardazed - * (c) 2018-Present by Arthur Langereis - @zenmumbler - * https://github.com/stardazed/sd-streams - */ - -/* eslint-disable @typescript-eslint/no-explicit-any */ -// TODO reenable this lint here - -import * as rs from "./readable-internals.ts"; -import * as ws from "./writable-internals.ts"; -import * as shared from "./shared-internals.ts"; - -import { createReadableStream } from "./readable-stream.ts"; -import { createWritableStream } from "./writable-stream.ts"; - -import { QueuingStrategy, QueuingStrategySizeCallback } from "../dom_types.ts"; - -export const state_ = Symbol("transformState_"); -export const backpressure_ = Symbol("backpressure_"); -export const backpressureChangePromise_ = Symbol("backpressureChangePromise_"); -export const readable_ = Symbol("readable_"); -export const transformStreamController_ = Symbol("transformStreamController_"); -export const writable_ = Symbol("writable_"); - -export const controlledTransformStream_ = Symbol("controlledTransformStream_"); -export const flushAlgorithm_ = Symbol("flushAlgorithm_"); -export const transformAlgorithm_ = Symbol("transformAlgorithm_"); - -// ---- - -export type TransformFunction = ( - chunk: InputType, - controller: TransformStreamDefaultController -) => void | PromiseLike; -export type TransformAlgorithm = (chunk: InputType) => Promise; -export type FlushFunction = ( - controller: TransformStreamDefaultController -) => void | PromiseLike; -export type FlushAlgorithm = () => Promise; - -// ---- - -export interface TransformStreamDefaultController { - readonly desiredSize: number | null; - enqueue(chunk: OutputType): void; - error(reason: shared.ErrorResult): void; - terminate(): void; - - [controlledTransformStream_]: TransformStream; // The TransformStream instance controlled; also used for the IsTransformStreamDefaultController brand check - [flushAlgorithm_]: FlushAlgorithm; // A promise - returning algorithm which communicates a requested close to the transformer - [transformAlgorithm_]: TransformAlgorithm; // A promise - returning algorithm, taking one argument(the chunk to transform), which requests the transformer perform its transformation -} - -export interface Transformer { - start?( - controller: TransformStreamDefaultController - ): void | PromiseLike; - transform?: TransformFunction; - flush?: 
FlushFunction; - - readableType?: undefined; // for future spec changes - writableType?: undefined; // for future spec changes -} - -export declare class TransformStream { - constructor( - transformer: Transformer, - writableStrategy: QueuingStrategy, - readableStrategy: QueuingStrategy - ); - - readonly readable: rs.SDReadableStream; - readonly writable: ws.WritableStream; - - [backpressure_]: boolean | undefined; // Whether there was backpressure on [[readable]] the last time it was observed - [backpressureChangePromise_]: shared.ControlledPromise | undefined; // A promise which is fulfilled and replaced every time the value of[[backpressure]] changes - [readable_]: rs.SDReadableStream; // The ReadableStream instance controlled by this object - [transformStreamController_]: TransformStreamDefaultController< - InputType, - OutputType - >; // A TransformStreamDefaultController created with the ability to control[[readable]] and[[writable]]; also used for the IsTransformStream brand check - [writable_]: ws.WritableStream; // The WritableStream instance controlled by this object -} - -// ---- TransformStream - -export function isTransformStream( - value: unknown -): value is TransformStream { - if (typeof value !== "object" || value === null) { - return false; - } - return transformStreamController_ in value; -} - -export function initializeTransformStream( - stream: TransformStream, - startPromise: Promise, - writableHighWaterMark: number, - writableSizeAlgorithm: QueuingStrategySizeCallback, - readableHighWaterMark: number, - readableSizeAlgorithm: QueuingStrategySizeCallback -): void { - const startAlgorithm = function(): Promise { - return startPromise; - }; - const writeAlgorithm = function(chunk: InputType): Promise { - return transformStreamDefaultSinkWriteAlgorithm(stream, chunk); - }; - const abortAlgorithm = function(reason: shared.ErrorResult): Promise { - return transformStreamDefaultSinkAbortAlgorithm(stream, reason); - }; - const closeAlgorithm = function(): Promise { - return transformStreamDefaultSinkCloseAlgorithm(stream); - }; - stream[writable_] = createWritableStream( - startAlgorithm, - writeAlgorithm, - closeAlgorithm, - abortAlgorithm, - writableHighWaterMark, - writableSizeAlgorithm - ); - - const pullAlgorithm = function(): Promise { - return transformStreamDefaultSourcePullAlgorithm(stream); - }; - const cancelAlgorithm = function( - reason: shared.ErrorResult - ): Promise { - transformStreamErrorWritableAndUnblockWrite(stream, reason); - return Promise.resolve(undefined); - }; - stream[readable_] = createReadableStream( - startAlgorithm, - pullAlgorithm, - cancelAlgorithm, - readableHighWaterMark, - readableSizeAlgorithm - ); - - stream[backpressure_] = undefined; - stream[backpressureChangePromise_] = undefined; - transformStreamSetBackpressure(stream, true); - stream[transformStreamController_] = undefined!; // initialize slot for brand-check -} - -export function transformStreamError( - stream: TransformStream, - error: shared.ErrorResult -): void { - rs.readableStreamDefaultControllerError( - stream[readable_][ - rs.readableStreamController_ - ] as rs.SDReadableStreamDefaultController, - error - ); - transformStreamErrorWritableAndUnblockWrite(stream, error); -} - -export function transformStreamErrorWritableAndUnblockWrite< - InputType, - OutputType ->( - stream: TransformStream, - error: shared.ErrorResult -): void { - transformStreamDefaultControllerClearAlgorithms( - stream[transformStreamController_] - ); - ws.writableStreamDefaultControllerErrorIfNeeded( 
- stream[writable_][ws.writableStreamController_]!, - error - ); - if (stream[backpressure_]) { - transformStreamSetBackpressure(stream, false); - } -} - -export function transformStreamSetBackpressure( - stream: TransformStream, - backpressure: boolean -): void { - // Assert: stream.[[backpressure]] is not backpressure. - if (stream[backpressure_] !== undefined) { - stream[backpressureChangePromise_]!.resolve(undefined); - } - stream[backpressureChangePromise_] = shared.createControlledPromise(); - stream[backpressure_] = backpressure; -} - -// ---- TransformStreamDefaultController - -export function isTransformStreamDefaultController( - value: unknown -): value is TransformStreamDefaultController { - if (typeof value !== "object" || value === null) { - return false; - } - return controlledTransformStream_ in value; -} - -export function setUpTransformStreamDefaultController( - stream: TransformStream, - controller: TransformStreamDefaultController, - transformAlgorithm: TransformAlgorithm, - flushAlgorithm: FlushAlgorithm -): void { - // Assert: ! IsTransformStream(stream) is true. - // Assert: stream.[[transformStreamController]] is undefined. - controller[controlledTransformStream_] = stream; - stream[transformStreamController_] = controller; - controller[transformAlgorithm_] = transformAlgorithm; - controller[flushAlgorithm_] = flushAlgorithm; -} - -export function transformStreamDefaultControllerClearAlgorithms< - InputType, - OutputType ->(controller: TransformStreamDefaultController): void { - // Use ! assertions to override type check here, this way we don't - // have to perform type checks/assertions everywhere else. - controller[transformAlgorithm_] = undefined!; - controller[flushAlgorithm_] = undefined!; -} - -export function transformStreamDefaultControllerEnqueue( - controller: TransformStreamDefaultController, - chunk: OutputType -): void { - const stream = controller[controlledTransformStream_]; - const readableController = stream[readable_][ - rs.readableStreamController_ - ] as rs.SDReadableStreamDefaultController; - if ( - !rs.readableStreamDefaultControllerCanCloseOrEnqueue(readableController) - ) { - throw new TypeError(); - } - try { - rs.readableStreamDefaultControllerEnqueue(readableController, chunk); - } catch (error) { - transformStreamErrorWritableAndUnblockWrite(stream, error); - throw stream[readable_][shared.storedError_]; - } - const backpressure = rs.readableStreamDefaultControllerHasBackpressure( - readableController - ); - if (backpressure !== stream[backpressure_]) { - // Assert: backpressure is true. 
- transformStreamSetBackpressure(stream, true); - } -} - -export function transformStreamDefaultControllerError( - controller: TransformStreamDefaultController, - error: shared.ErrorResult -): void { - transformStreamError(controller[controlledTransformStream_], error); -} - -export function transformStreamDefaultControllerPerformTransform< - InputType, - OutputType ->( - controller: TransformStreamDefaultController, - chunk: InputType -): Promise { - const transformPromise = controller[transformAlgorithm_](chunk); - return transformPromise.catch(error => { - transformStreamError(controller[controlledTransformStream_], error); - throw error; - }); -} - -export function transformStreamDefaultControllerTerminate< - InputType, - OutputType ->(controller: TransformStreamDefaultController): void { - const stream = controller[controlledTransformStream_]; - const readableController = stream[readable_][ - rs.readableStreamController_ - ] as rs.SDReadableStreamDefaultController; - if (rs.readableStreamDefaultControllerCanCloseOrEnqueue(readableController)) { - rs.readableStreamDefaultControllerClose(readableController); - } - const error = new TypeError("The transform stream has been terminated"); - transformStreamErrorWritableAndUnblockWrite(stream, error); -} - -// ---- Transform Sinks - -export function transformStreamDefaultSinkWriteAlgorithm( - stream: TransformStream, - chunk: InputType -): Promise { - // Assert: stream.[[writable]].[[state]] is "writable". - const controller = stream[transformStreamController_]; - if (stream[backpressure_]) { - const backpressureChangePromise = stream[backpressureChangePromise_]!; - // Assert: backpressureChangePromise is not undefined. - return backpressureChangePromise.promise.then(_ => { - const writable = stream[writable_]; - const state = writable[shared.state_]; - if (state === "erroring") { - throw writable[shared.storedError_]; - } - // Assert: state is "writable". - return transformStreamDefaultControllerPerformTransform( - controller, - chunk - ); - }); - } - return transformStreamDefaultControllerPerformTransform(controller, chunk); -} - -export function transformStreamDefaultSinkAbortAlgorithm( - stream: TransformStream, - reason: shared.ErrorResult -): Promise { - transformStreamError(stream, reason); - return Promise.resolve(undefined); -} - -export function transformStreamDefaultSinkCloseAlgorithm( - stream: TransformStream -): Promise { - const readable = stream[readable_]; - const controller = stream[transformStreamController_]; - const flushPromise = controller[flushAlgorithm_](); - transformStreamDefaultControllerClearAlgorithms(controller); - - return flushPromise.then( - _ => { - if (readable[shared.state_] === "errored") { - throw readable[shared.storedError_]; - } - const readableController = readable[ - rs.readableStreamController_ - ] as rs.SDReadableStreamDefaultController; - if ( - rs.readableStreamDefaultControllerCanCloseOrEnqueue(readableController) - ) { - rs.readableStreamDefaultControllerClose(readableController); - } - }, - error => { - transformStreamError(stream, error); - throw readable[shared.storedError_]; - } - ); -} - -// ---- Transform Sources - -export function transformStreamDefaultSourcePullAlgorithm< - InputType, - OutputType ->(stream: TransformStream): Promise { - // Assert: stream.[[backpressure]] is true. - // Assert: stream.[[backpressureChangePromise]] is not undefined. 
- transformStreamSetBackpressure(stream, false); - return stream[backpressureChangePromise_]!.promise; -} +// TODO reenable this code when we enable writableStreams and transport types +// // Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// // Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +// /** +// * streams/transform-internals - internal types and functions for transform streams +// * Part of Stardazed +// * (c) 2018-Present by Arthur Langereis - @zenmumbler +// * https://github.com/stardazed/sd-streams +// */ + +// /* eslint-disable @typescript-eslint/no-explicit-any */ +// // TODO reenable this lint here + +// import * as rs from "./readable-internals.ts"; +// import * as ws from "./writable-internals.ts"; +// import * as shared from "./shared-internals.ts"; + +// import { createReadableStream } from "./readable-stream.ts"; +// import { createWritableStream } from "./writable-stream.ts"; + +// import { QueuingStrategy, QueuingStrategySizeCallback } from "../dom_types.ts"; + +// export const state_ = Symbol("transformState_"); +// export const backpressure_ = Symbol("backpressure_"); +// export const backpressureChangePromise_ = Symbol("backpressureChangePromise_"); +// export const readable_ = Symbol("readable_"); +// export const transformStreamController_ = Symbol("transformStreamController_"); +// export const writable_ = Symbol("writable_"); + +// export const controlledTransformStream_ = Symbol("controlledTransformStream_"); +// export const flushAlgorithm_ = Symbol("flushAlgorithm_"); +// export const transformAlgorithm_ = Symbol("transformAlgorithm_"); + +// // ---- + +// export type TransformFunction = ( +// chunk: InputType, +// controller: TransformStreamDefaultController +// ) => void | PromiseLike; +// export type TransformAlgorithm = (chunk: InputType) => Promise; +// export type FlushFunction = ( +// controller: TransformStreamDefaultController +// ) => void | PromiseLike; +// export type FlushAlgorithm = () => Promise; + +// // ---- + +// export interface TransformStreamDefaultController { +// readonly desiredSize: number | null; +// enqueue(chunk: OutputType): void; +// error(reason: shared.ErrorResult): void; +// terminate(): void; + +// [controlledTransformStream_]: TransformStream; // The TransformStream instance controlled; also used for the IsTransformStreamDefaultController brand check +// [flushAlgorithm_]: FlushAlgorithm; // A promise - returning algorithm which communicates a requested close to the transformer +// [transformAlgorithm_]: TransformAlgorithm; // A promise - returning algorithm, taking one argument(the chunk to transform), which requests the transformer perform its transformation +// } + +// export interface Transformer { +// start?( +// controller: TransformStreamDefaultController +// ): void | PromiseLike; +// transform?: TransformFunction; +// flush?: FlushFunction; + +// readableType?: undefined; // for future spec changes +// writableType?: undefined; // for future spec changes +// } + +// export declare class TransformStream { +// constructor( +// transformer: Transformer, +// writableStrategy: QueuingStrategy, +// readableStrategy: QueuingStrategy +// ); + +// readonly readable: rs.SDReadableStream; +// readonly writable: ws.WritableStream; + +// [backpressure_]: boolean | undefined; // Whether there was backpressure on [[readable]] the last time it was observed +// [backpressureChangePromise_]: shared.ControlledPromise | undefined; // A promise which 
is fulfilled and replaced every time the value of[[backpressure]] changes +// [readable_]: rs.SDReadableStream; // The ReadableStream instance controlled by this object +// [transformStreamController_]: TransformStreamDefaultController< +// InputType, +// OutputType +// >; // A TransformStreamDefaultController created with the ability to control[[readable]] and[[writable]]; also used for the IsTransformStream brand check +// [writable_]: ws.WritableStream; // The WritableStream instance controlled by this object +// } + +// // ---- TransformStream + +// export function isTransformStream( +// value: unknown +// ): value is TransformStream { +// if (typeof value !== "object" || value === null) { +// return false; +// } +// return transformStreamController_ in value; +// } + +// export function initializeTransformStream( +// stream: TransformStream, +// startPromise: Promise, +// writableHighWaterMark: number, +// writableSizeAlgorithm: QueuingStrategySizeCallback, +// readableHighWaterMark: number, +// readableSizeAlgorithm: QueuingStrategySizeCallback +// ): void { +// const startAlgorithm = function(): Promise { +// return startPromise; +// }; +// const writeAlgorithm = function(chunk: InputType): Promise { +// return transformStreamDefaultSinkWriteAlgorithm(stream, chunk); +// }; +// const abortAlgorithm = function(reason: shared.ErrorResult): Promise { +// return transformStreamDefaultSinkAbortAlgorithm(stream, reason); +// }; +// const closeAlgorithm = function(): Promise { +// return transformStreamDefaultSinkCloseAlgorithm(stream); +// }; +// stream[writable_] = createWritableStream( +// startAlgorithm, +// writeAlgorithm, +// closeAlgorithm, +// abortAlgorithm, +// writableHighWaterMark, +// writableSizeAlgorithm +// ); + +// const pullAlgorithm = function(): Promise { +// return transformStreamDefaultSourcePullAlgorithm(stream); +// }; +// const cancelAlgorithm = function( +// reason: shared.ErrorResult +// ): Promise { +// transformStreamErrorWritableAndUnblockWrite(stream, reason); +// return Promise.resolve(undefined); +// }; +// stream[readable_] = createReadableStream( +// startAlgorithm, +// pullAlgorithm, +// cancelAlgorithm, +// readableHighWaterMark, +// readableSizeAlgorithm +// ); + +// stream[backpressure_] = undefined; +// stream[backpressureChangePromise_] = undefined; +// transformStreamSetBackpressure(stream, true); +// stream[transformStreamController_] = undefined!; // initialize slot for brand-check +// } + +// export function transformStreamError( +// stream: TransformStream, +// error: shared.ErrorResult +// ): void { +// rs.readableStreamDefaultControllerError( +// stream[readable_][ +// rs.readableStreamController_ +// ] as rs.SDReadableStreamDefaultController, +// error +// ); +// transformStreamErrorWritableAndUnblockWrite(stream, error); +// } + +// export function transformStreamErrorWritableAndUnblockWrite< +// InputType, +// OutputType +// >( +// stream: TransformStream, +// error: shared.ErrorResult +// ): void { +// transformStreamDefaultControllerClearAlgorithms( +// stream[transformStreamController_] +// ); +// ws.writableStreamDefaultControllerErrorIfNeeded( +// stream[writable_][ws.writableStreamController_]!, +// error +// ); +// if (stream[backpressure_]) { +// transformStreamSetBackpressure(stream, false); +// } +// } + +// export function transformStreamSetBackpressure( +// stream: TransformStream, +// backpressure: boolean +// ): void { +// // Assert: stream.[[backpressure]] is not backpressure. 
+// if (stream[backpressure_] !== undefined) { +// stream[backpressureChangePromise_]!.resolve(undefined); +// } +// stream[backpressureChangePromise_] = shared.createControlledPromise(); +// stream[backpressure_] = backpressure; +// } + +// // ---- TransformStreamDefaultController + +// export function isTransformStreamDefaultController( +// value: unknown +// ): value is TransformStreamDefaultController { +// if (typeof value !== "object" || value === null) { +// return false; +// } +// return controlledTransformStream_ in value; +// } + +// export function setUpTransformStreamDefaultController( +// stream: TransformStream, +// controller: TransformStreamDefaultController, +// transformAlgorithm: TransformAlgorithm, +// flushAlgorithm: FlushAlgorithm +// ): void { +// // Assert: ! IsTransformStream(stream) is true. +// // Assert: stream.[[transformStreamController]] is undefined. +// controller[controlledTransformStream_] = stream; +// stream[transformStreamController_] = controller; +// controller[transformAlgorithm_] = transformAlgorithm; +// controller[flushAlgorithm_] = flushAlgorithm; +// } + +// export function transformStreamDefaultControllerClearAlgorithms< +// InputType, +// OutputType +// >(controller: TransformStreamDefaultController): void { +// // Use ! assertions to override type check here, this way we don't +// // have to perform type checks/assertions everywhere else. +// controller[transformAlgorithm_] = undefined!; +// controller[flushAlgorithm_] = undefined!; +// } + +// export function transformStreamDefaultControllerEnqueue( +// controller: TransformStreamDefaultController, +// chunk: OutputType +// ): void { +// const stream = controller[controlledTransformStream_]; +// const readableController = stream[readable_][ +// rs.readableStreamController_ +// ] as rs.SDReadableStreamDefaultController; +// if ( +// !rs.readableStreamDefaultControllerCanCloseOrEnqueue(readableController) +// ) { +// throw new TypeError(); +// } +// try { +// rs.readableStreamDefaultControllerEnqueue(readableController, chunk); +// } catch (error) { +// transformStreamErrorWritableAndUnblockWrite(stream, error); +// throw stream[readable_][shared.storedError_]; +// } +// const backpressure = rs.readableStreamDefaultControllerHasBackpressure( +// readableController +// ); +// if (backpressure !== stream[backpressure_]) { +// // Assert: backpressure is true. 
+// transformStreamSetBackpressure(stream, true); +// } +// } + +// export function transformStreamDefaultControllerError( +// controller: TransformStreamDefaultController, +// error: shared.ErrorResult +// ): void { +// transformStreamError(controller[controlledTransformStream_], error); +// } + +// export function transformStreamDefaultControllerPerformTransform< +// InputType, +// OutputType +// >( +// controller: TransformStreamDefaultController, +// chunk: InputType +// ): Promise { +// const transformPromise = controller[transformAlgorithm_](chunk); +// return transformPromise.catch(error => { +// transformStreamError(controller[controlledTransformStream_], error); +// throw error; +// }); +// } + +// export function transformStreamDefaultControllerTerminate< +// InputType, +// OutputType +// >(controller: TransformStreamDefaultController): void { +// const stream = controller[controlledTransformStream_]; +// const readableController = stream[readable_][ +// rs.readableStreamController_ +// ] as rs.SDReadableStreamDefaultController; +// if (rs.readableStreamDefaultControllerCanCloseOrEnqueue(readableController)) { +// rs.readableStreamDefaultControllerClose(readableController); +// } +// const error = new TypeError("The transform stream has been terminated"); +// transformStreamErrorWritableAndUnblockWrite(stream, error); +// } + +// // ---- Transform Sinks + +// export function transformStreamDefaultSinkWriteAlgorithm( +// stream: TransformStream, +// chunk: InputType +// ): Promise { +// // Assert: stream.[[writable]].[[state]] is "writable". +// const controller = stream[transformStreamController_]; +// if (stream[backpressure_]) { +// const backpressureChangePromise = stream[backpressureChangePromise_]!; +// // Assert: backpressureChangePromise is not undefined. +// return backpressureChangePromise.promise.then(_ => { +// const writable = stream[writable_]; +// const state = writable[shared.state_]; +// if (state === "erroring") { +// throw writable[shared.storedError_]; +// } +// // Assert: state is "writable". 
+// return transformStreamDefaultControllerPerformTransform( +// controller, +// chunk +// ); +// }); +// } +// return transformStreamDefaultControllerPerformTransform(controller, chunk); +// } + +// export function transformStreamDefaultSinkAbortAlgorithm( +// stream: TransformStream, +// reason: shared.ErrorResult +// ): Promise { +// transformStreamError(stream, reason); +// return Promise.resolve(undefined); +// } + +// export function transformStreamDefaultSinkCloseAlgorithm( +// stream: TransformStream +// ): Promise { +// const readable = stream[readable_]; +// const controller = stream[transformStreamController_]; +// const flushPromise = controller[flushAlgorithm_](); +// transformStreamDefaultControllerClearAlgorithms(controller); + +// return flushPromise.then( +// _ => { +// if (readable[shared.state_] === "errored") { +// throw readable[shared.storedError_]; +// } +// const readableController = readable[ +// rs.readableStreamController_ +// ] as rs.SDReadableStreamDefaultController; +// if ( +// rs.readableStreamDefaultControllerCanCloseOrEnqueue(readableController) +// ) { +// rs.readableStreamDefaultControllerClose(readableController); +// } +// }, +// error => { +// transformStreamError(stream, error); +// throw readable[shared.storedError_]; +// } +// ); +// } + +// // ---- Transform Sources + +// export function transformStreamDefaultSourcePullAlgorithm< +// InputType, +// OutputType +// >(stream: TransformStream): Promise { +// // Assert: stream.[[backpressure]] is true. +// // Assert: stream.[[backpressureChangePromise]] is not undefined. +// transformStreamSetBackpressure(stream, false); +// return stream[backpressureChangePromise_]!.promise; +// } diff --git a/cli/js/streams/transform-stream-default-controller.ts b/cli/js/streams/transform-stream-default-controller.ts index 575823628e079f..24a8d08fd98175 100644 --- a/cli/js/streams/transform-stream-default-controller.ts +++ b/cli/js/streams/transform-stream-default-controller.ts @@ -1,57 +1,58 @@ -// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 -// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT - -/** - * streams/transform-stream-default-controller - TransformStreamDefaultController class implementation - * Part of Stardazed - * (c) 2018-Present by Arthur Langereis - @zenmumbler - * https://github.com/stardazed/sd-streams - */ - -import * as rs from "./readable-internals.ts"; -import * as ts from "./transform-internals.ts"; -import { ErrorResult } from "./shared-internals.ts"; - -export class TransformStreamDefaultController - implements ts.TransformStreamDefaultController { - [ts.controlledTransformStream_]: ts.TransformStream; - [ts.flushAlgorithm_]: ts.FlushAlgorithm; - [ts.transformAlgorithm_]: ts.TransformAlgorithm; - - constructor() { - throw new TypeError(); - } - - get desiredSize(): number | null { - if (!ts.isTransformStreamDefaultController(this)) { - throw new TypeError(); - } - const readableController = this[ts.controlledTransformStream_][ - ts.readable_ - ][rs.readableStreamController_] as rs.SDReadableStreamDefaultController< - OutputType - >; - return rs.readableStreamDefaultControllerGetDesiredSize(readableController); - } - - enqueue(chunk: OutputType): void { - if (!ts.isTransformStreamDefaultController(this)) { - throw new TypeError(); - } - ts.transformStreamDefaultControllerEnqueue(this, chunk); - } - - error(reason: ErrorResult): void { - if (!ts.isTransformStreamDefaultController(this)) { - throw new 
TypeError(); - } - ts.transformStreamDefaultControllerError(this, reason); - } - - terminate(): void { - if (!ts.isTransformStreamDefaultController(this)) { - throw new TypeError(); - } - ts.transformStreamDefaultControllerTerminate(this); - } -} +// TODO reenable this code when we enable writableStreams and transport types +// // Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// // Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +// /** +// * streams/transform-stream-default-controller - TransformStreamDefaultController class implementation +// * Part of Stardazed +// * (c) 2018-Present by Arthur Langereis - @zenmumbler +// * https://github.com/stardazed/sd-streams +// */ + +// import * as rs from "./readable-internals.ts"; +// import * as ts from "./transform-internals.ts"; +// import { ErrorResult } from "./shared-internals.ts"; + +// export class TransformStreamDefaultController +// implements ts.TransformStreamDefaultController { +// [ts.controlledTransformStream_]: ts.TransformStream; +// [ts.flushAlgorithm_]: ts.FlushAlgorithm; +// [ts.transformAlgorithm_]: ts.TransformAlgorithm; + +// constructor() { +// throw new TypeError(); +// } + +// get desiredSize(): number | null { +// if (!ts.isTransformStreamDefaultController(this)) { +// throw new TypeError(); +// } +// const readableController = this[ts.controlledTransformStream_][ +// ts.readable_ +// ][rs.readableStreamController_] as rs.SDReadableStreamDefaultController< +// OutputType +// >; +// return rs.readableStreamDefaultControllerGetDesiredSize(readableController); +// } + +// enqueue(chunk: OutputType): void { +// if (!ts.isTransformStreamDefaultController(this)) { +// throw new TypeError(); +// } +// ts.transformStreamDefaultControllerEnqueue(this, chunk); +// } + +// error(reason: ErrorResult): void { +// if (!ts.isTransformStreamDefaultController(this)) { +// throw new TypeError(); +// } +// ts.transformStreamDefaultControllerError(this, reason); +// } + +// terminate(): void { +// if (!ts.isTransformStreamDefaultController(this)) { +// throw new TypeError(); +// } +// ts.transformStreamDefaultControllerTerminate(this); +// } +// } diff --git a/cli/js/streams/transform-stream.ts b/cli/js/streams/transform-stream.ts index dfd241e143abed..090f781358cbd0 100644 --- a/cli/js/streams/transform-stream.ts +++ b/cli/js/streams/transform-stream.ts @@ -1,146 +1,147 @@ -// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 -// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT +// TODO reenable this code when we enable writableStreams and transport types +// // Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// // Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT -/** - * streams/transform-stream - TransformStream class implementation - * Part of Stardazed - * (c) 2018-Present by Arthur Langereis - @zenmumbler - * https://github.com/stardazed/sd-streams - */ +// /** +// * streams/transform-stream - TransformStream class implementation +// * Part of Stardazed +// * (c) 2018-Present by Arthur Langereis - @zenmumbler +// * https://github.com/stardazed/sd-streams +// */ -/* eslint-disable @typescript-eslint/no-explicit-any */ -// TODO reenable this lint here +// /* eslint-disable @typescript-eslint/no-explicit-any */ +// // TODO reenable this lint here -import * as rs from 
"./readable-internals.ts"; -import * as ws from "./writable-internals.ts"; -import * as ts from "./transform-internals.ts"; -import * as shared from "./shared-internals.ts"; -import { TransformStreamDefaultController } from "./transform-stream-default-controller.ts"; -import { QueuingStrategy } from "../dom_types.ts"; +// import * as rs from "./readable-internals.ts"; +// import * as ws from "./writable-internals.ts"; +// import * as ts from "./transform-internals.ts"; +// import * as shared from "./shared-internals.ts"; +// import { TransformStreamDefaultController } from "./transform-stream-default-controller.ts"; +// import { QueuingStrategy } from "../dom_types.ts"; -export class TransformStream { - [ts.backpressure_]: boolean | undefined; // Whether there was backpressure on [[readable]] the last time it was observed - [ts.backpressureChangePromise_]: shared.ControlledPromise; // A promise which is fulfilled and replaced every time the value of[[backpressure]] changes - [ts.readable_]: rs.SDReadableStream; // The ReadableStream instance controlled by this object - [ts.transformStreamController_]: TransformStreamDefaultController< - InputType, - OutputType - >; // A TransformStreamDefaultController created with the ability to control[[readable]] and[[writable]]; also used for the IsTransformStream brand check - [ts.writable_]: ws.WritableStream; // The WritableStream instance controlled by this object +// export class TransformStream { +// [ts.backpressure_]: boolean | undefined; // Whether there was backpressure on [[readable]] the last time it was observed +// [ts.backpressureChangePromise_]: shared.ControlledPromise; // A promise which is fulfilled and replaced every time the value of[[backpressure]] changes +// [ts.readable_]: rs.SDReadableStream; // The ReadableStream instance controlled by this object +// [ts.transformStreamController_]: TransformStreamDefaultController< +// InputType, +// OutputType +// >; // A TransformStreamDefaultController created with the ability to control[[readable]] and[[writable]]; also used for the IsTransformStream brand check +// [ts.writable_]: ws.WritableStream; // The WritableStream instance controlled by this object - constructor( - transformer: ts.Transformer = {}, - writableStrategy: QueuingStrategy = {}, - readableStrategy: QueuingStrategy = {} - ) { - const writableSizeFunction = writableStrategy.size; - const writableHighWaterMark = writableStrategy.highWaterMark; - const readableSizeFunction = readableStrategy.size; - const readableHighWaterMark = readableStrategy.highWaterMark; +// constructor( +// transformer: ts.Transformer = {}, +// writableStrategy: QueuingStrategy = {}, +// readableStrategy: QueuingStrategy = {} +// ) { +// const writableSizeFunction = writableStrategy.size; +// const writableHighWaterMark = writableStrategy.highWaterMark; +// const readableSizeFunction = readableStrategy.size; +// const readableHighWaterMark = readableStrategy.highWaterMark; - const writableType = transformer.writableType; - if (writableType !== undefined) { - throw new RangeError( - "The transformer's `writableType` field must be undefined" - ); - } - const writableSizeAlgorithm = shared.makeSizeAlgorithmFromSizeFunction( - writableSizeFunction - ); - const writableHWM = shared.validateAndNormalizeHighWaterMark( - writableHighWaterMark === undefined ? 
1 : writableHighWaterMark - ); +// const writableType = transformer.writableType; +// if (writableType !== undefined) { +// throw new RangeError( +// "The transformer's `writableType` field must be undefined" +// ); +// } +// const writableSizeAlgorithm = shared.makeSizeAlgorithmFromSizeFunction( +// writableSizeFunction +// ); +// const writableHWM = shared.validateAndNormalizeHighWaterMark( +// writableHighWaterMark === undefined ? 1 : writableHighWaterMark +// ); - const readableType = transformer.readableType; - if (readableType !== undefined) { - throw new RangeError( - "The transformer's `readableType` field must be undefined" - ); - } - const readableSizeAlgorithm = shared.makeSizeAlgorithmFromSizeFunction( - readableSizeFunction - ); - const readableHWM = shared.validateAndNormalizeHighWaterMark( - readableHighWaterMark === undefined ? 0 : readableHighWaterMark - ); +// const readableType = transformer.readableType; +// if (readableType !== undefined) { +// throw new RangeError( +// "The transformer's `readableType` field must be undefined" +// ); +// } +// const readableSizeAlgorithm = shared.makeSizeAlgorithmFromSizeFunction( +// readableSizeFunction +// ); +// const readableHWM = shared.validateAndNormalizeHighWaterMark( +// readableHighWaterMark === undefined ? 0 : readableHighWaterMark +// ); - const startPromise = shared.createControlledPromise(); - ts.initializeTransformStream( - this, - startPromise.promise, - writableHWM, - writableSizeAlgorithm, - readableHWM, - readableSizeAlgorithm - ); - setUpTransformStreamDefaultControllerFromTransformer(this, transformer); +// const startPromise = shared.createControlledPromise(); +// ts.initializeTransformStream( +// this, +// startPromise.promise, +// writableHWM, +// writableSizeAlgorithm, +// readableHWM, +// readableSizeAlgorithm +// ); +// setUpTransformStreamDefaultControllerFromTransformer(this, transformer); - const startResult = shared.invokeOrNoop(transformer, "start", [ - this[ts.transformStreamController_] - ]); - startPromise.resolve(startResult); - } +// const startResult = shared.invokeOrNoop(transformer, "start", [ +// this[ts.transformStreamController_] +// ]); +// startPromise.resolve(startResult); +// } - get readable(): rs.SDReadableStream { - if (!ts.isTransformStream(this)) { - throw new TypeError(); - } - return this[ts.readable_]; - } +// get readable(): rs.SDReadableStream { +// if (!ts.isTransformStream(this)) { +// throw new TypeError(); +// } +// return this[ts.readable_]; +// } - get writable(): ws.WritableStream { - if (!ts.isTransformStream(this)) { - throw new TypeError(); - } - return this[ts.writable_]; - } -} +// get writable(): ws.WritableStream { +// if (!ts.isTransformStream(this)) { +// throw new TypeError(); +// } +// return this[ts.writable_]; +// } +// } -function setUpTransformStreamDefaultControllerFromTransformer< - InputType, - OutputType ->( - stream: TransformStream, - transformer: ts.Transformer -): void { - const controller = Object.create( - TransformStreamDefaultController.prototype - ) as TransformStreamDefaultController; - let transformAlgorithm: ts.TransformAlgorithm; +// function setUpTransformStreamDefaultControllerFromTransformer< +// InputType, +// OutputType +// >( +// stream: TransformStream, +// transformer: ts.Transformer +// ): void { +// const controller = Object.create( +// TransformStreamDefaultController.prototype +// ) as TransformStreamDefaultController; +// let transformAlgorithm: ts.TransformAlgorithm; - const transformMethod = transformer.transform; - if 
(transformMethod !== undefined) { - if (typeof transformMethod !== "function") { - throw new TypeError( - "`transform` field of the transformer must be a function" - ); - } - transformAlgorithm = (chunk: InputType): Promise => - shared.promiseCall(transformMethod, transformer, [chunk, controller]); - } else { - // use identity transform - transformAlgorithm = function(chunk: InputType): Promise { - try { - // OutputType and InputType are the same here - ts.transformStreamDefaultControllerEnqueue( - controller, - (chunk as unknown) as OutputType - ); - } catch (error) { - return Promise.reject(error); - } - return Promise.resolve(undefined); - }; - } - const flushAlgorithm = shared.createAlgorithmFromUnderlyingMethod( - transformer, - "flush", - [controller] - ); - ts.setUpTransformStreamDefaultController( - stream, - controller, - transformAlgorithm, - flushAlgorithm - ); -} +// const transformMethod = transformer.transform; +// if (transformMethod !== undefined) { +// if (typeof transformMethod !== "function") { +// throw new TypeError( +// "`transform` field of the transformer must be a function" +// ); +// } +// transformAlgorithm = (chunk: InputType): Promise => +// shared.promiseCall(transformMethod, transformer, [chunk, controller]); +// } else { +// // use identity transform +// transformAlgorithm = function(chunk: InputType): Promise { +// try { +// // OutputType and InputType are the same here +// ts.transformStreamDefaultControllerEnqueue( +// controller, +// (chunk as unknown) as OutputType +// ); +// } catch (error) { +// return Promise.reject(error); +// } +// return Promise.resolve(undefined); +// }; +// } +// const flushAlgorithm = shared.createAlgorithmFromUnderlyingMethod( +// transformer, +// "flush", +// [controller] +// ); +// ts.setUpTransformStreamDefaultController( +// stream, +// controller, +// transformAlgorithm, +// flushAlgorithm +// ); +// } diff --git a/cli/js/streams/writable-internals.ts b/cli/js/streams/writable-internals.ts index df0f7a0b8e3b33..78bb19a28242f6 100644 --- a/cli/js/streams/writable-internals.ts +++ b/cli/js/streams/writable-internals.ts @@ -1,799 +1,800 @@ -// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 -// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT - -/** - * streams/writable-internals - internal types and functions for writable streams - * Part of Stardazed - * (c) 2018-Present by Arthur Langereis - @zenmumbler - * https://github.com/stardazed/sd-streams - */ - -/* eslint-disable @typescript-eslint/no-explicit-any */ -// TODO reenable this lint here - -import * as shared from "./shared-internals.ts"; -import * as q from "./queue-mixin.ts"; - -import { QueuingStrategy, QueuingStrategySizeCallback } from "../dom_types.ts"; - -export const backpressure_ = Symbol("backpressure_"); -export const closeRequest_ = Symbol("closeRequest_"); -export const inFlightWriteRequest_ = Symbol("inFlightWriteRequest_"); -export const inFlightCloseRequest_ = Symbol("inFlightCloseRequest_"); -export const pendingAbortRequest_ = Symbol("pendingAbortRequest_"); -export const writableStreamController_ = Symbol("writableStreamController_"); -export const writer_ = Symbol("writer_"); -export const writeRequests_ = Symbol("writeRequests_"); - -export const abortAlgorithm_ = Symbol("abortAlgorithm_"); -export const closeAlgorithm_ = Symbol("closeAlgorithm_"); -export const controlledWritableStream_ = Symbol("controlledWritableStream_"); -export const started_ = 
Symbol("started_"); -export const strategyHWM_ = Symbol("strategyHWM_"); -export const strategySizeAlgorithm_ = Symbol("strategySizeAlgorithm_"); -export const writeAlgorithm_ = Symbol("writeAlgorithm_"); - -export const ownerWritableStream_ = Symbol("ownerWritableStream_"); -export const closedPromise_ = Symbol("closedPromise_"); -export const readyPromise_ = Symbol("readyPromise_"); - -export const errorSteps_ = Symbol("errorSteps_"); -export const abortSteps_ = Symbol("abortSteps_"); - -export type StartFunction = ( - controller: WritableStreamController -) => void | PromiseLike; -export type StartAlgorithm = () => Promise | void; -export type WriteFunction = ( - chunk: InputType, - controller: WritableStreamController -) => void | PromiseLike; -export type WriteAlgorithm = (chunk: InputType) => Promise; -export type CloseAlgorithm = () => Promise; -export type AbortAlgorithm = (reason?: shared.ErrorResult) => Promise; - -// ---- - -export interface WritableStreamController { - error(e?: shared.ErrorResult): void; - - [errorSteps_](): void; - [abortSteps_](reason: shared.ErrorResult): Promise; -} - -export interface WriteRecord { - chunk: InputType; -} - -export interface WritableStreamDefaultController - extends WritableStreamController, - q.QueueContainer | "close"> { - [abortAlgorithm_]: AbortAlgorithm; // A promise - returning algorithm, taking one argument(the abort reason), which communicates a requested abort to the underlying sink - [closeAlgorithm_]: CloseAlgorithm; // A promise - returning algorithm which communicates a requested close to the underlying sink - [controlledWritableStream_]: WritableStream; // The WritableStream instance controlled - [started_]: boolean; // A boolean flag indicating whether the underlying sink has finished starting - [strategyHWM_]: number; // A number supplied by the creator of the stream as part of the stream’s queuing strategy, indicating the point at which the stream will apply backpressure to its underlying sink - [strategySizeAlgorithm_]: QueuingStrategySizeCallback; // An algorithm to calculate the size of enqueued chunks, as part of the stream’s queuing strategy - [writeAlgorithm_]: WriteAlgorithm; // A promise-returning algorithm, taking one argument (the chunk to write), which writes data to the underlying sink -} - -// ---- - -export interface WritableStreamWriter { - readonly closed: Promise; - readonly desiredSize: number | null; - readonly ready: Promise; - - abort(reason: shared.ErrorResult): Promise; - close(): Promise; - releaseLock(): void; - write(chunk: InputType): Promise; -} - -export interface WritableStreamDefaultWriter - extends WritableStreamWriter { - [ownerWritableStream_]: WritableStream | undefined; - [closedPromise_]: shared.ControlledPromise; - [readyPromise_]: shared.ControlledPromise; -} - -// ---- - -export type WritableStreamState = - | "writable" - | "closed" - | "erroring" - | "errored"; - -export interface WritableStreamSink { - start?: StartFunction; - write?: WriteFunction; - close?(): void | PromiseLike; - abort?(reason?: shared.ErrorResult): void; - - type?: undefined; // unused, for future revisions -} - -export interface AbortRequest { - reason: shared.ErrorResult; - wasAlreadyErroring: boolean; - promise: Promise; - resolve(): void; - reject(error: shared.ErrorResult): void; -} - -export declare class WritableStream { - constructor( - underlyingSink?: WritableStreamSink, - strategy?: QueuingStrategy - ); - - readonly locked: boolean; - abort(reason?: shared.ErrorResult): Promise; - getWriter(): 
WritableStreamWriter; - - [shared.state_]: WritableStreamState; - [backpressure_]: boolean; - [closeRequest_]: shared.ControlledPromise | undefined; - [inFlightWriteRequest_]: shared.ControlledPromise | undefined; - [inFlightCloseRequest_]: shared.ControlledPromise | undefined; - [pendingAbortRequest_]: AbortRequest | undefined; - [shared.storedError_]: shared.ErrorResult; - [writableStreamController_]: - | WritableStreamDefaultController - | undefined; - [writer_]: WritableStreamDefaultWriter | undefined; - [writeRequests_]: Array>; -} - -// ---- Stream - -export function initializeWritableStream( - stream: WritableStream -): void { - stream[shared.state_] = "writable"; - stream[shared.storedError_] = undefined; - stream[writer_] = undefined; - stream[writableStreamController_] = undefined; - stream[inFlightWriteRequest_] = undefined; - stream[closeRequest_] = undefined; - stream[inFlightCloseRequest_] = undefined; - stream[pendingAbortRequest_] = undefined; - stream[writeRequests_] = []; - stream[backpressure_] = false; -} - -export function isWritableStream(value: unknown): value is WritableStream { - if (typeof value !== "object" || value === null) { - return false; - } - return writableStreamController_ in value; -} - -export function isWritableStreamLocked( - stream: WritableStream -): boolean { - return stream[writer_] !== undefined; -} - -export function writableStreamAbort( - stream: WritableStream, - reason: shared.ErrorResult -): Promise { - const state = stream[shared.state_]; - if (state === "closed" || state === "errored") { - return Promise.resolve(undefined); - } - let pending = stream[pendingAbortRequest_]; - if (pending !== undefined) { - return pending.promise; - } - // Assert: state is "writable" or "erroring". - let wasAlreadyErroring = false; - if (state === "erroring") { - wasAlreadyErroring = true; - reason = undefined; - } - - pending = { - reason, - wasAlreadyErroring - } as AbortRequest; - const promise = new Promise((resolve, reject) => { - pending!.resolve = resolve; - pending!.reject = reject; - }); - pending.promise = promise; - stream[pendingAbortRequest_] = pending; - if (!wasAlreadyErroring) { - writableStreamStartErroring(stream, reason); - } - return promise; -} - -export function writableStreamAddWriteRequest( - stream: WritableStream -): Promise { - // Assert: !IsWritableStreamLocked(stream) is true. - // Assert: stream.[[state]] is "writable". - const writePromise = shared.createControlledPromise(); - stream[writeRequests_].push(writePromise); - return writePromise.promise; -} - -export function writableStreamDealWithRejection( - stream: WritableStream, - error: shared.ErrorResult -): void { - const state = stream[shared.state_]; - if (state === "writable") { - writableStreamStartErroring(stream, error); - return; - } - // Assert: state is "erroring" - writableStreamFinishErroring(stream); -} - -export function writableStreamStartErroring( - stream: WritableStream, - reason: shared.ErrorResult -): void { - // Assert: stream.[[storedError]] is undefined. - // Assert: stream.[[state]] is "writable". - const controller = stream[writableStreamController_]!; - // Assert: controller is not undefined. 
- stream[shared.state_] = "erroring"; - stream[shared.storedError_] = reason; - const writer = stream[writer_]; - if (writer !== undefined) { - writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason); - } - if ( - !writableStreamHasOperationMarkedInFlight(stream) && - controller[started_] - ) { - writableStreamFinishErroring(stream); - } -} - -export function writableStreamFinishErroring( - stream: WritableStream -): void { - // Assert: stream.[[state]] is "erroring". - // Assert: writableStreamHasOperationMarkedInFlight(stream) is false. - stream[shared.state_] = "errored"; - const controller = stream[writableStreamController_]!; - controller[errorSteps_](); - const storedError = stream[shared.storedError_]; - for (const writeRequest of stream[writeRequests_]) { - writeRequest.reject(storedError); - } - stream[writeRequests_] = []; - - const abortRequest = stream[pendingAbortRequest_]; - if (abortRequest === undefined) { - writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); - return; - } - stream[pendingAbortRequest_] = undefined; - if (abortRequest.wasAlreadyErroring) { - abortRequest.reject(storedError); - writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); - return; - } - const promise = controller[abortSteps_](abortRequest.reason); - promise.then( - _ => { - abortRequest.resolve(); - writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); - }, - error => { - abortRequest.reject(error); - writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); - } - ); -} - -export function writableStreamFinishInFlightWrite( - stream: WritableStream -): void { - // Assert: stream.[[inFlightWriteRequest]] is not undefined. - stream[inFlightWriteRequest_]!.resolve(undefined); - stream[inFlightWriteRequest_] = undefined; -} - -export function writableStreamFinishInFlightWriteWithError( - stream: WritableStream, - error: shared.ErrorResult -): void { - // Assert: stream.[[inFlightWriteRequest]] is not undefined. - stream[inFlightWriteRequest_]!.reject(error); - stream[inFlightWriteRequest_] = undefined; - // Assert: stream.[[state]] is "writable" or "erroring". - writableStreamDealWithRejection(stream, error); -} - -export function writableStreamFinishInFlightClose( - stream: WritableStream -): void { - // Assert: stream.[[inFlightCloseRequest]] is not undefined. - stream[inFlightCloseRequest_]!.resolve(undefined); - stream[inFlightCloseRequest_] = undefined; - const state = stream[shared.state_]; - // Assert: stream.[[state]] is "writable" or "erroring". - if (state === "erroring") { - stream[shared.storedError_] = undefined; - if (stream[pendingAbortRequest_] !== undefined) { - stream[pendingAbortRequest_]!.resolve(); - stream[pendingAbortRequest_] = undefined; - } - } - stream[shared.state_] = "closed"; - const writer = stream[writer_]; - if (writer !== undefined) { - writer[closedPromise_].resolve(undefined); - } - // Assert: stream.[[pendingAbortRequest]] is undefined. - // Assert: stream.[[storedError]] is undefined. -} - -export function writableStreamFinishInFlightCloseWithError( - stream: WritableStream, - error: shared.ErrorResult -): void { - // Assert: stream.[[inFlightCloseRequest]] is not undefined. - stream[inFlightCloseRequest_]!.reject(error); - stream[inFlightCloseRequest_] = undefined; - // Assert: stream.[[state]] is "writable" or "erroring". 
- if (stream[pendingAbortRequest_] !== undefined) { - stream[pendingAbortRequest_]!.reject(error); - stream[pendingAbortRequest_] = undefined; - } - writableStreamDealWithRejection(stream, error); -} - -export function writableStreamCloseQueuedOrInFlight( - stream: WritableStream -): boolean { - return ( - stream[closeRequest_] !== undefined || - stream[inFlightCloseRequest_] !== undefined - ); -} - -export function writableStreamHasOperationMarkedInFlight( - stream: WritableStream -): boolean { - return ( - stream[inFlightWriteRequest_] !== undefined || - stream[inFlightCloseRequest_] !== undefined - ); -} - -export function writableStreamMarkCloseRequestInFlight( - stream: WritableStream -): void { - // Assert: stream.[[inFlightCloseRequest]] is undefined. - // Assert: stream.[[closeRequest]] is not undefined. - stream[inFlightCloseRequest_] = stream[closeRequest_]; - stream[closeRequest_] = undefined; -} - -export function writableStreamMarkFirstWriteRequestInFlight( - stream: WritableStream -): void { - // Assert: stream.[[inFlightWriteRequest]] is undefined. - // Assert: stream.[[writeRequests]] is not empty. - const writeRequest = stream[writeRequests_].shift()!; - stream[inFlightWriteRequest_] = writeRequest; -} - -export function writableStreamRejectCloseAndClosedPromiseIfNeeded( - stream: WritableStream -): void { - // Assert: stream.[[state]] is "errored". - const closeRequest = stream[closeRequest_]; - if (closeRequest !== undefined) { - // Assert: stream.[[inFlightCloseRequest]] is undefined. - closeRequest.reject(stream[shared.storedError_]); - stream[closeRequest_] = undefined; - } - const writer = stream[writer_]; - if (writer !== undefined) { - writer[closedPromise_].reject(stream[shared.storedError_]); - writer[closedPromise_].promise.catch(() => {}); - } -} - -export function writableStreamUpdateBackpressure( - stream: WritableStream, - backpressure: boolean -): void { - // Assert: stream.[[state]] is "writable". - // Assert: !WritableStreamCloseQueuedOrInFlight(stream) is false. - const writer = stream[writer_]; - if (writer !== undefined && backpressure !== stream[backpressure_]) { - if (backpressure) { - writer[readyPromise_] = shared.createControlledPromise(); - } else { - writer[readyPromise_].resolve(undefined); - } - } - stream[backpressure_] = backpressure; -} - -// ---- Writers - -export function isWritableStreamDefaultWriter( - value: unknown -): value is WritableStreamDefaultWriter { - if (typeof value !== "object" || value === null) { - return false; - } - return ownerWritableStream_ in value; -} - -export function writableStreamDefaultWriterAbort( - writer: WritableStreamDefaultWriter, - reason: shared.ErrorResult -): Promise { - const stream = writer[ownerWritableStream_]!; - // Assert: stream is not undefined. - return writableStreamAbort(stream, reason); -} - -export function writableStreamDefaultWriterClose( - writer: WritableStreamDefaultWriter -): Promise { - const stream = writer[ownerWritableStream_]!; - // Assert: stream is not undefined. - const state = stream[shared.state_]; - if (state === "closed" || state === "errored") { - return Promise.reject( - new TypeError("Writer stream is already closed or errored") - ); - } - // Assert: state is "writable" or "erroring". - // Assert: writableStreamCloseQueuedOrInFlight(stream) is false. 
- const closePromise = shared.createControlledPromise(); - stream[closeRequest_] = closePromise; - if (stream[backpressure_] && state === "writable") { - writer[readyPromise_].resolve(undefined); - } - writableStreamDefaultControllerClose(stream[writableStreamController_]!); - return closePromise.promise; -} - -export function writableStreamDefaultWriterCloseWithErrorPropagation( - writer: WritableStreamDefaultWriter -): Promise { - const stream = writer[ownerWritableStream_]!; - // Assert: stream is not undefined. - const state = stream[shared.state_]; - if (writableStreamCloseQueuedOrInFlight(stream) || state === "closed") { - return Promise.resolve(undefined); - } - if (state === "errored") { - return Promise.reject(stream[shared.storedError_]); - } - // Assert: state is "writable" or "erroring". - return writableStreamDefaultWriterClose(writer); -} - -export function writableStreamDefaultWriterEnsureClosedPromiseRejected< - InputType ->( - writer: WritableStreamDefaultWriter, - error: shared.ErrorResult -): void { - const closedPromise = writer[closedPromise_]; - if (closedPromise.state === shared.ControlledPromiseState.Pending) { - closedPromise.reject(error); - } else { - writer[closedPromise_] = shared.createControlledPromise(); - writer[closedPromise_].reject(error); - } - writer[closedPromise_].promise.catch(() => {}); -} - -export function writableStreamDefaultWriterEnsureReadyPromiseRejected< - InputType ->( - writer: WritableStreamDefaultWriter, - error: shared.ErrorResult -): void { - const readyPromise = writer[readyPromise_]; - if (readyPromise.state === shared.ControlledPromiseState.Pending) { - readyPromise.reject(error); - } else { - writer[readyPromise_] = shared.createControlledPromise(); - writer[readyPromise_].reject(error); - } - writer[readyPromise_].promise.catch(() => {}); -} - -export function writableStreamDefaultWriterGetDesiredSize( - writer: WritableStreamDefaultWriter -): number | null { - const stream = writer[ownerWritableStream_]!; - const state = stream[shared.state_]; - if (state === "errored" || state === "erroring") { - return null; - } - if (state === "closed") { - return 0; - } - return writableStreamDefaultControllerGetDesiredSize( - stream[writableStreamController_]! - ); -} - -export function writableStreamDefaultWriterRelease( - writer: WritableStreamDefaultWriter -): void { - const stream = writer[ownerWritableStream_]!; - // Assert: stream is not undefined. - // Assert: stream.[[writer]] is writer. - const releasedError = new TypeError(); - writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, releasedError); - writableStreamDefaultWriterEnsureClosedPromiseRejected(writer, releasedError); - stream[writer_] = undefined; - writer[ownerWritableStream_] = undefined; -} - -export function writableStreamDefaultWriterWrite( - writer: WritableStreamDefaultWriter, - chunk: InputType -): Promise { - const stream = writer[ownerWritableStream_]!; - // Assert: stream is not undefined. 
- const controller = stream[writableStreamController_]!; - const chunkSize = writableStreamDefaultControllerGetChunkSize( - controller, - chunk - ); - if (writer[ownerWritableStream_] !== stream) { - return Promise.reject(new TypeError()); - } - const state = stream[shared.state_]; - if (state === "errored") { - return Promise.reject(stream[shared.storedError_]); - } - if (writableStreamCloseQueuedOrInFlight(stream) || state === "closed") { - return Promise.reject( - new TypeError("Cannot write to a closing or closed stream") - ); - } - if (state === "erroring") { - return Promise.reject(stream[shared.storedError_]); - } - // Assert: state is "writable". - const promise = writableStreamAddWriteRequest(stream); - writableStreamDefaultControllerWrite(controller, chunk, chunkSize); - return promise; -} - -// ---- Controller - -export function setUpWritableStreamDefaultController( - stream: WritableStream, - controller: WritableStreamDefaultController, - startAlgorithm: StartAlgorithm, - writeAlgorithm: WriteAlgorithm, - closeAlgorithm: CloseAlgorithm, - abortAlgorithm: AbortAlgorithm, - highWaterMark: number, - sizeAlgorithm: QueuingStrategySizeCallback -): void { - if (!isWritableStream(stream)) { - throw new TypeError(); - } - if (stream[writableStreamController_] !== undefined) { - throw new TypeError(); - } - - controller[controlledWritableStream_] = stream; - stream[writableStreamController_] = controller; - q.resetQueue(controller); - controller[started_] = false; - controller[strategySizeAlgorithm_] = sizeAlgorithm; - controller[strategyHWM_] = highWaterMark; - controller[writeAlgorithm_] = writeAlgorithm; - controller[closeAlgorithm_] = closeAlgorithm; - controller[abortAlgorithm_] = abortAlgorithm; - const backpressure = writableStreamDefaultControllerGetBackpressure( - controller - ); - writableStreamUpdateBackpressure(stream, backpressure); - - const startResult = startAlgorithm(); - Promise.resolve(startResult).then( - _ => { - // Assert: stream.[[state]] is "writable" or "erroring". - controller[started_] = true; - writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); - }, - error => { - // Assert: stream.[[state]] is "writable" or "erroring". - controller[started_] = true; - writableStreamDealWithRejection(stream, error); - } - ); -} - -export function isWritableStreamDefaultController( - value: unknown -): value is WritableStreamDefaultController { - if (typeof value !== "object" || value === null) { - return false; - } - return controlledWritableStream_ in value; -} - -export function writableStreamDefaultControllerClearAlgorithms( - controller: WritableStreamDefaultController -): void { - // Use ! assertions to override type check here, this way we don't - // have to perform type checks/assertions everywhere else. 
- controller[writeAlgorithm_] = undefined!; - controller[closeAlgorithm_] = undefined!; - controller[abortAlgorithm_] = undefined!; - controller[strategySizeAlgorithm_] = undefined!; -} - -export function writableStreamDefaultControllerClose( - controller: WritableStreamDefaultController -): void { - q.enqueueValueWithSize(controller, "close", 0); - writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); -} - -export function writableStreamDefaultControllerGetChunkSize( - controller: WritableStreamDefaultController, - chunk: InputType -): number { - let chunkSize: number; - try { - chunkSize = controller[strategySizeAlgorithm_](chunk); - } catch (error) { - writableStreamDefaultControllerErrorIfNeeded(controller, error); - chunkSize = 1; - } - return chunkSize; -} - -export function writableStreamDefaultControllerGetDesiredSize( - controller: WritableStreamDefaultController -): number { - return controller[strategyHWM_] - controller[q.queueTotalSize_]; -} - -export function writableStreamDefaultControllerWrite( - controller: WritableStreamDefaultController, - chunk: InputType, - chunkSize: number -): void { - try { - q.enqueueValueWithSize(controller, { chunk }, chunkSize); - } catch (error) { - writableStreamDefaultControllerErrorIfNeeded(controller, error); - return; - } - const stream = controller[controlledWritableStream_]; - if ( - !writableStreamCloseQueuedOrInFlight(stream) && - stream[shared.state_] === "writable" - ) { - const backpressure = writableStreamDefaultControllerGetBackpressure( - controller - ); - writableStreamUpdateBackpressure(stream, backpressure); - } - writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); -} - -export function writableStreamDefaultControllerAdvanceQueueIfNeeded( - controller: WritableStreamDefaultController -): void { - if (!controller[started_]) { - return; - } - const stream = controller[controlledWritableStream_]; - if (stream[inFlightWriteRequest_] !== undefined) { - return; - } - const state = stream[shared.state_]; - if (state === "closed" || state === "errored") { - return; - } - if (state === "erroring") { - writableStreamFinishErroring(stream); - return; - } - if (controller[q.queue_].length === 0) { - return; - } - const writeRecord = q.peekQueueValue(controller); - if (writeRecord === "close") { - writableStreamDefaultControllerProcessClose(controller); - } else { - writableStreamDefaultControllerProcessWrite(controller, writeRecord.chunk); - } -} - -export function writableStreamDefaultControllerErrorIfNeeded( - controller: WritableStreamDefaultController, - error: shared.ErrorResult -): void { - if (controller[controlledWritableStream_][shared.state_] === "writable") { - writableStreamDefaultControllerError(controller, error); - } -} - -export function writableStreamDefaultControllerProcessClose( - controller: WritableStreamDefaultController -): void { - const stream = controller[controlledWritableStream_]; - writableStreamMarkCloseRequestInFlight(stream); - q.dequeueValue(controller); - // Assert: controller.[[queue]] is empty. 
- const sinkClosePromise = controller[closeAlgorithm_](); - writableStreamDefaultControllerClearAlgorithms(controller); - sinkClosePromise.then( - _ => { - writableStreamFinishInFlightClose(stream); - }, - error => { - writableStreamFinishInFlightCloseWithError(stream, error); - } - ); -} - -export function writableStreamDefaultControllerProcessWrite( - controller: WritableStreamDefaultController, - chunk: InputType -): void { - const stream = controller[controlledWritableStream_]; - writableStreamMarkFirstWriteRequestInFlight(stream); - controller[writeAlgorithm_](chunk).then( - _ => { - writableStreamFinishInFlightWrite(stream); - const state = stream[shared.state_]; - // Assert: state is "writable" or "erroring". - q.dequeueValue(controller); - if ( - !writableStreamCloseQueuedOrInFlight(stream) && - state === "writable" - ) { - const backpressure = writableStreamDefaultControllerGetBackpressure( - controller - ); - writableStreamUpdateBackpressure(stream, backpressure); - } - writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); - }, - error => { - if (stream[shared.state_] === "writable") { - writableStreamDefaultControllerClearAlgorithms(controller); - } - writableStreamFinishInFlightWriteWithError(stream, error); - } - ); -} - -export function writableStreamDefaultControllerGetBackpressure( - controller: WritableStreamDefaultController -): boolean { - const desiredSize = writableStreamDefaultControllerGetDesiredSize(controller); - return desiredSize <= 0; -} - -export function writableStreamDefaultControllerError( - controller: WritableStreamDefaultController, - error: shared.ErrorResult -): void { - const stream = controller[controlledWritableStream_]; - // Assert: stream.[[state]] is "writable". - writableStreamDefaultControllerClearAlgorithms(controller); - writableStreamStartErroring(stream, error); -} +// TODO reenable this code when we enable writableStreams and transport types +// // Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546 +// // Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT + +// /** +// * streams/writable-internals - internal types and functions for writable streams +// * Part of Stardazed +// * (c) 2018-Present by Arthur Langereis - @zenmumbler +// * https://github.com/stardazed/sd-streams +// */ + +// /* eslint-disable @typescript-eslint/no-explicit-any */ +// // TODO reenable this lint here + +// import * as shared from "./shared-internals.ts"; +// import * as q from "./queue-mixin.ts"; + +// import { QueuingStrategy, QueuingStrategySizeCallback } from "../dom_types.ts"; + +// export const backpressure_ = Symbol("backpressure_"); +// export const closeRequest_ = Symbol("closeRequest_"); +// export const inFlightWriteRequest_ = Symbol("inFlightWriteRequest_"); +// export const inFlightCloseRequest_ = Symbol("inFlightCloseRequest_"); +// export const pendingAbortRequest_ = Symbol("pendingAbortRequest_"); +// export const writableStreamController_ = Symbol("writableStreamController_"); +// export const writer_ = Symbol("writer_"); +// export const writeRequests_ = Symbol("writeRequests_"); + +// export const abortAlgorithm_ = Symbol("abortAlgorithm_"); +// export const closeAlgorithm_ = Symbol("closeAlgorithm_"); +// export const controlledWritableStream_ = Symbol("controlledWritableStream_"); +// export const started_ = Symbol("started_"); +// export const strategyHWM_ = Symbol("strategyHWM_"); +// export const strategySizeAlgorithm_ = Symbol("strategySizeAlgorithm_"); 
+// export const writeAlgorithm_ = Symbol("writeAlgorithm_"); + +// export const ownerWritableStream_ = Symbol("ownerWritableStream_"); +// export const closedPromise_ = Symbol("closedPromise_"); +// export const readyPromise_ = Symbol("readyPromise_"); + +// export const errorSteps_ = Symbol("errorSteps_"); +// export const abortSteps_ = Symbol("abortSteps_"); + +// export type StartFunction = ( +// controller: WritableStreamController +// ) => void | PromiseLike; +// export type StartAlgorithm = () => Promise | void; +// export type WriteFunction = ( +// chunk: InputType, +// controller: WritableStreamController +// ) => void | PromiseLike; +// export type WriteAlgorithm = (chunk: InputType) => Promise; +// export type CloseAlgorithm = () => Promise; +// export type AbortAlgorithm = (reason?: shared.ErrorResult) => Promise; + +// // ---- + +// export interface WritableStreamController { +// error(e?: shared.ErrorResult): void; + +// [errorSteps_](): void; +// [abortSteps_](reason: shared.ErrorResult): Promise; +// } + +// export interface WriteRecord { +// chunk: InputType; +// } + +// export interface WritableStreamDefaultController +// extends WritableStreamController, +// q.QueueContainer | "close"> { +// [abortAlgorithm_]: AbortAlgorithm; // A promise - returning algorithm, taking one argument(the abort reason), which communicates a requested abort to the underlying sink +// [closeAlgorithm_]: CloseAlgorithm; // A promise - returning algorithm which communicates a requested close to the underlying sink +// [controlledWritableStream_]: WritableStream; // The WritableStream instance controlled +// [started_]: boolean; // A boolean flag indicating whether the underlying sink has finished starting +// [strategyHWM_]: number; // A number supplied by the creator of the stream as part of the stream’s queuing strategy, indicating the point at which the stream will apply backpressure to its underlying sink +// [strategySizeAlgorithm_]: QueuingStrategySizeCallback; // An algorithm to calculate the size of enqueued chunks, as part of the stream’s queuing strategy +// [writeAlgorithm_]: WriteAlgorithm; // A promise-returning algorithm, taking one argument (the chunk to write), which writes data to the underlying sink +// } + +// // ---- + +// export interface WritableStreamWriter { +// readonly closed: Promise; +// readonly desiredSize: number | null; +// readonly ready: Promise; + +// abort(reason: shared.ErrorResult): Promise; +// close(): Promise; +// releaseLock(): void; +// write(chunk: InputType): Promise; +// } + +// export interface WritableStreamDefaultWriter +// extends WritableStreamWriter { +// [ownerWritableStream_]: WritableStream | undefined; +// [closedPromise_]: shared.ControlledPromise; +// [readyPromise_]: shared.ControlledPromise; +// } + +// // ---- + +// export type WritableStreamState = +// | "writable" +// | "closed" +// | "erroring" +// | "errored"; + +// export interface WritableStreamSink { +// start?: StartFunction; +// write?: WriteFunction; +// close?(): void | PromiseLike; +// abort?(reason?: shared.ErrorResult): void; + +// type?: undefined; // unused, for future revisions +// } + +// export interface AbortRequest { +// reason: shared.ErrorResult; +// wasAlreadyErroring: boolean; +// promise: Promise; +// resolve(): void; +// reject(error: shared.ErrorResult): void; +// } + +// export declare class WritableStream { +// constructor( +// underlyingSink?: WritableStreamSink, +// strategy?: QueuingStrategy +// ); + +// readonly locked: boolean; +// abort(reason?: 
shared.ErrorResult): Promise; +// getWriter(): WritableStreamWriter; + +// [shared.state_]: WritableStreamState; +// [backpressure_]: boolean; +// [closeRequest_]: shared.ControlledPromise | undefined; +// [inFlightWriteRequest_]: shared.ControlledPromise | undefined; +// [inFlightCloseRequest_]: shared.ControlledPromise | undefined; +// [pendingAbortRequest_]: AbortRequest | undefined; +// [shared.storedError_]: shared.ErrorResult; +// [writableStreamController_]: +// | WritableStreamDefaultController +// | undefined; +// [writer_]: WritableStreamDefaultWriter | undefined; +// [writeRequests_]: Array>; +// } + +// // ---- Stream + +// export function initializeWritableStream( +// stream: WritableStream +// ): void { +// stream[shared.state_] = "writable"; +// stream[shared.storedError_] = undefined; +// stream[writer_] = undefined; +// stream[writableStreamController_] = undefined; +// stream[inFlightWriteRequest_] = undefined; +// stream[closeRequest_] = undefined; +// stream[inFlightCloseRequest_] = undefined; +// stream[pendingAbortRequest_] = undefined; +// stream[writeRequests_] = []; +// stream[backpressure_] = false; +// } + +// export function isWritableStream(value: unknown): value is WritableStream { +// if (typeof value !== "object" || value === null) { +// return false; +// } +// return writableStreamController_ in value; +// } + +// export function isWritableStreamLocked( +// stream: WritableStream +// ): boolean { +// return stream[writer_] !== undefined; +// } + +// export function writableStreamAbort( +// stream: WritableStream, +// reason: shared.ErrorResult +// ): Promise { +// const state = stream[shared.state_]; +// if (state === "closed" || state === "errored") { +// return Promise.resolve(undefined); +// } +// let pending = stream[pendingAbortRequest_]; +// if (pending !== undefined) { +// return pending.promise; +// } +// // Assert: state is "writable" or "erroring". +// let wasAlreadyErroring = false; +// if (state === "erroring") { +// wasAlreadyErroring = true; +// reason = undefined; +// } + +// pending = { +// reason, +// wasAlreadyErroring +// } as AbortRequest; +// const promise = new Promise((resolve, reject) => { +// pending!.resolve = resolve; +// pending!.reject = reject; +// }); +// pending.promise = promise; +// stream[pendingAbortRequest_] = pending; +// if (!wasAlreadyErroring) { +// writableStreamStartErroring(stream, reason); +// } +// return promise; +// } + +// export function writableStreamAddWriteRequest( +// stream: WritableStream +// ): Promise { +// // Assert: !IsWritableStreamLocked(stream) is true. +// // Assert: stream.[[state]] is "writable". +// const writePromise = shared.createControlledPromise(); +// stream[writeRequests_].push(writePromise); +// return writePromise.promise; +// } + +// export function writableStreamDealWithRejection( +// stream: WritableStream, +// error: shared.ErrorResult +// ): void { +// const state = stream[shared.state_]; +// if (state === "writable") { +// writableStreamStartErroring(stream, error); +// return; +// } +// // Assert: state is "erroring" +// writableStreamFinishErroring(stream); +// } + +// export function writableStreamStartErroring( +// stream: WritableStream, +// reason: shared.ErrorResult +// ): void { +// // Assert: stream.[[storedError]] is undefined. +// // Assert: stream.[[state]] is "writable". +// const controller = stream[writableStreamController_]!; +// // Assert: controller is not undefined. 
+// stream[shared.state_] = "erroring"; +// stream[shared.storedError_] = reason; +// const writer = stream[writer_]; +// if (writer !== undefined) { +// writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason); +// } +// if ( +// !writableStreamHasOperationMarkedInFlight(stream) && +// controller[started_] +// ) { +// writableStreamFinishErroring(stream); +// } +// } + +// export function writableStreamFinishErroring( +// stream: WritableStream +// ): void { +// // Assert: stream.[[state]] is "erroring". +// // Assert: writableStreamHasOperationMarkedInFlight(stream) is false. +// stream[shared.state_] = "errored"; +// const controller = stream[writableStreamController_]!; +// controller[errorSteps_](); +// const storedError = stream[shared.storedError_]; +// for (const writeRequest of stream[writeRequests_]) { +// writeRequest.reject(storedError); +// } +// stream[writeRequests_] = []; + +// const abortRequest = stream[pendingAbortRequest_]; +// if (abortRequest === undefined) { +// writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); +// return; +// } +// stream[pendingAbortRequest_] = undefined; +// if (abortRequest.wasAlreadyErroring) { +// abortRequest.reject(storedError); +// writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); +// return; +// } +// const promise = controller[abortSteps_](abortRequest.reason); +// promise.then( +// _ => { +// abortRequest.resolve(); +// writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); +// }, +// error => { +// abortRequest.reject(error); +// writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); +// } +// ); +// } + +// export function writableStreamFinishInFlightWrite( +// stream: WritableStream +// ): void { +// // Assert: stream.[[inFlightWriteRequest]] is not undefined. +// stream[inFlightWriteRequest_]!.resolve(undefined); +// stream[inFlightWriteRequest_] = undefined; +// } + +// export function writableStreamFinishInFlightWriteWithError( +// stream: WritableStream, +// error: shared.ErrorResult +// ): void { +// // Assert: stream.[[inFlightWriteRequest]] is not undefined. +// stream[inFlightWriteRequest_]!.reject(error); +// stream[inFlightWriteRequest_] = undefined; +// // Assert: stream.[[state]] is "writable" or "erroring". +// writableStreamDealWithRejection(stream, error); +// } + +// export function writableStreamFinishInFlightClose( +// stream: WritableStream +// ): void { +// // Assert: stream.[[inFlightCloseRequest]] is not undefined. +// stream[inFlightCloseRequest_]!.resolve(undefined); +// stream[inFlightCloseRequest_] = undefined; +// const state = stream[shared.state_]; +// // Assert: stream.[[state]] is "writable" or "erroring". +// if (state === "erroring") { +// stream[shared.storedError_] = undefined; +// if (stream[pendingAbortRequest_] !== undefined) { +// stream[pendingAbortRequest_]!.resolve(); +// stream[pendingAbortRequest_] = undefined; +// } +// } +// stream[shared.state_] = "closed"; +// const writer = stream[writer_]; +// if (writer !== undefined) { +// writer[closedPromise_].resolve(undefined); +// } +// // Assert: stream.[[pendingAbortRequest]] is undefined. +// // Assert: stream.[[storedError]] is undefined. +// } + +// export function writableStreamFinishInFlightCloseWithError( +// stream: WritableStream, +// error: shared.ErrorResult +// ): void { +// // Assert: stream.[[inFlightCloseRequest]] is not undefined. +// stream[inFlightCloseRequest_]!.reject(error); +// stream[inFlightCloseRequest_] = undefined; +// // Assert: stream.[[state]] is "writable" or "erroring". 
+// if (stream[pendingAbortRequest_] !== undefined) { +// stream[pendingAbortRequest_]!.reject(error); +// stream[pendingAbortRequest_] = undefined; +// } +// writableStreamDealWithRejection(stream, error); +// } + +// export function writableStreamCloseQueuedOrInFlight( +// stream: WritableStream +// ): boolean { +// return ( +// stream[closeRequest_] !== undefined || +// stream[inFlightCloseRequest_] !== undefined +// ); +// } + +// export function writableStreamHasOperationMarkedInFlight( +// stream: WritableStream +// ): boolean { +// return ( +// stream[inFlightWriteRequest_] !== undefined || +// stream[inFlightCloseRequest_] !== undefined +// ); +// } + +// export function writableStreamMarkCloseRequestInFlight( +// stream: WritableStream +// ): void { +// // Assert: stream.[[inFlightCloseRequest]] is undefined. +// // Assert: stream.[[closeRequest]] is not undefined. +// stream[inFlightCloseRequest_] = stream[closeRequest_]; +// stream[closeRequest_] = undefined; +// } + +// export function writableStreamMarkFirstWriteRequestInFlight( +// stream: WritableStream +// ): void { +// // Assert: stream.[[inFlightWriteRequest]] is undefined. +// // Assert: stream.[[writeRequests]] is not empty. +// const writeRequest = stream[writeRequests_].shift()!; +// stream[inFlightWriteRequest_] = writeRequest; +// } + +// export function writableStreamRejectCloseAndClosedPromiseIfNeeded( +// stream: WritableStream +// ): void { +// // Assert: stream.[[state]] is "errored". +// const closeRequest = stream[closeRequest_]; +// if (closeRequest !== undefined) { +// // Assert: stream.[[inFlightCloseRequest]] is undefined. +// closeRequest.reject(stream[shared.storedError_]); +// stream[closeRequest_] = undefined; +// } +// const writer = stream[writer_]; +// if (writer !== undefined) { +// writer[closedPromise_].reject(stream[shared.storedError_]); +// writer[closedPromise_].promise.catch(() => {}); +// } +// } + +// export function writableStreamUpdateBackpressure( +// stream: WritableStream, +// backpressure: boolean +// ): void { +// // Assert: stream.[[state]] is "writable". +// // Assert: !WritableStreamCloseQueuedOrInFlight(stream) is false. +// const writer = stream[writer_]; +// if (writer !== undefined && backpressure !== stream[backpressure_]) { +// if (backpressure) { +// writer[readyPromise_] = shared.createControlledPromise(); +// } else { +// writer[readyPromise_].resolve(undefined); +// } +// } +// stream[backpressure_] = backpressure; +// } + +// // ---- Writers + +// export function isWritableStreamDefaultWriter( +// value: unknown +// ): value is WritableStreamDefaultWriter { +// if (typeof value !== "object" || value === null) { +// return false; +// } +// return ownerWritableStream_ in value; +// } + +// export function writableStreamDefaultWriterAbort( +// writer: WritableStreamDefaultWriter, +// reason: shared.ErrorResult +// ): Promise { +// const stream = writer[ownerWritableStream_]!; +// // Assert: stream is not undefined. +// return writableStreamAbort(stream, reason); +// } + +// export function writableStreamDefaultWriterClose( +// writer: WritableStreamDefaultWriter +// ): Promise { +// const stream = writer[ownerWritableStream_]!; +// // Assert: stream is not undefined. +// const state = stream[shared.state_]; +// if (state === "closed" || state === "errored") { +// return Promise.reject( +// new TypeError("Writer stream is already closed or errored") +// ); +// } +// // Assert: state is "writable" or "erroring". 
+// // Assert: writableStreamCloseQueuedOrInFlight(stream) is false. +// const closePromise = shared.createControlledPromise(); +// stream[closeRequest_] = closePromise; +// if (stream[backpressure_] && state === "writable") { +// writer[readyPromise_].resolve(undefined); +// } +// writableStreamDefaultControllerClose(stream[writableStreamController_]!); +// return closePromise.promise; +// } + +// export function writableStreamDefaultWriterCloseWithErrorPropagation( +// writer: WritableStreamDefaultWriter +// ): Promise { +// const stream = writer[ownerWritableStream_]!; +// // Assert: stream is not undefined. +// const state = stream[shared.state_]; +// if (writableStreamCloseQueuedOrInFlight(stream) || state === "closed") { +// return Promise.resolve(undefined); +// } +// if (state === "errored") { +// return Promise.reject(stream[shared.storedError_]); +// } +// // Assert: state is "writable" or "erroring". +// return writableStreamDefaultWriterClose(writer); +// } + +// export function writableStreamDefaultWriterEnsureClosedPromiseRejected< +// InputType +// >( +// writer: WritableStreamDefaultWriter, +// error: shared.ErrorResult +// ): void { +// const closedPromise = writer[closedPromise_]; +// if (closedPromise.state === shared.ControlledPromiseState.Pending) { +// closedPromise.reject(error); +// } else { +// writer[closedPromise_] = shared.createControlledPromise(); +// writer[closedPromise_].reject(error); +// } +// writer[closedPromise_].promise.catch(() => {}); +// } + +// export function writableStreamDefaultWriterEnsureReadyPromiseRejected< +// InputType +// >( +// writer: WritableStreamDefaultWriter, +// error: shared.ErrorResult +// ): void { +// const readyPromise = writer[readyPromise_]; +// if (readyPromise.state === shared.ControlledPromiseState.Pending) { +// readyPromise.reject(error); +// } else { +// writer[readyPromise_] = shared.createControlledPromise(); +// writer[readyPromise_].reject(error); +// } +// writer[readyPromise_].promise.catch(() => {}); +// } + +// export function writableStreamDefaultWriterGetDesiredSize( +// writer: WritableStreamDefaultWriter +// ): number | null { +// const stream = writer[ownerWritableStream_]!; +// const state = stream[shared.state_]; +// if (state === "errored" || state === "erroring") { +// return null; +// } +// if (state === "closed") { +// return 0; +// } +// return writableStreamDefaultControllerGetDesiredSize( +// stream[writableStreamController_]! +// ); +// } + +// export function writableStreamDefaultWriterRelease( +// writer: WritableStreamDefaultWriter +// ): void { +// const stream = writer[ownerWritableStream_]!; +// // Assert: stream is not undefined. +// // Assert: stream.[[writer]] is writer. +// const releasedError = new TypeError(); +// writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, releasedError); +// writableStreamDefaultWriterEnsureClosedPromiseRejected(writer, releasedError); +// stream[writer_] = undefined; +// writer[ownerWritableStream_] = undefined; +// } + +// export function writableStreamDefaultWriterWrite( +// writer: WritableStreamDefaultWriter, +// chunk: InputType +// ): Promise { +// const stream = writer[ownerWritableStream_]!; +// // Assert: stream is not undefined. 
+// const controller = stream[writableStreamController_]!; +// const chunkSize = writableStreamDefaultControllerGetChunkSize( +// controller, +// chunk +// ); +// if (writer[ownerWritableStream_] !== stream) { +// return Promise.reject(new TypeError()); +// } +// const state = stream[shared.state_]; +// if (state === "errored") { +// return Promise.reject(stream[shared.storedError_]); +// } +// if (writableStreamCloseQueuedOrInFlight(stream) || state === "closed") { +// return Promise.reject( +// new TypeError("Cannot write to a closing or closed stream") +// ); +// } +// if (state === "erroring") { +// return Promise.reject(stream[shared.storedError_]); +// } +// // Assert: state is "writable". +// const promise = writableStreamAddWriteRequest(stream); +// writableStreamDefaultControllerWrite(controller, chunk, chunkSize); +// return promise; +// } + +// // ---- Controller + +// export function setUpWritableStreamDefaultController( +// stream: WritableStream, +// controller: WritableStreamDefaultController, +// startAlgorithm: StartAlgorithm, +// writeAlgorithm: WriteAlgorithm, +// closeAlgorithm: CloseAlgorithm, +// abortAlgorithm: AbortAlgorithm, +// highWaterMark: number, +// sizeAlgorithm: QueuingStrategySizeCallback +// ): void { +// if (!isWritableStream(stream)) { +// throw new TypeError(); +// } +// if (stream[writableStreamController_] !== undefined) { +// throw new TypeError(); +// } + +// controller[controlledWritableStream_] = stream; +// stream[writableStreamController_] = controller; +// q.resetQueue(controller); +// controller[started_] = false; +// controller[strategySizeAlgorithm_] = sizeAlgorithm; +// controller[strategyHWM_] = highWaterMark; +// controller[writeAlgorithm_] = writeAlgorithm; +// controller[closeAlgorithm_] = closeAlgorithm; +// controller[abortAlgorithm_] = abortAlgorithm; +// const backpressure = writableStreamDefaultControllerGetBackpressure( +// controller +// ); +// writableStreamUpdateBackpressure(stream, backpressure); + +// const startResult = startAlgorithm(); +// Promise.resolve(startResult).then( +// _ => { +// // Assert: stream.[[state]] is "writable" or "erroring". +// controller[started_] = true; +// writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); +// }, +// error => { +// // Assert: stream.[[state]] is "writable" or "erroring". +// controller[started_] = true; +// writableStreamDealWithRejection(stream, error); +// } +// ); +// } + +// export function isWritableStreamDefaultController( +// value: unknown +// ): value is WritableStreamDefaultController { +// if (typeof value !== "object" || value === null) { +// return false; +// } +// return controlledWritableStream_ in value; +// } + +// export function writableStreamDefaultControllerClearAlgorithms( +// controller: WritableStreamDefaultController +// ): void { +// // Use ! assertions to override type check here, this way we don't +// // have to perform type checks/assertions everywhere else. 
+// controller[writeAlgorithm_] = undefined!; +// controller[closeAlgorithm_] = undefined!; +// controller[abortAlgorithm_] = undefined!; +// controller[strategySizeAlgorithm_] = undefined!; +// } + +// export function writableStreamDefaultControllerClose( +// controller: WritableStreamDefaultController +// ): void { +// q.enqueueValueWithSize(controller, "close", 0); +// writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); +// } + +// export function writableStreamDefaultControllerGetChunkSize( +// controller: WritableStreamDefaultController, +// chunk: InputType +// ): number { +// let chunkSize: number; +// try { +// chunkSize = controller[strategySizeAlgorithm_](chunk); +// } catch (error) { +// writableStreamDefaultControllerErrorIfNeeded(controller, error); +// chunkSize = 1; +// } +// return chunkSize; +// } + +// export function writableStreamDefaultControllerGetDesiredSize( +// controller: WritableStreamDefaultController +// ): number { +// return controller[strategyHWM_] - controller[q.queueTotalSize_]; +// } + +// export function writableStreamDefaultControllerWrite( +// controller: WritableStreamDefaultController, +// chunk: InputType, +// chunkSize: number +// ): void { +// try { +// q.enqueueValueWithSize(controller, { chunk }, chunkSize); +// } catch (error) { +// writableStreamDefaultControllerErrorIfNeeded(controller, error); +// return; +// } +// const stream = controller[controlledWritableStream_]; +// if ( +// !writableStreamCloseQueuedOrInFlight(stream) && +// stream[shared.state_] === "writable" +// ) { +// const backpressure = writableStreamDefaultControllerGetBackpressure( +// controller +// ); +// writableStreamUpdateBackpressure(stream, backpressure); +// } +// writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); +// } + +// export function writableStreamDefaultControllerAdvanceQueueIfNeeded( +// controller: WritableStreamDefaultController +// ): void { +// if (!controller[started_]) { +// return; +// } +// const stream = controller[controlledWritableStream_]; +// if (stream[inFlightWriteRequest_] !== undefined) { +// return; +// } +// const state = stream[shared.state_]; +// if (state === "closed" || state === "errored") { +// return; +// } +// if (state === "erroring") { +// writableStreamFinishErroring(stream); +// return; +// } +// if (controller[q.queue_].length === 0) { +// return; +// } +// const writeRecord = q.peekQueueValue(controller); +// if (writeRecord === "close") { +// writableStreamDefaultControllerProcessClose(controller); +// } else { +// writableStreamDefaultControllerProcessWrite(controller, writeRecord.chunk); +// } +// } + +// export function writableStreamDefaultControllerErrorIfNeeded( +// controller: WritableStreamDefaultController, +// error: shared.ErrorResult +// ): void { +// if (controller[controlledWritableStream_][shared.state_] === "writable") { +// writableStreamDefaultControllerError(controller, error); +// } +// } + +// export function writableStreamDefaultControllerProcessClose( +// controller: WritableStreamDefaultController +// ): void { +// const stream = controller[controlledWritableStream_]; +// writableStreamMarkCloseRequestInFlight(stream); +// q.dequeueValue(controller); +// // Assert: controller.[[queue]] is empty. 
+// const sinkClosePromise = controller[closeAlgorithm_](); +// writableStreamDefaultControllerClearAlgorithms(controller); +// sinkClosePromise.then( +// _ => { +// writableStreamFinishInFlightClose(stream); +// }, +// error => { +// writableStreamFinishInFlightCloseWithError(stream, error); +// } +// ); +// } + +// export function writableStreamDefaultControllerProcessWrite( +// controller: WritableStreamDefaultController, +// chunk: InputType +// ): void { +// const stream = controller[controlledWritableStream_]; +// writableStreamMarkFirstWriteRequestInFlight(stream); +// controller[writeAlgorithm_](chunk).then( +// _ => { +// writableStreamFinishInFlightWrite(stream); +// const state = stream[shared.state_]; +// // Assert: state is "writable" or "erroring". +// q.dequeueValue(controller); +// if ( +// !writableStreamCloseQueuedOrInFlight(stream) && +// state === "writable" +// ) { +// const backpressure = writableStreamDefaultControllerGetBackpressure( +// controller +// ); +// writableStreamUpdateBackpressure(stream, backpressure); +// } +// writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); +// }, +// error => { +// if (stream[shared.state_] === "writable") { +// writableStreamDefaultControllerClearAlgorithms(controller); +// } +// writableStreamFinishInFlightWriteWithError(stream, error); +// } +// ); +// } + +// export function writableStreamDefaultControllerGetBackpressure( +// controller: WritableStreamDefaultController +// ): boolean { +// const desiredSize = writableStreamDefaultControllerGetDesiredSize(controller); +// return desiredSize <= 0; +// } + +// export function writableStreamDefaultControllerError( +// controller: WritableStreamDefaultController, +// error: shared.ErrorResult +// ): void { +// const stream = controller[controlledWritableStream_]; +// // Assert: stream.[[state]] is "writable". 
+//   writableStreamDefaultControllerClearAlgorithms(controller);
+//   writableStreamStartErroring(stream, error);
+// }
diff --git a/cli/js/streams/writable-stream-default-controller.ts b/cli/js/streams/writable-stream-default-controller.ts
index 9a3886d21c9e7b..57ffe08fdae9ea 100644
--- a/cli/js/streams/writable-stream-default-controller.ts
+++ b/cli/js/streams/writable-stream-default-controller.ts
@@ -1,100 +1,101 @@
-// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
-// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+// TODO reenable this code when we enable writableStreams and transport types
+// // Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// // Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
-/**
- * streams/writable-stream-default-controller - WritableStreamDefaultController class implementation
- * Part of Stardazed
- * (c) 2018-Present by Arthur Langereis - @zenmumbler
- * https://github.com/stardazed/sd-streams
- */
+// /**
+// * streams/writable-stream-default-controller - WritableStreamDefaultController class implementation
+// * Part of Stardazed
+// * (c) 2018-Present by Arthur Langereis - @zenmumbler
+// * https://github.com/stardazed/sd-streams
+// */
-/* eslint-disable @typescript-eslint/no-explicit-any */
-// TODO reenable this lint here
+// /* eslint-disable @typescript-eslint/no-explicit-any */
+// // TODO reenable this lint here
-import * as ws from "./writable-internals.ts";
-import * as shared from "./shared-internals.ts";
-import * as q from "./queue-mixin.ts";
-import { Queue } from "./queue.ts";
-import { QueuingStrategySizeCallback } from "../dom_types.ts";
+// import * as ws from "./writable-internals.ts";
+// import * as shared from "./shared-internals.ts";
+// import * as q from "./queue-mixin.ts";
+// import { Queue } from "./queue.ts";
+// import { QueuingStrategySizeCallback } from "../dom_types.ts";
-export class WritableStreamDefaultController
-  implements ws.WritableStreamDefaultController {
-  [ws.abortAlgorithm_]: ws.AbortAlgorithm;
-  [ws.closeAlgorithm_]: ws.CloseAlgorithm;
-  [ws.controlledWritableStream_]: ws.WritableStream;
-  [ws.started_]: boolean;
-  [ws.strategyHWM_]: number;
-  [ws.strategySizeAlgorithm_]: QueuingStrategySizeCallback;
-  [ws.writeAlgorithm_]: ws.WriteAlgorithm;
+// export class WritableStreamDefaultController
+//   implements ws.WritableStreamDefaultController {
+//   [ws.abortAlgorithm_]: ws.AbortAlgorithm;
+//   [ws.closeAlgorithm_]: ws.CloseAlgorithm;
+//   [ws.controlledWritableStream_]: ws.WritableStream;
+//   [ws.started_]: boolean;
+//   [ws.strategyHWM_]: number;
+//   [ws.strategySizeAlgorithm_]: QueuingStrategySizeCallback;
+//   [ws.writeAlgorithm_]: ws.WriteAlgorithm;
-  [q.queue_]: Queue | "close">>;
-  [q.queueTotalSize_]: number;
+//   [q.queue_]: Queue | "close">>;
+//   [q.queueTotalSize_]: number;
-  constructor() {
-    throw new TypeError();
-  }
+//   constructor() {
+//     throw new TypeError();
+//   }
-  error(e?: shared.ErrorResult): void {
-    if (!ws.isWritableStreamDefaultController(this)) {
-      throw new TypeError();
-    }
-    const state = this[ws.controlledWritableStream_][shared.state_];
-    if (state !== "writable") {
-      return;
-    }
-    ws.writableStreamDefaultControllerError(this, e);
-  }
+//   error(e?: shared.ErrorResult): void {
+//     if (!ws.isWritableStreamDefaultController(this)) {
+//       throw new TypeError();
+//     }
+//     const state = this[ws.controlledWritableStream_][shared.state_];
+//     if (state !== "writable") {
+//       return;
+//     }
+//     ws.writableStreamDefaultControllerError(this, e);
+//   }
-  [ws.abortSteps_](reason: shared.ErrorResult): Promise {
-    const result = this[ws.abortAlgorithm_](reason);
-    ws.writableStreamDefaultControllerClearAlgorithms(this);
-    return result;
-  }
+//   [ws.abortSteps_](reason: shared.ErrorResult): Promise {
+//     const result = this[ws.abortAlgorithm_](reason);
+//     ws.writableStreamDefaultControllerClearAlgorithms(this);
+//     return result;
+//   }
-  [ws.errorSteps_](): void {
-    q.resetQueue(this);
-  }
-}
+//   [ws.errorSteps_](): void {
+//     q.resetQueue(this);
+//   }
+// }
-export function setUpWritableStreamDefaultControllerFromUnderlyingSink<
-  InputType
->(
-  stream: ws.WritableStream,
-  underlyingSink: ws.WritableStreamSink,
-  highWaterMark: number,
-  sizeAlgorithm: QueuingStrategySizeCallback
-): void {
-  // Assert: underlyingSink is not undefined.
-  const controller = Object.create(
-    WritableStreamDefaultController.prototype
-  ) as WritableStreamDefaultController;
+// export function setUpWritableStreamDefaultControllerFromUnderlyingSink<
+//   InputType
+// >(
+//   stream: ws.WritableStream,
+//   underlyingSink: ws.WritableStreamSink,
+//   highWaterMark: number,
+//   sizeAlgorithm: QueuingStrategySizeCallback
+// ): void {
+//   // Assert: underlyingSink is not undefined.
+//   const controller = Object.create(
+//     WritableStreamDefaultController.prototype
+//   ) as WritableStreamDefaultController;
-  const startAlgorithm = function(): any {
-    return shared.invokeOrNoop(underlyingSink, "start", [controller]);
-  };
-  const writeAlgorithm = shared.createAlgorithmFromUnderlyingMethod(
-    underlyingSink,
-    "write",
-    [controller]
-  );
-  const closeAlgorithm = shared.createAlgorithmFromUnderlyingMethod(
-    underlyingSink,
-    "close",
-    []
-  );
-  const abortAlgorithm = shared.createAlgorithmFromUnderlyingMethod(
-    underlyingSink,
-    "abort",
-    []
-  );
-  ws.setUpWritableStreamDefaultController(
-    stream,
-    controller,
-    startAlgorithm,
-    writeAlgorithm,
-    closeAlgorithm,
-    abortAlgorithm,
-    highWaterMark,
-    sizeAlgorithm
-  );
-}
+//   const startAlgorithm = function(): any {
+//     return shared.invokeOrNoop(underlyingSink, "start", [controller]);
+//   };
+//   const writeAlgorithm = shared.createAlgorithmFromUnderlyingMethod(
+//     underlyingSink,
+//     "write",
+//     [controller]
+//   );
+//   const closeAlgorithm = shared.createAlgorithmFromUnderlyingMethod(
+//     underlyingSink,
+//     "close",
+//     []
+//   );
+//   const abortAlgorithm = shared.createAlgorithmFromUnderlyingMethod(
+//     underlyingSink,
+//     "abort",
+//     []
+//   );
+//   ws.setUpWritableStreamDefaultController(
+//     stream,
+//     controller,
+//     startAlgorithm,
+//     writeAlgorithm,
+//     closeAlgorithm,
+//     abortAlgorithm,
+//     highWaterMark,
+//     sizeAlgorithm
+//   );
+// }
diff --git a/cli/js/streams/writable-stream-default-writer.ts b/cli/js/streams/writable-stream-default-writer.ts
index d05832389047c4..f38aa26bbc7e99 100644
--- a/cli/js/streams/writable-stream-default-writer.ts
+++ b/cli/js/streams/writable-stream-default-writer.ts
@@ -1,135 +1,136 @@
-// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
-// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+// TODO reenable this code when we enable writableStreams and transport types
+// // Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// // Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
-/**
- * streams/writable-stream-default-writer - WritableStreamDefaultWriter class implementation
- * Part of Stardazed
- * (c) 2018-Present by Arthur Langereis - @zenmumbler
- * https://github.com/stardazed/sd-streams
- */
+// /**
+// * streams/writable-stream-default-writer - WritableStreamDefaultWriter class implementation
+// * Part of Stardazed
+// * (c) 2018-Present by Arthur Langereis - @zenmumbler
+// * https://github.com/stardazed/sd-streams
+// */
-import * as ws from "./writable-internals.ts";
-import * as shared from "./shared-internals.ts";
+// import * as ws from "./writable-internals.ts";
+// import * as shared from "./shared-internals.ts";
-export class WritableStreamDefaultWriter
-  implements ws.WritableStreamDefaultWriter {
-  [ws.ownerWritableStream_]: ws.WritableStream | undefined;
-  [ws.readyPromise_]: shared.ControlledPromise;
-  [ws.closedPromise_]: shared.ControlledPromise;
+// export class WritableStreamDefaultWriter
+//   implements ws.WritableStreamDefaultWriter {
+//   [ws.ownerWritableStream_]: ws.WritableStream | undefined;
+//   [ws.readyPromise_]: shared.ControlledPromise;
+//   [ws.closedPromise_]: shared.ControlledPromise;
-  constructor(stream: ws.WritableStream) {
-    if (!ws.isWritableStream(stream)) {
-      throw new TypeError();
-    }
-    if (ws.isWritableStreamLocked(stream)) {
-      throw new TypeError("Stream is already locked");
-    }
-    this[ws.ownerWritableStream_] = stream;
-    stream[ws.writer_] = this;
+//   constructor(stream: ws.WritableStream) {
+//     if (!ws.isWritableStream(stream)) {
+//       throw new TypeError();
+//     }
+//     if (ws.isWritableStreamLocked(stream)) {
+//       throw new TypeError("Stream is already locked");
+//     }
+//     this[ws.ownerWritableStream_] = stream;
+//     stream[ws.writer_] = this;
-    const readyPromise = shared.createControlledPromise();
-    const closedPromise = shared.createControlledPromise();
-    this[ws.readyPromise_] = readyPromise;
-    this[ws.closedPromise_] = closedPromise;
+//     const readyPromise = shared.createControlledPromise();
+//     const closedPromise = shared.createControlledPromise();
+//     this[ws.readyPromise_] = readyPromise;
+//     this[ws.closedPromise_] = closedPromise;
-    const state = stream[shared.state_];
-    if (state === "writable") {
-      if (
-        !ws.writableStreamCloseQueuedOrInFlight(stream) &&
-        stream[ws.backpressure_]
-      ) {
-        // OK Set this.[[readyPromise]] to a new promise.
-      } else {
-        readyPromise.resolve(undefined);
-      }
-      // OK Set this.[[closedPromise]] to a new promise.
-    } else if (state === "erroring") {
-      readyPromise.reject(stream[shared.storedError_]);
-      readyPromise.promise.catch(() => {});
-      // OK Set this.[[closedPromise]] to a new promise.
-    } else if (state === "closed") {
-      readyPromise.resolve(undefined);
-      closedPromise.resolve(undefined);
-    } else {
-      // Assert: state is "errored".
-      const storedError = stream[shared.storedError_];
-      readyPromise.reject(storedError);
-      readyPromise.promise.catch(() => {});
-      closedPromise.reject(storedError);
-      closedPromise.promise.catch(() => {});
-    }
-  }
+//     const state = stream[shared.state_];
+//     if (state === "writable") {
+//       if (
+//         !ws.writableStreamCloseQueuedOrInFlight(stream) &&
+//         stream[ws.backpressure_]
+//       ) {
+//         // OK Set this.[[readyPromise]] to a new promise.
+//       } else {
+//         readyPromise.resolve(undefined);
+//       }
+//       // OK Set this.[[closedPromise]] to a new promise.
+//     } else if (state === "erroring") {
+//       readyPromise.reject(stream[shared.storedError_]);
+//       readyPromise.promise.catch(() => {});
+//       // OK Set this.[[closedPromise]] to a new promise.
+//     } else if (state === "closed") {
+//       readyPromise.resolve(undefined);
+//       closedPromise.resolve(undefined);
+//     } else {
+//       // Assert: state is "errored".
+//       const storedError = stream[shared.storedError_];
+//       readyPromise.reject(storedError);
+//       readyPromise.promise.catch(() => {});
+//       closedPromise.reject(storedError);
+//       closedPromise.promise.catch(() => {});
+//     }
+//   }
-  abort(reason: shared.ErrorResult): Promise {
-    if (!ws.isWritableStreamDefaultWriter(this)) {
-      return Promise.reject(new TypeError());
-    }
-    if (this[ws.ownerWritableStream_] === undefined) {
-      return Promise.reject(
-        new TypeError("Writer is not connected to a stream")
-      );
-    }
-    return ws.writableStreamDefaultWriterAbort(this, reason);
-  }
+//   abort(reason: shared.ErrorResult): Promise {
+//     if (!ws.isWritableStreamDefaultWriter(this)) {
+//       return Promise.reject(new TypeError());
+//     }
+//     if (this[ws.ownerWritableStream_] === undefined) {
+//       return Promise.reject(
+//         new TypeError("Writer is not connected to a stream")
+//       );
+//     }
+//     return ws.writableStreamDefaultWriterAbort(this, reason);
+//   }
-  close(): Promise {
-    if (!ws.isWritableStreamDefaultWriter(this)) {
-      return Promise.reject(new TypeError());
-    }
-    const stream = this[ws.ownerWritableStream_];
-    if (stream === undefined) {
-      return Promise.reject(
-        new TypeError("Writer is not connected to a stream")
-      );
-    }
-    if (ws.writableStreamCloseQueuedOrInFlight(stream)) {
-      return Promise.reject(new TypeError());
-    }
-    return ws.writableStreamDefaultWriterClose(this);
-  }
+//   close(): Promise {
+//     if (!ws.isWritableStreamDefaultWriter(this)) {
+//       return Promise.reject(new TypeError());
+//     }
+//     const stream = this[ws.ownerWritableStream_];
+//     if (stream === undefined) {
+//       return Promise.reject(
+//         new TypeError("Writer is not connected to a stream")
+//       );
+//     }
+//     if (ws.writableStreamCloseQueuedOrInFlight(stream)) {
+//       return Promise.reject(new TypeError());
+//     }
+//     return ws.writableStreamDefaultWriterClose(this);
+//   }
-  releaseLock(): void {
-    const stream = this[ws.ownerWritableStream_];
-    if (stream === undefined) {
-      return;
-    }
-    // Assert: stream.[[writer]] is not undefined.
-    ws.writableStreamDefaultWriterRelease(this);
-  }
+//   releaseLock(): void {
+//     const stream = this[ws.ownerWritableStream_];
+//     if (stream === undefined) {
+//       return;
+//     }
+//     // Assert: stream.[[writer]] is not undefined.
+//     ws.writableStreamDefaultWriterRelease(this);
+//   }
-  write(chunk: InputType): Promise {
-    if (!ws.isWritableStreamDefaultWriter(this)) {
-      return Promise.reject(new TypeError());
-    }
-    if (this[ws.ownerWritableStream_] === undefined) {
-      return Promise.reject(
-        new TypeError("Writer is not connected to a stream")
-      );
-    }
-    return ws.writableStreamDefaultWriterWrite(this, chunk);
-  }
+//   write(chunk: InputType): Promise {
+//     if (!ws.isWritableStreamDefaultWriter(this)) {
+//       return Promise.reject(new TypeError());
+//     }
+//     if (this[ws.ownerWritableStream_] === undefined) {
+//       return Promise.reject(
+//         new TypeError("Writer is not connected to a stream")
+//       );
+//     }
+//     return ws.writableStreamDefaultWriterWrite(this, chunk);
+//   }
-  get closed(): Promise {
-    if (!ws.isWritableStreamDefaultWriter(this)) {
-      return Promise.reject(new TypeError());
-    }
-    return this[ws.closedPromise_].promise;
-  }
+//   get closed(): Promise {
+//     if (!ws.isWritableStreamDefaultWriter(this)) {
+//       return Promise.reject(new TypeError());
+//     }
+//     return this[ws.closedPromise_].promise;
+//   }
-  get desiredSize(): number | null {
-    if (!ws.isWritableStreamDefaultWriter(this)) {
-      throw new TypeError();
-    }
-    if (this[ws.ownerWritableStream_] === undefined) {
-      throw new TypeError("Writer is not connected to stream");
-    }
-    return ws.writableStreamDefaultWriterGetDesiredSize(this);
-  }
+//   get desiredSize(): number | null {
+//     if (!ws.isWritableStreamDefaultWriter(this)) {
+//       throw new TypeError();
+//     }
+//     if (this[ws.ownerWritableStream_] === undefined) {
+//       throw new TypeError("Writer is not connected to stream");
+//     }
+//     return ws.writableStreamDefaultWriterGetDesiredSize(this);
+//   }
-  get ready(): Promise {
-    if (!ws.isWritableStreamDefaultWriter(this)) {
-      return Promise.reject(new TypeError());
-    }
-    return this[ws.readyPromise_].promise;
-  }
-}
+//   get ready(): Promise {
+//     if (!ws.isWritableStreamDefaultWriter(this)) {
+//       return Promise.reject(new TypeError());
+//     }
+//     return this[ws.readyPromise_].promise;
+//   }
+// }
diff --git a/cli/js/streams/writable-stream.ts b/cli/js/streams/writable-stream.ts
index b6e4dd4ad1c34e..a6131c5d0277c7 100644
--- a/cli/js/streams/writable-stream.ts
+++ b/cli/js/streams/writable-stream.ts
@@ -1,117 +1,118 @@
-// Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
-// Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
+// TODO reenable this code when we enable writableStreams and transport types
+// // Forked from https://github.com/stardazed/sd-streams/tree/8928cf04b035fd02fb1340b7eb541c76be37e546
+// // Copyright (c) 2018-Present by Arthur Langereis - @zenmumbler MIT
-/**
- * streams/writable-stream - WritableStream class implementation
- * Part of Stardazed
- * (c) 2018-Present by Arthur Langereis - @zenmumbler
- * https://github.com/stardazed/sd-streams
- */
+// /**
+// * streams/writable-stream - WritableStream class implementation
+// * Part of Stardazed
+// * (c) 2018-Present by Arthur Langereis - @zenmumbler
+// * https://github.com/stardazed/sd-streams
+// */
-import * as ws from "./writable-internals.ts";
-import * as shared from "./shared-internals.ts";
-import {
-  WritableStreamDefaultController,
-  setUpWritableStreamDefaultControllerFromUnderlyingSink
-} from "./writable-stream-default-controller.ts";
-import { WritableStreamDefaultWriter } from "./writable-stream-default-writer.ts";
-import { QueuingStrategy, QueuingStrategySizeCallback } from "../dom_types.ts";
+// import * as ws from "./writable-internals.ts";
+// import * as shared from "./shared-internals.ts";
+// import {
+//   WritableStreamDefaultController,
+//   setUpWritableStreamDefaultControllerFromUnderlyingSink
+// } from "./writable-stream-default-controller.ts";
+// import { WritableStreamDefaultWriter } from "./writable-stream-default-writer.ts";
+// import { QueuingStrategy, QueuingStrategySizeCallback } from "../dom_types.ts";
-export class WritableStream {
-  [shared.state_]: ws.WritableStreamState;
-  [shared.storedError_]: shared.ErrorResult;
-  [ws.backpressure_]: boolean;
-  [ws.closeRequest_]: shared.ControlledPromise | undefined;
-  [ws.inFlightWriteRequest_]: shared.ControlledPromise | undefined;
-  [ws.inFlightCloseRequest_]: shared.ControlledPromise | undefined;
-  [ws.pendingAbortRequest_]: ws.AbortRequest | undefined;
-  [ws.writableStreamController_]:
-    | ws.WritableStreamDefaultController
-    | undefined;
-  [ws.writer_]: ws.WritableStreamDefaultWriter | undefined;
-  [ws.writeRequests_]: Array>;
+// export class WritableStream {
+//   [shared.state_]: ws.WritableStreamState;
+//   [shared.storedError_]: shared.ErrorResult;
+//   [ws.backpressure_]: boolean;
+//   [ws.closeRequest_]: shared.ControlledPromise | undefined;
+//   [ws.inFlightWriteRequest_]: shared.ControlledPromise | undefined;
+//   [ws.inFlightCloseRequest_]: shared.ControlledPromise | undefined;
+//   [ws.pendingAbortRequest_]: ws.AbortRequest | undefined;
+//   [ws.writableStreamController_]:
+//     | ws.WritableStreamDefaultController
+//     | undefined;
+//   [ws.writer_]: ws.WritableStreamDefaultWriter | undefined;
+//   [ws.writeRequests_]: Array>;
-  constructor(
-    sink: ws.WritableStreamSink = {},
-    strategy: QueuingStrategy = {}
-  ) {
-    ws.initializeWritableStream(this);
-    const sizeFunc = strategy.size;
-    const stratHWM = strategy.highWaterMark;
-    if (sink.type !== undefined) {
-      throw new RangeError("The type of an underlying sink must be undefined");
-    }
+//   constructor(
+//     sink: ws.WritableStreamSink = {},
+//     strategy: QueuingStrategy = {}
+//   ) {
+//     ws.initializeWritableStream(this);
+//     const sizeFunc = strategy.size;
+//     const stratHWM = strategy.highWaterMark;
+//     if (sink.type !== undefined) {
+//       throw new RangeError("The type of an underlying sink must be undefined");
+//     }
-    const sizeAlgorithm = shared.makeSizeAlgorithmFromSizeFunction(sizeFunc);
-    const highWaterMark = shared.validateAndNormalizeHighWaterMark(
-      stratHWM === undefined ? 1 : stratHWM
-    );
+//     const sizeAlgorithm = shared.makeSizeAlgorithmFromSizeFunction(sizeFunc);
+//     const highWaterMark = shared.validateAndNormalizeHighWaterMark(
+//       stratHWM === undefined ? 1 : stratHWM
+//     );
-    setUpWritableStreamDefaultControllerFromUnderlyingSink(
-      this,
-      sink,
-      highWaterMark,
-      sizeAlgorithm
-    );
-  }
+//     setUpWritableStreamDefaultControllerFromUnderlyingSink(
+//       this,
+//       sink,
+//       highWaterMark,
+//       sizeAlgorithm
+//     );
+//   }
-  get locked(): boolean {
-    if (!ws.isWritableStream(this)) {
-      throw new TypeError();
-    }
-    return ws.isWritableStreamLocked(this);
-  }
+//   get locked(): boolean {
+//     if (!ws.isWritableStream(this)) {
+//       throw new TypeError();
+//     }
+//     return ws.isWritableStreamLocked(this);
+//   }
-  abort(reason?: shared.ErrorResult): Promise {
-    if (!ws.isWritableStream(this)) {
-      return Promise.reject(new TypeError());
-    }
-    if (ws.isWritableStreamLocked(this)) {
-      return Promise.reject(new TypeError("Cannot abort a locked stream"));
-    }
-    return ws.writableStreamAbort(this, reason);
-  }
+//   abort(reason?: shared.ErrorResult): Promise {
+//     if (!ws.isWritableStream(this)) {
+//       return Promise.reject(new TypeError());
+//     }
+//     if (ws.isWritableStreamLocked(this)) {
+//       return Promise.reject(new TypeError("Cannot abort a locked stream"));
+//     }
+//     return ws.writableStreamAbort(this, reason);
+//   }
-  getWriter(): ws.WritableStreamWriter {
-    if (!ws.isWritableStream(this)) {
-      throw new TypeError();
-    }
-    return new WritableStreamDefaultWriter(this);
-  }
-}
+//   getWriter(): ws.WritableStreamWriter {
+//     if (!ws.isWritableStream(this)) {
+//       throw new TypeError();
+//     }
+//     return new WritableStreamDefaultWriter(this);
+//   }
+// }
-export function createWritableStream(
-  startAlgorithm: ws.StartAlgorithm,
-  writeAlgorithm: ws.WriteAlgorithm,
-  closeAlgorithm: ws.CloseAlgorithm,
-  abortAlgorithm: ws.AbortAlgorithm,
-  highWaterMark?: number,
-  sizeAlgorithm?: QueuingStrategySizeCallback
-): WritableStream {
-  if (highWaterMark === undefined) {
-    highWaterMark = 1;
-  }
-  if (sizeAlgorithm === undefined) {
-    sizeAlgorithm = (): number => 1;
-  }
-  // Assert: ! IsNonNegativeNumber(highWaterMark) is true.
+// export function createWritableStream(
+//   startAlgorithm: ws.StartAlgorithm,
+//   writeAlgorithm: ws.WriteAlgorithm,
+//   closeAlgorithm: ws.CloseAlgorithm,
+//   abortAlgorithm: ws.AbortAlgorithm,
+//   highWaterMark?: number,
+//   sizeAlgorithm?: QueuingStrategySizeCallback
+// ): WritableStream {
+//   if (highWaterMark === undefined) {
+//     highWaterMark = 1;
+//   }
+//   if (sizeAlgorithm === undefined) {
+//     sizeAlgorithm = (): number => 1;
+//   }
+//   // Assert: ! IsNonNegativeNumber(highWaterMark) is true.
-  const stream = Object.create(WritableStream.prototype) as WritableStream<
-    InputType
-  >;
-  ws.initializeWritableStream(stream);
-  const controller = Object.create(
-    WritableStreamDefaultController.prototype
-  ) as WritableStreamDefaultController;
-  ws.setUpWritableStreamDefaultController(
-    stream,
-    controller,
-    startAlgorithm,
-    writeAlgorithm,
-    closeAlgorithm,
-    abortAlgorithm,
-    highWaterMark,
-    sizeAlgorithm
-  );
-  return stream;
-}
+//   const stream = Object.create(WritableStream.prototype) as WritableStream<
+//     InputType
+//   >;
+//   ws.initializeWritableStream(stream);
+//   const controller = Object.create(
+//     WritableStreamDefaultController.prototype
+//   ) as WritableStreamDefaultController;
+//   ws.setUpWritableStreamDefaultController(
+//     stream,
+//     controller,
+//     startAlgorithm,
+//     writeAlgorithm,
+//     closeAlgorithm,
+//     abortAlgorithm,
+//     highWaterMark,
+//     sizeAlgorithm
+//   );
+//   return stream;
+// }
diff --git a/cli/msg.rs b/cli/msg.rs
index 2ddac3b8d3000b..5e9053a41b7f52 100644
--- a/cli/msg.rs
+++ b/cli/msg.rs
@@ -59,9 +59,8 @@ pub enum ErrorKind {
   JSError = 50,
   TypeError = 51,
-  /** TODO These are DomException Types, and should be moved there when it exists */
+  /** TODO this is a DomException type, and should be moved out of here when possible */
   DataCloneError = 52,
-  AbortError = 53,
 }

 // Warning! The values in this enum are duplicated in js/compiler.ts
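Reviewer note, not part of the patch: once the commented-out WritableStream above is re-enabled, it is meant to follow the WHATWG Streams writable-side API that the rest of this fork implements. The following is a minimal usage sketch only; it assumes the class is exported from cli/js/streams/writable-stream.ts under that name (an assumption, since this patch keeps the code disabled), and the file name is hypothetical.

// writable-stream-usage.ts (hypothetical example, not included in this patch)
import { WritableStream } from "./writable-stream.ts"; // assumed export

async function demo(): Promise<void> {
  const stream = new WritableStream<string>(
    {
      // Underlying sink: write() receives each chunk, in order.
      write(chunk: string): void {
        console.log("sink received:", chunk);
      },
      close(): void {
        console.log("sink closed");
      }
    },
    // Queuing strategy: when desiredSize drops to 0 or below, the writer
    // sees backpressure via its ready promise.
    { highWaterMark: 1 }
  );

  const writer = stream.getWriter(); // locks the stream to this writer
  await writer.write("hello");
  await writer.write("world");
  await writer.close(); // resolves after the sink's close() has run
}

demo();

The sketch exercises exactly the paths kept (commented out) in this patch: the constructor sets up the default controller from the underlying sink, getWriter() creates a WritableStreamDefaultWriter, and writes are queued and drained subject to the highWaterMark-based backpressure calculation.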