diff --git a/package.json b/package.json index 4785f75e9..c0d06a738 100644 --- a/package.json +++ b/package.json @@ -139,7 +139,6 @@ "mocha-junit-reporter": "^2.0.2", "mocha-multi-reporters": "^1.5.1", "octokit-plugin-create-pull-request": "^3.10.0", - "pacote": "^12.0.2", "rc-progress": "^3.1.4", "react": "^17.0.2", "react-dnd-html5-backend": "^14.0.2", @@ -150,7 +149,7 @@ "semver-utils": "^1.1.4", "shelljs": "^0.8.4", "styled-components": "^5.3.3", - "typescript": "^3.9.5", + "typescript": "^4.5.5", "wait-on": "^6.0.0", "weak-napi": "^2.0.2", "webpack": "^5.68.0", diff --git a/packages/core/chunked-sender/src/chunkedSender/createChunkedSender.js b/packages/core/chunked-sender/src/chunkedSender/createChunkedSender.js new file mode 100644 index 000000000..72037a669 --- /dev/null +++ b/packages/core/chunked-sender/src/chunkedSender/createChunkedSender.js @@ -0,0 +1,40 @@ +// @flow +import { logger } from "@rpldy/shared"; +import xhrSend from "@rpldy/sender"; +import { getMandatoryOptions } from "../utils"; +import processChunks from "./processChunks"; + +import type { BatchItem } from "@rpldy/shared"; +import type { OnProgress, SendResult } from "@rpldy/sender"; +import type { TriggerMethod } from "@rpldy/life-events"; +import type { ChunkedOptions, ChunkedSender, ChunkedSendOptions } from "../types"; + +const createChunkedSender = (chunkedOptions: ?ChunkedOptions, trigger: TriggerMethod): ChunkedSender => { + const options = getMandatoryOptions(chunkedOptions); + + const send = (items: BatchItem[], url: ?string, sendOptions: ChunkedSendOptions, onProgress: OnProgress): SendResult => { + let result; + + if (!options.chunked || items.length > 1 || items[0].url || !items[0].file.size) { + result = xhrSend(items, url, sendOptions, onProgress); + logger.debugLog(`chunkedSender: sending items as normal, un-chunked requests`); + } else { + logger.debugLog(`chunkedSender: sending file as a chunked request`); + result = processChunks( + items[0], + options, + url, + sendOptions, + onProgress, + trigger); + } + + return result; + }; + + return { + send, + }; +}; + +export default createChunkedSender; diff --git a/packages/core/chunked-sender/src/chunkedSender/getChunkedState.js b/packages/core/chunked-sender/src/chunkedSender/getChunkedState.js new file mode 100644 index 000000000..391cb13cb --- /dev/null +++ b/packages/core/chunked-sender/src/chunkedSender/getChunkedState.js @@ -0,0 +1,40 @@ +// @flow +import createState from "@rpldy/simple-state"; + +import type { ChunkedSendOptions, MandatoryChunkedOptions } from "../types"; +import type { Chunk, ChunkedState, State } from "./types"; + +const getChunkedState = ( + chunks: Chunk[], + url: ?string, + sendOptions: ChunkedSendOptions, + chunkedOptions: MandatoryChunkedOptions +): ChunkedState => { + const { state, update } = createState({ + finished: false, + aborted: false, + error: false, + uploaded: {}, + requests: {}, + responses: [], + chunkCount: chunks.length, + startByte: sendOptions.startByte || 0, + chunks, + url, + sendOptions, + ...chunkedOptions, + }); + + const getState = (): State => state; + + const updateState = (updater: (State) => void) => { + update(updater); + }; + + return { + getState, + updateState, + }; +}; + +export default getChunkedState; diff --git a/packages/core/chunked-sender/src/chunkedSender/getChunksToSend.js b/packages/core/chunked-sender/src/chunkedSender/getChunksToSend.js index ffe560120..9326ad1ef 100644 --- a/packages/core/chunked-sender/src/chunkedSender/getChunksToSend.js +++ 
b/packages/core/chunked-sender/src/chunkedSender/getChunksToSend.js @@ -1,8 +1,10 @@ // @flow import ChunkedSendError from "./ChunkedSendError"; -import type { State, Chunk } from "./types"; +import type { Chunk, ChunkedState } from "./types"; + +const getChunksToSend = (chunkedState: ChunkedState): Array<Chunk> => { + const state = chunkedState.getState(); -const getChunksToSend = (state: State): Array<Chunk> => { const chunks = [], inProgressIds = Object.keys(state.requests), parallel = state.parallel || 1; diff --git a/packages/core/chunked-sender/src/chunkedSender/handleChunkRequest.js index 143ee0794..1632c9fb3 100644 --- a/packages/core/chunked-sender/src/chunkedSender/handleChunkRequest.js +++ b/packages/core/chunked-sender/src/chunkedSender/handleChunkRequest.js @@ -6,35 +6,44 @@ import { CHUNK_EVENTS } from "../consts"; import type { BatchItem } from "@rpldy/shared"; import type { OnProgress, SendResult } from "@rpldy/sender"; import type { TriggerMethod } from "@rpldy/life-events"; -import type { State } from "./types"; +import type { ChunkedState } from "./types"; const handleChunkRequest = ( - state: State, + chunkedState: ChunkedState, item: BatchItem, chunkId: string, chunkSendResult: SendResult, trigger: TriggerMethod, onProgress: OnProgress, ): Promise => { - state.requests[chunkId] = { - id: chunkId, - abort: chunkSendResult.abort, - }; + chunkedState.updateState((state) => { + state.requests[chunkId] = { + id: chunkId, + abort: chunkSendResult.abort, + }; + }); return chunkSendResult.request .then((result) => { logger.debugLog(`chunkedSender: request finished for chunk: ${chunkId} - `, result); - delete state.requests[chunkId]; + chunkedState.updateState((state) => { + delete state.requests[chunkId]; + }); - const index = state.chunks.findIndex((c) => c.id === chunkId); + const chunks = chunkedState.getState().chunks; + const index = chunks.findIndex((c) => c.id === chunkId); if (~index) { if (result.state === FILE_STATES.FINISHED) { -//remove chunk so eventually there are no more chunks to send -//TODO: splicing array is dangerous.
Need to find a better (immutable) way to progress chunk upload - const spliced = state.chunks.splice(index, 1); - const finishedChunk = spliced[0]; + const finishedChunk = chunks[index]; + + chunkedState.updateState((state) => { + //remove chunk so eventually there are no more chunks to send + state.chunks = state.chunks.slice(0, index) + .concat(state.chunks.slice(index + 1)); + }); + const chunkSize = finishedChunk.end - finishedChunk.start; //issue progress event when chunk finished uploading, so item progress data is updated @@ -47,10 +56,14 @@ const handleChunkRequest = ( }); } else if (result.state !== FILE_STATES.ABORTED) { //increment attempt in case chunk failed (and not aborted) - state.chunks[index].attempt += 1; + chunkedState.updateState((state) => { + state.chunks[index].attempt += 1; + }); } - state.responses.push(result.response); + chunkedState.updateState((state) => { + state.responses.push(result.response); + }); } }); }; diff --git a/packages/core/chunked-sender/src/chunkedSender/index.js b/packages/core/chunked-sender/src/chunkedSender/index.js index 72037a669..96f33f2b2 100644 --- a/packages/core/chunked-sender/src/chunkedSender/index.js +++ b/packages/core/chunked-sender/src/chunkedSender/index.js @@ -1,40 +1 @@ -// @flow -import { logger } from "@rpldy/shared"; -import xhrSend from "@rpldy/sender"; -import { getMandatoryOptions } from "../utils"; -import processChunks from "./processChunks"; - -import type { BatchItem } from "@rpldy/shared"; -import type { OnProgress, SendResult } from "@rpldy/sender"; -import type { TriggerMethod } from "@rpldy/life-events"; -import type { ChunkedOptions, ChunkedSender, ChunkedSendOptions } from "../types"; - -const createChunkedSender = (chunkedOptions: ?ChunkedOptions, trigger: TriggerMethod): ChunkedSender => { - const options = getMandatoryOptions(chunkedOptions); - - const send = (items: BatchItem[], url: ?string, sendOptions: ChunkedSendOptions, onProgress: OnProgress): SendResult => { - let result; - - if (!options.chunked || items.length > 1 || items[0].url || !items[0].file.size) { - result = xhrSend(items, url, sendOptions, onProgress); - logger.debugLog(`chunkedSender: sending items as normal, un-chunked requests`); - } else { - logger.debugLog(`chunkedSender: sending file as a chunked request`); - result = processChunks( - items[0], - options, - url, - sendOptions, - onProgress, - trigger); - } - - return result; - }; - - return { - send, - }; -}; - -export default createChunkedSender; +export default from "./createChunkedSender"; diff --git a/packages/core/chunked-sender/src/chunkedSender/processChunkProgressData.js b/packages/core/chunked-sender/src/chunkedSender/processChunkProgressData.js index 43e4e2acf..75b354f36 100644 --- a/packages/core/chunked-sender/src/chunkedSender/processChunkProgressData.js +++ b/packages/core/chunked-sender/src/chunkedSender/processChunkProgressData.js @@ -1,9 +1,14 @@ // @flow import type { BatchItem } from "@rpldy/shared"; -import type { State } from "./types"; +import type { ChunkedState } from "./types"; -const processChunkProgressData = (state: State, item: BatchItem, chunkId: string, chunkUploaded: number): { loaded: number, total: number } => { - state.uploaded[chunkId] = Math.max(chunkUploaded, (state.uploaded[chunkId] || 0)); +const processChunkProgressData = (chunkedState: ChunkedState, item: BatchItem, chunkId: string, chunkUploaded: number): + { loaded: number, total: number } => { + chunkedState.updateState((state) => { + state.uploaded[chunkId] = Math.max(chunkUploaded, 
(state.uploaded[chunkId] || 0)); + }); + + const state = chunkedState.getState(); const loadedSum = Object.keys(state.uploaded) .reduce((res, id) => res + state.uploaded[id], diff --git a/packages/core/chunked-sender/src/chunkedSender/processChunks.js b/packages/core/chunked-sender/src/chunkedSender/processChunks.js index f36de232a..f6836d794 100644 --- a/packages/core/chunked-sender/src/chunkedSender/processChunks.js +++ b/packages/core/chunked-sender/src/chunkedSender/processChunks.js @@ -4,15 +4,17 @@ import getChunks from "./getChunks"; import sendChunks from "./sendChunks"; import { CHUNKED_SENDER_TYPE } from "../consts"; import processChunkProgressData from "./processChunkProgressData"; +import getChunkedState from "./getChunkedState"; import type { BatchItem } from "@rpldy/shared"; import type { OnProgress, SendResult } from "@rpldy/sender"; import type { TriggerMethod } from "@rpldy/life-events"; import type { MandatoryChunkedOptions, ChunkedSendOptions } from "../types"; -import type { State, ChunksSendResponse, Chunk } from "./types"; +import type { ChunksSendResponse, Chunk, ChunkedState } from "./types"; -export const abortChunkedRequest = (state: State, item: BatchItem): boolean => { +export const abortChunkedRequest = (chunkedState: ChunkedState, item: BatchItem): boolean => { logger.debugLog(`chunkedSender: aborting chunked upload for item: ${item.id}`); + const state = chunkedState.getState(); if (!state.finished && !state.aborted) { Object.keys(state.requests) @@ -21,31 +23,33 @@ export const abortChunkedRequest = (state: State, item: BatchItem): boolean => { state.requests[chunkId].abort(); }); - state.aborted = true; + chunkedState.updateState((state) => { + state.aborted = true; + }); } return state.aborted; }; export const process = ( - state: State, + chunkedState: ChunkedState, item: BatchItem, onProgress: OnProgress, trigger: TriggerMethod, ): ChunksSendResponse => { const onChunkProgress = (e, chunks: Chunk[]) => { //we only ever send one chunk per request - const progressData = processChunkProgressData(state, item, chunks[0].id, e.loaded); + const progressData = processChunkProgressData(chunkedState, item, chunks[0].id, e.loaded); onProgress(progressData, [item]); }; const sendPromise = new Promise((resolve) => { - sendChunks(state, item, onChunkProgress, resolve, trigger); + sendChunks(chunkedState, item, onChunkProgress, resolve, trigger); }); return { sendPromise, - abort: () => abortChunkedRequest(state, item), + abort: () => abortChunkedRequest(chunkedState, item), }; }; @@ -58,24 +62,11 @@ const processChunks = ( trigger: TriggerMethod ): SendResult => { const chunks = getChunks(item, chunkedOptions, sendOptions.startByte); - logger.debugLog(`chunkedSender: created ${chunks.length} chunks for: ${item.file.name}`); + const chunkedState = getChunkedState(chunks, url, sendOptions, chunkedOptions); - const state = { - finished: false, - aborted: false, - error: false, - uploaded: {}, - requests: {}, - responses: [], - chunkCount: chunks.length, - startByte: sendOptions.startByte || 0, - chunks, - url, - sendOptions, - ...chunkedOptions, - }; + logger.debugLog(`chunkedSender: created ${chunks.length} chunks for: ${item.file.name}`); - const { sendPromise, abort } = process(state, item, onProgress, trigger); + const { sendPromise, abort } = process(chunkedState, item, onProgress, trigger); return { request: sendPromise, diff --git a/packages/core/chunked-sender/src/chunkedSender/sendChunk.js b/packages/core/chunked-sender/src/chunkedSender/sendChunk.js index 
96ab9d2e3..bf865fbf0 100644 --- a/packages/core/chunked-sender/src/chunkedSender/sendChunk.js +++ b/packages/core/chunked-sender/src/chunkedSender/sendChunk.js @@ -15,7 +15,7 @@ import type { BatchItem } from "@rpldy/shared"; import type { OnProgress, SendResult } from "@rpldy/sender"; import type { TriggerMethod } from "@rpldy/life-events"; import type { ChunkStartEventData } from "../types"; -import type { Chunk, State } from "./types"; +import type { Chunk, ChunkedState } from "./types"; import ChunkedSendError from "./ChunkedSendError"; const getContentRangeValue = (chunk, data, item) => @@ -36,15 +36,17 @@ const getSkippedResult = (): SendResult => ({ const uploadChunkWithUpdatedData = ( chunk: Chunk, - state: State, + chunkedState: ChunkedState, item: BatchItem, onProgress: OnProgress, trigger: TriggerMethod, ): Promise => { + const state = chunkedState.getState(); + const unwrappedOptions = unwrap(state.sendOptions); const sendOptions = { - ...unwrap(state.sendOptions), + ...unwrappedOptions, headers: { - ...state.sendOptions.headers, + ...unwrappedOptions.headers, "Content-Range": getContentRangeValue(chunk, chunk.data, item), } }; @@ -60,12 +62,13 @@ const uploadChunkWithUpdatedData = ( return triggerUpdater(trigger, CHUNK_EVENTS.CHUNK_START, { item: unwrap(item), chunk: pick(chunk, ["id", "start", "end", "index", "attempt"]), - chunkItem: chunkItem, + chunkItem: { ...chunkItem }, sendOptions, url: state.url, chunkIndex, remainingCount: state.chunks.length, totalCount: state.chunkCount, + //TODO: should expose chunk_progress event instead of passing callback like this onProgress, }) // $FlowFixMe - https://github.com/facebook/flow/issues/8215 @@ -81,14 +84,14 @@ const uploadChunkWithUpdatedData = ( getSkippedResult() : xhrSend([chunkItem], updatedData?.url || state.url, - mergeWithUndefined({}, sendOptions, updatedData && updatedData.sendOptions), + mergeWithUndefined({}, sendOptions, updatedData?.sendOptions), onChunkProgress); }); }; const sendChunk = ( chunk: Chunk, - state: State, + chunkedState: ChunkedState, item: BatchItem, onProgress: OnProgress, trigger: TriggerMethod, @@ -102,9 +105,11 @@ const sendChunk = ( throw new ChunkedSendError("chunk failure - failed to slice"); } - logger.debugLog(`chunkedSender.sendChunk: about to send chunk ${chunk.id} [${chunk.start}-${chunk.end}] to: ${state.url || ""}`); + const url = chunkedState.getState().url; - const chunkXhrRequest = uploadChunkWithUpdatedData(chunk, state, item, onProgress, trigger); + logger.debugLog(`chunkedSender.sendChunk: about to send chunk ${chunk.id} [${chunk.start}-${chunk.end}] to: ${url || ""}`); + + const chunkXhrRequest = uploadChunkWithUpdatedData(chunk, chunkedState, item, onProgress, trigger); const abort = () => { chunkXhrRequest.then(({ abort }) => abort()); diff --git a/packages/core/chunked-sender/src/chunkedSender/sendChunks.js b/packages/core/chunked-sender/src/chunkedSender/sendChunks.js index 7e051c84a..f18fe404d 100644 --- a/packages/core/chunked-sender/src/chunkedSender/sendChunks.js +++ b/packages/core/chunked-sender/src/chunkedSender/sendChunks.js @@ -8,7 +8,7 @@ import sendChunk from "./sendChunk"; import type { BatchItem, FileState } from "@rpldy/shared"; import type { OnProgress } from "@rpldy/sender"; import type { TriggerMethod } from "@rpldy/life-events"; -import type { Chunk, State } from "./types"; +import type { Chunk, ChunkedState } from "./types"; const resolveOnError = (resolve, ex) => { if (ex instanceof ChunkedSendError) { @@ -24,31 +24,34 @@ const resolveOnError = 
(resolve, ex) => { } }; -const finalizeOnFinish = (state: State, item: BatchItem, resolve, status: FileState) => { - state.finished = true; +const finalizeOnFinish = (chunkedState: ChunkedState, item: BatchItem, resolve, status: FileState) => { + chunkedState.updateState((state) => { + state.finished = true; + }); resolve({ state: status, - response: { results: state.responses }, + response: { results: chunkedState.getState().responses }, }); }; -const resolveOnAllChunksFinished = (state: State, item: BatchItem, resolve): boolean => { +const resolveOnAllChunksFinished = (chunkedState: ChunkedState, item: BatchItem, resolve): boolean => { + const state = chunkedState.getState(); const finished = !state.chunks.length; if (state.aborted) { logger.debugLog(`chunkedSender: chunked upload aborted for item: ${item.id}`); - finalizeOnFinish(state, item, resolve, FILE_STATES.ABORTED); + finalizeOnFinish(chunkedState, item, resolve, FILE_STATES.ABORTED); } else if (finished && !state.error) { logger.debugLog(`chunkedSender: chunked upload finished for item: ${item.id}`, state.responses); - finalizeOnFinish(state, item, resolve, FILE_STATES.FINISHED); + finalizeOnFinish(chunkedState, item, resolve, FILE_STATES.FINISHED); } return finished || state.error; }; export const handleChunk = ( - state: State, + chunkedState: ChunkedState, item: BatchItem, onProgress: OnProgress, chunkResolve: (any) => void, @@ -57,15 +60,15 @@ export const handleChunk = ( ): Promise => new Promise((resolve, reject) => { try { - const chunkSendResult = sendChunk(chunk, state, item, onProgress, trigger); + const chunkSendResult = sendChunk(chunk, chunkedState, item, onProgress, trigger); - handleChunkRequest(state, item, chunk.id, chunkSendResult, trigger, onProgress) + handleChunkRequest(chunkedState, item, chunk.id, chunkSendResult, trigger, onProgress) .then(() => { resolve(); - if (!resolveOnAllChunksFinished(state, item, chunkResolve)) { + if (!resolveOnAllChunksFinished(chunkedState, item, chunkResolve)) { //not finished - continue sending remaining chunks - sendChunks(state, item, onProgress, chunkResolve, trigger); + sendChunks(chunkedState, item, onProgress, chunkResolve, trigger); } }); } catch (ex) { @@ -74,12 +77,14 @@ export const handleChunk = ( }); const sendChunks = ( - state: State, + chunkedState: ChunkedState, item: BatchItem, onProgress: OnProgress, resolve: (any) => void, trigger: TriggerMethod, ) => { + const state = chunkedState.getState(); + if (!state.finished && !state.aborted) { const inProgress = Object.keys(state.requests).length; @@ -89,16 +94,19 @@ const sendChunks = ( let chunks; try { - chunks = getChunksToSend(state); + chunks = getChunksToSend(chunkedState); } catch (ex) { resolveOnError(resolve, ex); } if (chunks) { chunks.forEach((chunk) => { - handleChunk(state, item, onProgress, resolve, chunk, trigger) + handleChunk(chunkedState, item, onProgress, resolve, chunk, trigger) .catch((ex) => { - state.error = true; + chunkedState.updateState((state) => { + state.error = true; + }); + resolveOnError(resolve, ex); }); }); diff --git a/packages/core/chunked-sender/src/chunkedSender/tests/index.test.js b/packages/core/chunked-sender/src/chunkedSender/tests/createChunkedSender.test.js similarity index 97% rename from packages/core/chunked-sender/src/chunkedSender/tests/index.test.js rename to packages/core/chunked-sender/src/chunkedSender/tests/createChunkedSender.test.js index df3ebf745..65d520711 100644 --- a/packages/core/chunked-sender/src/chunkedSender/tests/index.test.js +++ 
b/packages/core/chunked-sender/src/chunkedSender/tests/createChunkedSender.test.js @@ -1,6 +1,6 @@ import send from "@rpldy/sender"; import processChunks from "../processChunks"; -import createChunkedSender from "../"; +import createChunkedSender from "../createChunkedSender"; jest.mock("@rpldy/sender", () => jest.fn()); diff --git a/packages/core/chunked-sender/src/chunkedSender/tests/getChunkedState.test.js b/packages/core/chunked-sender/src/chunkedSender/tests/getChunkedState.test.js new file mode 100644 index 000000000..69f2a7929 --- /dev/null +++ b/packages/core/chunked-sender/src/chunkedSender/tests/getChunkedState.test.js @@ -0,0 +1,26 @@ +import getChunkedState from "../getChunkedState"; + +jest.mock("@rpldy/simple-state", () => (state) => ({ + state, + update: (updater) => updater(state), +})); + +describe("getChunkedState tests", () => { + it("should return chunked state", () => { + const chunkedState = getChunkedState([{}, {}], "test.com", { method: "PUT", startByte: 3 }, { chunked: true }); + + const state = chunkedState.getState(); + + expect(state.chunks).toHaveLength(2); + expect(state.url).toBe("test.com"); + + let updateCnt = 0; + + chunkedState.updateState((state) => { + updateCnt++; + expect(state.startByte).toBe(3); + }); + + expect(updateCnt).toBe(1); + }); +}); diff --git a/packages/core/chunked-sender/src/chunkedSender/tests/getChunks.test.js b/packages/core/chunked-sender/src/chunkedSender/tests/getChunks.test.js index e00db8149..e6d63fd9c 100644 --- a/packages/core/chunked-sender/src/chunkedSender/tests/getChunks.test.js +++ b/packages/core/chunked-sender/src/chunkedSender/tests/getChunks.test.js @@ -2,9 +2,7 @@ import getChunks from "../getChunks"; import ChunkedSendError from "../ChunkedSendError"; describe("getChunks tests", () => { - it("should return chunks for file", () => { - const chunks = getChunks( { id: "bi1", file: { size: 999 } }, { chunkSize: 201 }); @@ -30,7 +28,6 @@ describe("getChunks tests", () => { }); it("should return one chunk if file smaller than chunk size", () => { - const chunks = getChunks( { id: "bi1", file: { size: 499 } }, { chunkSize: 500 }); diff --git a/packages/core/chunked-sender/src/chunkedSender/tests/getChunksToSend.test.js b/packages/core/chunked-sender/src/chunkedSender/tests/getChunksToSend.test.js index da2ac8a20..7ca87d2a9 100644 --- a/packages/core/chunked-sender/src/chunkedSender/tests/getChunksToSend.test.js +++ b/packages/core/chunked-sender/src/chunkedSender/tests/getChunksToSend.test.js @@ -1,56 +1,54 @@ import ChunkedSendError from "../ChunkedSendError"; import getChunksToSend from "../getChunksToSend"; +import getChunkedState from "./mocks/getChunkedState.mock"; describe("getChunkToSend tests", () => { - it("should return 0 chunks if already in progress and no parallel", () => { - const chunks = getChunksToSend({ + const chunks = getChunksToSend(getChunkedState({ requests: { "c1": {} }, chunks: [1, 2] - }); + })); expect(chunks).toHaveLength(0); }); it("should return 0 chunks if no chunks left", () => { - const chunks = getChunksToSend({ + const chunks = getChunksToSend(getChunkedState({ requests: {}, chunks: [], - }); + })); expect(chunks).toHaveLength(0); }); it("should return 1 chunk to send for no parallel and no in progress", () => { - - const chunks = getChunksToSend({ + const chunks = getChunksToSend(getChunkedState({ requests: {}, chunks: [1, 2, 3], parallel: 1, - }); + })); expect(chunks).toEqual([1]); }); it("should return chunk that isnt active when parallel > 1", () => { - const chunks = 
getChunksToSend({ + const chunks = getChunksToSend(getChunkedState({ requests: { 1: {} }, chunks: [{ id: "1" }, { id: "2" }], parallel: 2, - }); + })); expect(chunks).toEqual([{ id: "2" }]); }); it("should return multiple chunks to send if parallel > 1", () => { - const chunks = [{ id: "1" }, { id: "2" }]; - const sendChunks = getChunksToSend({ + const sendChunks = getChunksToSend(getChunkedState({ requests: {}, chunks: [...chunks, { id: "3" }], parallel: 2, - }); + })); expect(sendChunks).toEqual(chunks); }); @@ -58,22 +56,22 @@ describe("getChunkToSend tests", () => { it("should return chunk while its below attempts limit", () => { const chunk = { id: "3", attempt: 1 }; - const chunks = getChunksToSend({ + const chunks = getChunksToSend(getChunkedState({ requests: {}, retries: 2, chunks: [chunk], - }); + })); expect(chunks).toEqual([chunk]); }); it("should throw ChunkedSendError if chunk is over the attempt limit", () => { expect(() => { - getChunksToSend({ + getChunksToSend(getChunkedState({ requests: {}, retries: 1, chunks: [{ id: "3", attempt: 1 }], - }); + })); }).toThrow(ChunkedSendError); }); -}); \ No newline at end of file +}); diff --git a/packages/core/chunked-sender/src/chunkedSender/tests/handleChunkRequest.test.js b/packages/core/chunked-sender/src/chunkedSender/tests/handleChunkRequest.test.js index d0abceeb7..1768226f7 100644 --- a/packages/core/chunked-sender/src/chunkedSender/tests/handleChunkRequest.test.js +++ b/packages/core/chunked-sender/src/chunkedSender/tests/handleChunkRequest.test.js @@ -1,6 +1,7 @@ import { FILE_STATES } from "@rpldy/shared"; import handleChunkRequest from "../handleChunkRequest"; import { CHUNK_EVENTS } from "../../consts"; +import getChunkedState from "./mocks/getChunkedState.mock"; describe("handleChunkRequest tests", () => { @@ -17,14 +18,14 @@ describe("handleChunkRequest tests", () => { file: { size: 2000 } }; - const state = { + const state = getChunkedState({ requests: { "c1": {}, "c2": {} }, chunks: chunks.slice(), responses: [] - }; + }); const sendResult = { request: new Promise((resolve) => { @@ -38,14 +39,14 @@ describe("handleChunkRequest tests", () => { const test = handleChunkRequest(state, item, "c1", sendResult, trigger, onProgress); - expect(state.requests.c1.id).toBe("c1"); - expect(state.requests.c1.abort).toBeInstanceOf(Function); + expect(state.getState().requests.c1.id).toBe("c1"); + expect(state.getState().requests.c1.abort).toBeInstanceOf(Function); jest.runAllTimers(); await test; - return { state, item, onProgress, chunks }; + return { state: state.getState(), item, onProgress, chunks }; }; it("should handle send success", async () => { diff --git a/packages/core/chunked-sender/src/chunkedSender/tests/mocks/getChunkedState.mock.js b/packages/core/chunked-sender/src/chunkedSender/tests/mocks/getChunkedState.mock.js new file mode 100644 index 000000000..7fb353358 --- /dev/null +++ b/packages/core/chunked-sender/src/chunkedSender/tests/mocks/getChunkedState.mock.js @@ -0,0 +1,14 @@ +const getChunkedState = (state = {}) => { + const getState = jest.fn(() => state); + + const updateState = jest.fn((updater) => { + updater(state); + }); + + return { + getState, + updateState, + }; +} + +export default getChunkedState; diff --git a/packages/core/chunked-sender/src/chunkedSender/tests/processChunkProgressData.test.js b/packages/core/chunked-sender/src/chunkedSender/tests/processChunkProgressData.test.js index 86bafe19e..aecf44dcb 100644 --- 
a/packages/core/chunked-sender/src/chunkedSender/tests/processChunkProgressData.test.js +++ b/packages/core/chunked-sender/src/chunkedSender/tests/processChunkProgressData.test.js @@ -1,15 +1,15 @@ import processChunkProgressData from "../processChunkProgressData"; +import getChunkedState from "./mocks/getChunkedState.mock"; describe("processChunkProgressData test", () => { - it("should return loaded sum for chunks", () => { - const { loaded, total } = processChunkProgressData({ + const { loaded, total } = processChunkProgressData(getChunkedState({ uploaded: { c1: 200, c2: 400, }, startByte: 1, - }, { + }), { file: { size: 3000 } }, "c3", 1000); @@ -18,13 +18,13 @@ describe("processChunkProgressData test", () => { }); it("should return total if loaded sum is bigger", () => { - const { loaded, total } = processChunkProgressData({ + const { loaded, total } = processChunkProgressData(getChunkedState({ uploaded: { c1: 1000, c2: 1000, }, startByte: 1, - }, { + }), { file: { size: 3000 } }, "c3", 1000); @@ -33,14 +33,14 @@ describe("processChunkProgressData test", () => { }); it("should not add chunk loaded if state value bigger", () => { - const { loaded, total } = processChunkProgressData({ + const { loaded, total } = processChunkProgressData(getChunkedState({ uploaded: { c1: 1000, c2: 1000, c3: 999, }, startByte: 0, - }, { + }), { file: { size: 3000 } }, "c3", 998); diff --git a/packages/core/chunked-sender/src/chunkedSender/tests/processChunks.test.js b/packages/core/chunked-sender/src/chunkedSender/tests/processChunks.test.js index e405bf225..b9a8cbb35 100644 --- a/packages/core/chunked-sender/src/chunkedSender/tests/processChunks.test.js +++ b/packages/core/chunked-sender/src/chunkedSender/tests/processChunks.test.js @@ -1,13 +1,23 @@ +import createState from "@rpldy/simple-state"; +import getChunkedState from "./mocks/getChunkedState.mock"; import getChunks from "../getChunks"; import sendChunks from "../sendChunks"; import processChunks, { process, abortChunkedRequest } from "../processChunks"; +jest.mock("@rpldy/simple-state"); jest.mock("lodash", () => ({ throttle: (fn) => fn })); //doesnt work :( jest.mock("../getChunks", () => jest.fn()); jest.mock("../sendChunks", () => jest.fn()); describe("processChunks tests", () => { + beforeAll(()=>{ + createState.mockImplementation((state) => ({ + state, + update: jest.fn((updater) => updater(state)), + })); + }); + beforeEach(() => { jest.useRealTimers(); @@ -33,27 +43,29 @@ describe("processChunks tests", () => { expect(result.abort).toBeInstanceOf(Function); expect(result.request).toBeInstanceOf(Promise); - expect(sendChunks).toHaveBeenCalledWith({ - finished: false, - aborted: false, + expect(sendChunks).toHaveBeenCalledWith(expect.any(Object), item, expect.any(Function), expect.any(Function), expect.any(Function)); + + expect(sendChunks.mock.calls[0][0].getState()).toStrictEqual({ + finished: false, + aborted: false, error: false, - uploaded: {}, - requests: {}, - responses: [], + uploaded: {}, + requests: {}, + responses: [], chunkCount: chunks.length, - chunks, - url, - sendOptions, + chunks, + url, + sendOptions, startByte: 0, - ...chunkedOptions, - }, item, expect.any(Function), expect.any(Function), expect.any(Function)); + ...chunkedOptions, + }); }); describe("process tests", () => { it("should send chunks and handle progress", () => { jest.useFakeTimers(); //using fake timers coz for some reason lodash isnt mocked... 
:( - const state = { + const state = getChunkedState({ uploaded: {}, chunks: [ { @@ -61,7 +73,7 @@ describe("processChunks tests", () => { } ], startByte: 4, - }; + }); const item = { file: { size: 1000 }}; const onProgress = jest.fn(); @@ -102,12 +114,12 @@ describe("processChunks tests", () => { it("should call abort on chunks", () => { const abort = jest.fn(); - const state = { + const state = getChunkedState({ requests: { c1: { abort, }, c2: { abort, }, }, - }; + }); const result = process(state, {}, ); @@ -115,7 +127,7 @@ describe("processChunks tests", () => { expect(abort).toHaveBeenCalledTimes(2); - expect(state.aborted).toBe(true); + expect(state.getState().aborted).toBe(true); }); }); @@ -123,28 +135,28 @@ describe("processChunks tests", () => { it("should do nothing for finished request", () => { const abort = jest.fn(); - const state = { + const state = getChunkedState({ finished: true, requests: { c1: { abort, } } - }; + }); abortChunkedRequest(state, {}); expect(abort).not.toHaveBeenCalled(); - expect(state.aborted).toBeFalsy(); + expect(state.getState().aborted).toBeFalsy(); }); it("should do nothing for aborted request", () => { const abort = jest.fn(); - const state = { + const state = getChunkedState({ finished: true, requests: { c1: { abort, } } - }; + }); abortChunkedRequest(state, {}); @@ -152,22 +164,21 @@ describe("processChunks tests", () => { }); it("should abort requests for in progress request", () => { - const abort = jest.fn(); - const state = { + const state = getChunkedState({ requests: { c1: { abort, }, c2: { abort, }, c3: { abort, }, } - }; + }); abortChunkedRequest(state, {}); expect(abort).toHaveBeenCalledTimes(3); - expect(state.aborted).toBe(true); + expect(state.getState().aborted).toBe(true); }); }); }); diff --git a/packages/core/chunked-sender/src/chunkedSender/tests/sendChunk.test.js b/packages/core/chunked-sender/src/chunkedSender/tests/sendChunk.test.js index 5add371b4..e76607aa3 100644 --- a/packages/core/chunked-sender/src/chunkedSender/tests/sendChunk.test.js +++ b/packages/core/chunked-sender/src/chunkedSender/tests/sendChunk.test.js @@ -10,13 +10,13 @@ import ChunkedSendError from "../ChunkedSendError"; import { getChunkDataFromFile } from "../../utils"; import { CHUNK_EVENTS } from "../../consts"; import sendChunk from "../sendChunk"; +import getChunkedState from "./mocks/getChunkedState.mock"; jest.mock("@rpldy/sender", () => jest.fn()); jest.mock("../../utils", () => ({ getChunkDataFromFile: jest.fn() })); -jest.mock("@rpldy/simple-state"); //, () =>({unwrap: jest.fn()})); +jest.mock("@rpldy/simple-state"); describe("sendChunk tests", () => { - const xhrSendResult = { request: Promise.resolve({ xhrSend: true }), abort: jest.fn() }; const onProgress = jest.fn(); @@ -54,7 +54,9 @@ describe("sendChunk tests", () => { createBatchItem.mockReturnValueOnce(chunkItem); xhrSend.mockResolvedValueOnce(xhrSendResult); - const sendResult = sendChunk(chunk, { url, sendOptions, chunks, chunkCount: 4 }, { file }, onProgress, trigger); + const state = getChunkedState({ url, sendOptions, chunks, chunkCount: 4 }); + + const sendResult = sendChunk(chunk, state, { file }, onProgress, trigger); const result = await sendResult.request; @@ -64,10 +66,11 @@ describe("sendChunk tests", () => { expect(createBatchItem).toHaveBeenCalledWith(fileData, "c1"); + expect(unwrap).toHaveBeenCalledWith(sendOptions); + const updatedSendOptions = { unwrapped: true, headers: { - ...sendOptions.headers, "Content-Range": `bytes 1-${fileData.size}/400`, } }; @@ -146,7 +149,6 @@ 
describe("sendChunk tests", () => { }); it("should throw if failed to slice chunk", () => { - getChunkDataFromFile.mockReturnValueOnce(null); const chunk = { id: "c1", start: 1, end: 10, data: null }; @@ -163,13 +165,14 @@ describe("sendChunk tests", () => { sendOptions = { method: "POST", headers: { "x-test": 123 } }; it("should call abort successfully - before request is used", async () => { - xhrSend.mockResolvedValueOnce(xhrSendResult); triggerUpdater .mockResolvedValueOnce({ }); - const sendResult = sendChunk(chunk, { url, sendOptions, chunks: [chunk] }, { file }, onProgress); + const state = getChunkedState({ url, sendOptions, chunks: [chunk] }); + + const sendResult = sendChunk(chunk, state, { file }, onProgress); await sendResult.abort(); @@ -180,13 +183,14 @@ describe("sendChunk tests", () => { }); it("should call abort successfully - after request is used", async() => { - xhrSend.mockResolvedValueOnce(xhrSendResult); triggerUpdater .mockResolvedValueOnce({ }); - const sendResult = sendChunk(chunk, { url, sendOptions, chunks: [chunk] }, { file }, onProgress); + const state = getChunkedState({ url, sendOptions, chunks: [chunk] }); + + const sendResult = sendChunk(chunk, state, { file }, onProgress); await sendResult.request; await sendResult.abort(); diff --git a/packages/core/chunked-sender/src/chunkedSender/tests/sendChunks.test.js b/packages/core/chunked-sender/src/chunkedSender/tests/sendChunks.test.js index cead5f6f3..9482a9c16 100644 --- a/packages/core/chunked-sender/src/chunkedSender/tests/sendChunks.test.js +++ b/packages/core/chunked-sender/src/chunkedSender/tests/sendChunks.test.js @@ -4,6 +4,7 @@ import handleChunkRequest from "../handleChunkRequest"; import getChunksToSend from "../getChunksToSend"; import sendChunk from "../sendChunk"; import sendChunks, { handleChunk } from "../sendChunks"; +import getChunkedState from "./mocks/getChunkedState.mock"; jest.mock("../handleChunkRequest", () => jest.fn()); jest.mock("../getChunksToSend", () => jest.fn()); @@ -20,29 +21,28 @@ describe("sendChunks tests", () => { }); it("should do nothing if finished", () => { - sendChunks({ finished: true }); + sendChunks(getChunkedState({ finished: true })); expect(getChunksToSend).not.toHaveBeenCalled(); }); it("should do nothing if aborted", () => { - sendChunks({ aborted: true }); + sendChunks(getChunkedState({ aborted: true })); expect(getChunksToSend).not.toHaveBeenCalled(); }); it("should do nothing if parallel not allowed", () => { - sendChunks({ requests: { c1: {} }, parallel: 1 }); + sendChunks(getChunkedState({ requests: { c1: {} }, parallel: 1 })); expect(getChunksToSend).not.toHaveBeenCalled(); }); it("should resolve with chunk failed error", () => { - getChunksToSend.mockImplementationOnce(() => { throw new ChunkedSendError(); }); const resolve = jest.fn(); - sendChunks({ requests: {} }, null, null, resolve); + sendChunks(getChunkedState({ requests: {} }), null, null, resolve); expect(resolve).toHaveBeenCalledWith({ state: FILE_STATES.ERROR, @@ -57,7 +57,7 @@ describe("sendChunks tests", () => { const resolve = jest.fn(); - sendChunks({ requests: {} }, null, null, resolve); + sendChunks(getChunkedState({ requests: {} }), null, null, resolve); expect(resolve).toHaveBeenCalledWith({ state: FILE_STATES.ERROR, @@ -87,13 +87,13 @@ describe("sendChunks tests", () => { const noop = () => {}; resolve = resolve || noop; - state = { + state = getChunkedState({ chunks: [], requests: {}, url: "test.com", sendOptions, ...state, - }; + }); const trigger = noop(); @@ -146,16 +146,15 @@ 
describe("sendChunks tests", () => { }); describe("handleChunk tests", () => { - it("should handle chunk and finish", async () => { const result = {}; sendChunk.mockReturnValueOnce(result); handleChunkRequest.mockResolvedValueOnce(); - const state = { + const state = getChunkedState({ chunks: [], responses: [], - }; + }); const chunkId = "c1"; @@ -169,11 +168,11 @@ describe("sendChunks tests", () => { expect(handleChunkRequest).toHaveBeenCalledWith(state, item, chunkId, result, trigger, onProgress); - expect(state.finished).toBe(true); + expect(state.getState().finished).toBe(true); expect(resolve).toHaveBeenCalledWith({ state: FILE_STATES.FINISHED, - response: { results: state.responses }, + response: { results: state.getState().responses }, }); expect(getChunksToSend).not.toHaveBeenCalled(); @@ -186,34 +185,33 @@ describe("sendChunks tests", () => { const chunkId = "c1"; - const state = { + const state = getChunkedState({ aborted: true, chunks: [], responses: ["aborted"], - }; + }); const resolve = jest.fn(); await handleChunk(state, {}, {}, resolve, { id: chunkId }, { }); - expect(state.finished).toBe(true); + expect(state.getState().finished).toBe(true); expect(resolve).toHaveBeenCalledWith({ state: FILE_STATES.ABORTED, - response: { results: state.responses }, + response: { results: state.getState().responses }, }); expect(getChunksToSend).not.toHaveBeenCalled(); }); it("should handle chunk and send more", async () => { - handleChunkRequest.mockResolvedValueOnce(); - const state = { + const state = getChunkedState({ requests: {}, chunks: [1, 2], - }; + }); const trigger = jest.fn(); const resolve = jest.fn(); @@ -222,7 +220,7 @@ describe("sendChunks tests", () => { await handleChunk(state, {}, {}, resolve, chunk, trigger); - expect(state.finished).toBeFalsy(); + expect(state.getState().finished).toBeFalsy(); expect(resolve).not.toHaveBeenCalled(); expect(getChunksToSend).toHaveBeenCalledWith(state); expect(sendChunk).toHaveBeenCalledTimes(1); diff --git a/packages/core/chunked-sender/src/chunkedSender/types.js b/packages/core/chunked-sender/src/chunkedSender/types.js index 78216eddc..f070673fb 100644 --- a/packages/core/chunked-sender/src/chunkedSender/types.js +++ b/packages/core/chunked-sender/src/chunkedSender/types.js @@ -1,5 +1,4 @@ // @flow - import type { UploadData } from "@rpldy/shared"; import type { SendOptions } from "@rpldy/sender"; import type { MandatoryChunkedOptions } from "../types"; @@ -29,6 +28,14 @@ export type State = {| startByte: number, |}; +type UpdateStateMethod = ((State) => void) => void; +type GetStateMethod = () => State; + +export type ChunkedState = {| + getState: GetStateMethod, + updateState: UpdateStateMethod, +|}; + export type ChunksSendResponse = { sendPromise: Promise, abort: () => boolean diff --git a/yarn.lock b/yarn.lock index 11453e1d1..17e1c3791 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3200,16 +3200,6 @@ node-gyp "^7.1.0" read-package-json-fast "^2.0.1" -"@npmcli/run-script@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@npmcli/run-script/-/run-script-2.0.0.tgz#9949c0cab415b17aaac279646db4f027d6f1e743" - integrity sha512-fSan/Pu11xS/TdaTpTB0MRn9guwGU8dye+x56mEVgBEd/QsybBbYcAL0phPXi8SGWFEChkQd6M9qL4y6VOpFig== - dependencies: - "@npmcli/node-gyp" "^1.0.2" - "@npmcli/promise-spawn" "^1.3.2" - node-gyp "^8.2.0" - read-package-json-fast "^2.0.1" - "@octokit/auth-token@^2.4.4": version "2.5.0" resolved "https://registry.yarnpkg.com/@octokit/auth-token/-/auth-token-2.5.0.tgz#27c37ea26c205f28443402477ffd261311f21e36" @@ 
-10105,7 +10095,7 @@ good-listener@^1.2.2: dependencies: delegate "^3.1.2" -graceful-fs@^4.1.11, graceful-fs@^4.2.6: +graceful-fs@^4.1.11: version "4.2.8" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.8.tgz#e412b8d33f5e006593cbd3cee6df9f2cebbe802a" integrity sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg== @@ -10683,13 +10673,6 @@ ignore-walk@3.0.4, ignore-walk@^3.0.3: dependencies: minimatch "^3.0.4" -ignore-walk@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-4.0.1.tgz#fc840e8346cf88a3a9380c5b17933cd8f4d39fa3" - integrity sha512-rzDQLaW4jQbh2YrOFlJdCtX8qgJTehFRYiUB2r1osqTeDzV/3+Jh8fz1oAPzUThf3iku8Ds4IDqawI5d8mUiQw== - dependencies: - minimatch "^3.0.4" - ignore@^4.0.3, ignore@^4.0.6: version "4.0.6" resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" @@ -12818,7 +12801,7 @@ make-fetch-happen@^8.0.9: socks-proxy-agent "^5.0.0" ssri "^8.0.0" -make-fetch-happen@^9.0.1, make-fetch-happen@^9.1.0: +make-fetch-happen@^9.0.1: version "9.1.0" resolved "https://registry.yarnpkg.com/make-fetch-happen/-/make-fetch-happen-9.1.0.tgz#53085a09e7971433e6765f7971bf63f4e05cb968" integrity sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg== @@ -13559,22 +13542,6 @@ node-gyp@^7.1.0: tar "^6.0.2" which "^2.0.2" -node-gyp@^8.2.0: - version "8.4.0" - resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-8.4.0.tgz#6e1112b10617f0f8559c64b3f737e8109e5a8338" - integrity sha512-Bi/oCm5bH6F+FmzfUxJpPaxMEyIhszULGR3TprmTeku8/dMFcdTcypk120NeZqEt54r1BrgEKtm2jJiuIKE28Q== - dependencies: - env-paths "^2.2.0" - glob "^7.1.4" - graceful-fs "^4.2.6" - make-fetch-happen "^9.1.0" - nopt "^5.0.0" - npmlog "^4.1.2" - rimraf "^3.0.2" - semver "^7.3.5" - tar "^6.1.2" - which "^2.0.2" - node-int64@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" @@ -13752,16 +13719,6 @@ npm-packlist@^2.1.4: npm-bundled "^1.1.1" npm-normalize-package-bin "^1.0.1" -npm-packlist@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-3.0.0.tgz#0370df5cfc2fcc8f79b8f42b37798dd9ee32c2a9" - integrity sha512-L/cbzmutAwII5glUcf2DBRNY/d0TFd4e/FnaZigJV6JD85RHZXJFGwCndjMWiiViiWSsWt3tiOLpI3ByTnIdFQ== - dependencies: - glob "^7.1.6" - ignore-walk "^4.0.1" - npm-bundled "^1.1.1" - npm-normalize-package-bin "^1.0.1" - npm-pick-manifest@^6.0.0, npm-pick-manifest@^6.1.1: version "6.1.1" resolved "https://registry.yarnpkg.com/npm-pick-manifest/-/npm-pick-manifest-6.1.1.tgz#7b5484ca2c908565f43b7f27644f36bb816f5148" @@ -14330,31 +14287,6 @@ pacote@^11.2.6: ssri "^8.0.1" tar "^6.1.0" -pacote@^12.0.2: - version "12.0.2" - resolved "https://registry.yarnpkg.com/pacote/-/pacote-12.0.2.tgz#14ae30a81fe62ec4fc18c071150e6763e932527c" - integrity sha512-Ar3mhjcxhMzk+OVZ8pbnXdb0l8+pimvlsqBGRNkble2NVgyqOGE3yrCGi/lAYq7E7NRDMz89R1Wx5HIMCGgeYg== - dependencies: - "@npmcli/git" "^2.1.0" - "@npmcli/installed-package-contents" "^1.0.6" - "@npmcli/promise-spawn" "^1.2.0" - "@npmcli/run-script" "^2.0.0" - cacache "^15.0.5" - chownr "^2.0.0" - fs-minipass "^2.1.0" - infer-owner "^1.0.4" - minipass "^3.1.3" - mkdirp "^1.0.3" - npm-package-arg "^8.0.1" - npm-packlist "^3.0.0" - npm-pick-manifest "^6.0.0" - npm-registry-fetch "^11.0.0" - promise-retry "^2.0.1" - read-package-json-fast "^2.0.1" - rimraf "^3.0.2" - ssri "^8.0.1" - tar "^6.1.0" - 
pako@~1.0.5: version "1.0.11" resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf" @@ -17190,18 +17122,6 @@ tar@^6.0.2, tar@^6.1.0: mkdirp "^1.0.3" yallist "^4.0.0" -tar@^6.1.2: - version "6.1.11" - resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.11.tgz#6760a38f003afa1b2ffd0ffe9e9abbd0eab3d621" - integrity sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA== - dependencies: - chownr "^2.0.0" - fs-minipass "^2.0.0" - minipass "^3.0.0" - minizlib "^2.1.1" - mkdirp "^1.0.3" - yallist "^4.0.0" - teeny-request@7.1.1: version "7.1.1" resolved "https://registry.yarnpkg.com/teeny-request/-/teeny-request-7.1.1.tgz#2b0d156f4a8ad81de44303302ba8d7f1f05e20e6" @@ -17673,16 +17593,16 @@ typedarray@^0.0.6: resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= -typescript@^3.9.5: - version "3.9.9" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.9.9.tgz#e69905c54bc0681d0518bd4d587cc6f2d0b1a674" - integrity sha512-kdMjTiekY+z/ubJCATUPlRDl39vXYiMV9iyeMuEuXZh2we6zz80uovNN2WlAxmmdE/Z/YQe+EbOEXB5RHEED3w== - typescript@^4.4.3: version "4.4.4" resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.4.tgz#2cd01a1a1f160704d3101fd5a58ff0f9fcb8030c" integrity sha512-DqGhF5IKoBl8WNf8C1gu8q0xZSInh9j1kJJMqT3a94w1JzVaBU4EXOSMrz9yDqMT0xt3selp83fuFMQ0uzv6qA== +typescript@^4.5.5: + version "4.5.5" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.5.5.tgz#d8c953832d28924a9e3d37c73d729c846c5896f3" + integrity sha512-TCTIul70LyWe6IJWT8QSYeA54WQe8EjQFU4wY52Fasj5UKx88LNYKCgBEHcOMOrFF1rKGbD8v/xcNWVUq9SymA== + uc.micro@^1.0.1, uc.micro@^1.0.5: version "1.0.6" resolved "https://registry.yarnpkg.com/uc.micro/-/uc.micro-1.0.6.tgz#9c411a802a409a91fc6cf74081baba34b24499ac"