From f62373f9a3c8eeb94572be95b3a7a5be4607e948 Mon Sep 17 00:00:00 2001
From: Andrei Iurko
Date: Tue, 10 Dec 2024 18:38:06 +0300
Subject: [PATCH] :bug: Accept push-fixes for prMode (QD-9962)

---
 scan/dist/index.js          | 58178 +++++++++++++++++-----------------
 scan/src/main.ts            |    12 -
 scan/src/utils.ts           |     7 +
 vsts/vss-extension.dev.json |     2 +-
 4 files changed, 29097 insertions(+), 29102 deletions(-)

diff --git a/scan/dist/index.js b/scan/dist/index.js
index f52f15d7..bf1de473 100644
--- a/scan/dist/index.js
+++ b/scan/dist/index.js
@@ -20459,5991 +20459,1840 @@ Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
 }
 });
 
-// ../node_modules/@actions/github/lib/context.js
-var require_context = __commonJS({
-  "../node_modules/@actions/github/lib/context.js"(exports2) {
+// ../common/cli.json
+var version, checksum;
+var init_cli = __esm({
+  "../common/cli.json"() {
+    version = "2024.3.2";
+    checksum = {
+      windows_x86_64: "d2260f5d2cfd18c0795adcaf7ff57f77d2e7f9d0ce888531c443923388262eec",
+      linux_arm64: "8086568b6c62fb7f109c33d8f04f68cf060db4cd260a46d5f581fb093474ecc7",
+      darwin_arm64: "03962c7c513b6a1ca67164bbcb65f8673a60cff6fe13f618598439b45d352084",
+      darwin_x86_64: "e70904e0c4fc1d56a8d5f1e2682ccbe168b41b973dac2c5fe42f9b31837dbd86",
+      windows_arm64: "940e9a84b03db0b5e67f93299879e804984b3893577a58b2424dbc5c2db39cbe",
+      linux_x86_64: "a7a47898971fe2c7db08d10afc40292736df33e9b33d9d84b8672f3ec246da04"
+    };
+  }
+});

[Remainder of the hunk elided: it is regenerated bundler output that interleaves removed and re-added copies of vendored node_modules (@actions/github, @octokit/endpoint, @octokit/request, before-after-hook, once, deprecation, wrappy, process-nextick-args, and jszip's readable-stream/safe-buffer/core-util-is/inherits/string_decoder chain), truncated mid-module in this excerpt. The ../common/cli.json block above, pinning the bundled CLI to 2024.3.2 with per-platform sha256 checksums, is the only non-node_modules change visible in this portion of the diff.]
void 0 : _d.signal, - // duplex must be set if request.body is ReadableStream or Async Iterables. - // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. - ...requestOptions.body && { duplex: "half" } - }).then(async (response) => { - url = response.url; - status = response.status; - for (const keyAndValue of response.headers) { - headers[keyAndValue[0]] = keyAndValue[1]; - } - if ("deprecation" in headers) { - const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); - const deprecationLink = matches && matches.pop(); - log.warn( - `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` - ); + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; + }; + function utf8CheckByte(byte) { + if (byte <= 127) return 0; + else if (byte >> 5 === 6) return 2; + else if (byte >> 4 === 14) return 3; + else if (byte >> 3 === 30) return 4; + return byte >> 6 === 2 ? -1 : -2; + } + __name(utf8CheckByte, "utf8CheckByte"); + function utf8CheckIncomplete(self2, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self2.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self2.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0; + else self2.lastNeed = nb - 3; } - if (status === 204 || status === 205) { - return; + return nb; + } + return 0; + } + __name(utf8CheckIncomplete, "utf8CheckIncomplete"); + function utf8CheckExtraBytes(self2, buf, p) { + if ((buf[0] & 192) !== 128) { + self2.lastNeed = 0; + return "\uFFFD"; + } + if (self2.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 192) !== 128) { + self2.lastNeed = 1; + return "\uFFFD"; } - if (requestOptions.method === "HEAD") { - if (status < 400) { - return; + if (self2.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 192) !== 128) { + self2.lastNeed = 2; + return "\uFFFD"; } - throw new import_request_error.RequestError(response.statusText, status, { - response: { - url, - status, - headers, - data: void 0 - }, - request: requestOptions - }); - } - if (status === 304) { - throw new import_request_error.RequestError("Not modified", status, { - response: { - url, - status, - headers, - data: await getResponseData(response) - }, - request: requestOptions - }); - } - if (status >= 400) { - const data = await getResponseData(response); - const error = new import_request_error.RequestError(toErrorMessage(data), status, { - response: { - url, - status, - headers, - data - }, - request: requestOptions - }); - throw error; } - return parseSuccessResponseBody ? 
await getResponseData(response) : response.body; - }).then((data) => { - return { - status, - url, - headers, - data - }; - }).catch((error) => { - if (error instanceof import_request_error.RequestError) - throw error; - else if (error.name === "AbortError") - throw error; - let message = error.message; - if (error.name === "TypeError" && "cause" in error) { - if (error.cause instanceof Error) { - message = error.cause.message; - } else if (typeof error.cause === "string") { - message = error.cause; + } + } + __name(utf8CheckExtraBytes, "utf8CheckExtraBytes"); + function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== void 0) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; + } + __name(utf8FillLast, "utf8FillLast"); + function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString("utf8", i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString("utf8", i, end); + } + __name(utf8Text, "utf8Text"); + function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ""; + if (this.lastNeed) return r + "\uFFFD"; + return r; + } + __name(utf8End, "utf8End"); + function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString("utf16le", i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 55296 && c <= 56319) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); } } - throw new import_request_error.RequestError(message, 500, { - request: requestOptions - }); - }); - } - __name(fetchWrapper, "fetchWrapper"); - async function getResponseData(response) { - const contentType = response.headers.get("content-type"); - if (/application\/json/.test(contentType)) { - return response.json().catch(() => response.text()).catch(() => ""); + return r; } - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString("utf16le", i, buf.length - 1); + } + __name(utf16Text, "utf16Text"); + function utf16End(buf) { + var r = buf && buf.length ? 
this.write(buf) : ""; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString("utf16le", 0, end); } - return getBufferResponse(response); + return r; } - __name(getResponseData, "getResponseData"); - function toErrorMessage(data) { - if (typeof data === "string") - return data; - let suffix; - if ("documentation_url" in data) { - suffix = ` - ${data.documentation_url}`; + __name(utf16End, "utf16End"); + function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString("base64", i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; } else { - suffix = ""; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; } - if ("message" in data) { - if (Array.isArray(data.errors)) { - return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}${suffix}`; - } - return `${data.message}${suffix}`; - } - return `Unknown error: ${JSON.stringify(data)}`; + return buf.toString("base64", i, buf.length - n); } - __name(toErrorMessage, "toErrorMessage"); - function withDefaults(oldEndpoint, newDefaults) { - const endpoint2 = oldEndpoint.defaults(newDefaults); - const newApi = /* @__PURE__ */ __name(function(route, parameters) { - const endpointOptions = endpoint2.merge(route, parameters); - if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint2.parse(endpointOptions)); - } - const request2 = /* @__PURE__ */ __name((route2, parameters2) => { - return fetchWrapper( - endpoint2.parse(endpoint2.merge(route2, parameters2)) - ); - }, "request2"); - Object.assign(request2, { - endpoint: endpoint2, - defaults: withDefaults.bind(null, endpoint2) - }); - return endpointOptions.request.hook(request2, endpointOptions); - }, "newApi"); - return Object.assign(newApi, { - endpoint: endpoint2, - defaults: withDefaults.bind(null, endpoint2) - }); + __name(base64Text, "base64Text"); + function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ""; + if (this.lastNeed) return r + this.lastChar.toString("base64", 0, 3 - this.lastNeed); + return r; } - __name(withDefaults, "withDefaults"); - var request = withDefaults(import_endpoint.endpoint, { - headers: { - "user-agent": `octokit-request.js/${VERSION3} ${(0, import_universal_user_agent.getUserAgent)()}` - } - }); + __name(base64End, "base64End"); + function simpleWrite(buf) { + return buf.toString(this.encoding); + } + __name(simpleWrite, "simpleWrite"); + function simpleEnd(buf) { + return buf && buf.length ? 
this.write(buf) : ""; + } + __name(simpleEnd, "simpleEnd"); } }); -// ../node_modules/@actions/github/node_modules/@octokit/graphql/dist-node/index.js -var require_dist_node6 = __commonJS({ - "../node_modules/@actions/github/node_modules/@octokit/graphql/dist-node/index.js"(exports2, module2) { +// ../node_modules/jszip/node_modules/readable-stream/lib/_stream_readable.js +var require_stream_readable = __commonJS({ + "../node_modules/jszip/node_modules/readable-stream/lib/_stream_readable.js"(exports2, module2) { "use strict"; - var __defProp2 = Object.defineProperty; - var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; - var __getOwnPropNames2 = Object.getOwnPropertyNames; - var __hasOwnProp2 = Object.prototype.hasOwnProperty; - var __export2 = /* @__PURE__ */ __name((target, all) => { - for (var name in all) - __defProp2(target, name, { get: all[name], enumerable: true }); - }, "__export"); - var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames2(from)) - if (!__hasOwnProp2.call(to, key) && key !== except) - __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); - } - return to; - }, "__copyProps"); - var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); - var dist_src_exports = {}; - __export2(dist_src_exports, { - GraphqlResponseError: /* @__PURE__ */ __name(() => GraphqlResponseError, "GraphqlResponseError"), - graphql: /* @__PURE__ */ __name(() => graphql2, "graphql"), - withCustomRequest: /* @__PURE__ */ __name(() => withCustomRequest, "withCustomRequest") - }); - module2.exports = __toCommonJS2(dist_src_exports); - var import_request3 = require_dist_node5(); - var import_universal_user_agent = require_dist_node(); - var VERSION3 = "7.1.0"; - var import_request2 = require_dist_node5(); - var import_request = require_dist_node5(); - function _buildMessageForResponseErrors(data) { - return `Request failed due to following response errors: -` + data.errors.map((e) => ` - ${e.message}`).join("\n"); + var pna = require_process_nextick_args(); + module2.exports = Readable; + var isArray = require_isarray(); + var Duplex; + Readable.ReadableState = ReadableState; + var EE = require("events").EventEmitter; + var EElistenerCount = /* @__PURE__ */ __name(function(emitter, type) { + return emitter.listeners(type).length; + }, "EElistenerCount"); + var Stream = require_stream(); + var Buffer2 = require_safe_buffer().Buffer; + var OurUint8Array = (typeof global !== "undefined" ? global : typeof window !== "undefined" ? window : typeof self !== "undefined" ? 
self : {}).Uint8Array || function() { + }; + function _uint8ArrayToBuffer(chunk) { + return Buffer2.from(chunk); } - __name(_buildMessageForResponseErrors, "_buildMessageForResponseErrors"); - var GraphqlResponseError = class extends Error { - static { - __name(this, "GraphqlResponseError"); - } - constructor(request2, headers, response) { - super(_buildMessageForResponseErrors(response)); - this.request = request2; - this.headers = headers; - this.response = response; - this.name = "GraphqlResponseError"; - this.errors = response.errors; - this.data = response.data; - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } + __name(_uint8ArrayToBuffer, "_uint8ArrayToBuffer"); + function _isUint8Array(obj) { + return Buffer2.isBuffer(obj) || obj instanceof OurUint8Array; + } + __name(_isUint8Array, "_isUint8Array"); + var util = Object.create(require_util8()); + util.inherits = require_inherits(); + var debugUtil = require("util"); + var debug = void 0; + if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog("stream"); + } else { + debug = /* @__PURE__ */ __name(function() { + }, "debug"); + } + var BufferList = require_BufferList(); + var destroyImpl = require_destroy(); + var StringDecoder; + util.inherits(Readable, Stream); + var kProxyEvents = ["error", "close", "destroy", "pause", "resume"]; + function prependListener(emitter, event, fn) { + if (typeof emitter.prependListener === "function") return emitter.prependListener(event, fn); + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn); + else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn); + else emitter._events[event] = [fn, emitter._events[event]]; + } + __name(prependListener, "prependListener"); + function ReadableState(options, stream) { + Duplex = Duplex || require_stream_duplex(); + options = options || {}; + var isDuplex = stream instanceof Duplex; + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 
16 : 16 * 1024; + if (hwm || hwm === 0) this.highWaterMark = hwm; + else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm; + else this.highWaterMark = defaultHwm; + this.highWaterMark = Math.floor(this.highWaterMark); + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + this.sync = true; + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.destroyed = false; + this.defaultEncoding = options.defaultEncoding || "utf8"; + this.awaitDrain = 0; + this.readingMore = false; + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require_string_decoder().StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; } - }; - var NON_VARIABLE_OPTIONS = [ - "method", - "baseUrl", - "url", - "headers", - "request", - "query", - "mediaType" - ]; - var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; - var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; - function graphql(request2, query, options) { + } + __name(ReadableState, "ReadableState"); + function Readable(options) { + Duplex = Duplex || require_stream_duplex(); + if (!(this instanceof Readable)) return new Readable(options); + this._readableState = new ReadableState(options, this); + this.readable = true; if (options) { - if (typeof query === "string" && "query" in options) { - return Promise.reject( - new Error(`[@octokit/graphql] "query" cannot be used as variable name`) - ); - } - for (const key in options) { - if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) - continue; - return Promise.reject( - new Error( - `[@octokit/graphql] "${key}" cannot be used as variable name` - ) - ); - } + if (typeof options.read === "function") this._read = options.read; + if (typeof options.destroy === "function") this._destroy = options.destroy; } - const parsedOptions = typeof query === "string" ? 
Object.assign({ query }, options) : query; - const requestOptions = Object.keys( - parsedOptions - ).reduce((result, key) => { - if (NON_VARIABLE_OPTIONS.includes(key)) { - result[key] = parsedOptions[key]; - return result; + Stream.call(this); + } + __name(Readable, "Readable"); + Object.defineProperty(Readable.prototype, "destroyed", { + get: /* @__PURE__ */ __name(function() { + if (this._readableState === void 0) { + return false; } - if (!result.variables) { - result.variables = {}; + return this._readableState.destroyed; + }, "get"), + set: /* @__PURE__ */ __name(function(value) { + if (!this._readableState) { + return; } - result.variables[key] = parsedOptions[key]; - return result; - }, {}); - const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; - if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { - requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + this._readableState.destroyed = value; + }, "set") + }); + Readable.prototype.destroy = destroyImpl.destroy; + Readable.prototype._undestroy = destroyImpl.undestroy; + Readable.prototype._destroy = function(err, cb) { + this.push(null); + cb(err); + }; + Readable.prototype.push = function(chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + if (!state.objectMode) { + if (typeof chunk === "string") { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer2.from(chunk, encoding); + encoding = ""; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; } - return request2(requestOptions).then((response) => { - if (response.data.errors) { - const headers = {}; - for (const key of Object.keys(response.headers)) { - headers[key] = response.headers[key]; + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); + }; + Readable.prototype.unshift = function(chunk) { + return readableAddChunk(this, chunk, null, true, false); + }; + function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit("error", er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== "string" && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer2.prototype) { + chunk = _uint8ArrayToBuffer(chunk); } - throw new GraphqlResponseError( - requestOptions, - headers, - response.data - ); + if (addToFront) { + if (state.endEmitted) stream.emit("error", new Error("stream.unshift() after end event")); + else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit("error", new Error("stream.push() after EOF")); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false); + else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; } - return response.data.data; - }); - } - __name(graphql, "graphql"); - function withDefaults(request2, newDefaults) { - const newRequest = request2.defaults(newDefaults); - const newApi = /* @__PURE__ */ __name((query, options) => { - return graphql(newRequest, query, options); - }, "newApi"); - return Object.assign(newApi, { - defaults: withDefaults.bind(null, newRequest), - endpoint: 
newRequest.endpoint - }); + } + return needMoreData(state); } - __name(withDefaults, "withDefaults"); - var graphql2 = withDefaults(import_request3.request, { - headers: { - "user-agent": `octokit-graphql.js/${VERSION3} ${(0, import_universal_user_agent.getUserAgent)()}` - }, - method: "POST", - url: "/graphql" - }); - function withCustomRequest(customRequest) { - return withDefaults(customRequest, { - method: "POST", - url: "/graphql" - }); + __name(readableAddChunk, "readableAddChunk"); + function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit("data", chunk); + stream.read(0); + } else { + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk); + else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); } - __name(withCustomRequest, "withCustomRequest"); - } -}); - -// ../node_modules/@actions/github/node_modules/@octokit/auth-token/dist-node/index.js -var require_dist_node7 = __commonJS({ - "../node_modules/@actions/github/node_modules/@octokit/auth-token/dist-node/index.js"(exports2, module2) { - "use strict"; - var __defProp2 = Object.defineProperty; - var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; - var __getOwnPropNames2 = Object.getOwnPropertyNames; - var __hasOwnProp2 = Object.prototype.hasOwnProperty; - var __export2 = /* @__PURE__ */ __name((target, all) => { - for (var name in all) - __defProp2(target, name, { get: all[name], enumerable: true }); - }, "__export"); - var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames2(from)) - if (!__hasOwnProp2.call(to, key) && key !== except) - __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); + __name(addChunk, "addChunk"); + function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== "string" && chunk !== void 0 && !state.objectMode) { + er = new TypeError("Invalid non-string/buffer chunk"); } - return to; - }, "__copyProps"); - var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); - var dist_src_exports = {}; - __export2(dist_src_exports, { - createTokenAuth: /* @__PURE__ */ __name(() => createTokenAuth, "createTokenAuth") - }); - module2.exports = __toCommonJS2(dist_src_exports); - var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; - var REGEX_IS_INSTALLATION = /^ghs_/; - var REGEX_IS_USER_TO_SERVER = /^ghu_/; - async function auth(token) { - const isApp = token.split(/\./).length === 3; - const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); - const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); - const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; - return { - type: "token", - token, - tokenType - }; + return er; } - __name(auth, "auth"); - function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; + __name(chunkInvalid, "chunkInvalid"); + function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); + } + __name(needMoreData, "needMoreData"); + Readable.prototype.isPaused = function() { + return this._readableState.flowing === false; + }; + Readable.prototype.setEncoding = function(enc) { + if (!StringDecoder) StringDecoder = require_string_decoder().StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; + }; + var MAX_HWM = 8388608; + function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; } - return `token ${token}`; + return n; } - __name(withAuthorizationPrefix, "withAuthorizationPrefix"); - async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge( - route, - parameters - ); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); + __name(computeNewHighWaterMark, "computeNewHighWaterMark"); + function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + if (state.flowing && state.length) return state.buffer.head.data.length; + else return state.length; + } + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; } - __name(hook, "hook"); - var createTokenAuth = /* @__PURE__ */ __name(function createTokenAuth2(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + __name(howMuchToRead, "howMuchToRead"); + Readable.prototype.read = function(n) { + debug("read", n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug("read: emitReadable", state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this); + else emitReadable(this); + return null; } - if (typeof token !== "string") { - throw new Error( - "[@octokit/auth-token] Token passed to createTokenAuth is not a string" - ); + n = howMuchToRead(n, state); + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; } - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); - }, "createTokenAuth2"); - } -}); - -// ../node_modules/@actions/github/node_modules/@octokit/core/dist-node/index.js -var require_dist_node8 = __commonJS({ - "../node_modules/@actions/github/node_modules/@octokit/core/dist-node/index.js"(exports2, module2) { - "use strict"; - var __defProp2 = Object.defineProperty; - var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; - var __getOwnPropNames2 = Object.getOwnPropertyNames; - var __hasOwnProp2 = Object.prototype.hasOwnProperty; - var __export2 = /* @__PURE__ */ __name((target, all) => { - for (var name in all) - __defProp2(target, name, { get: 
all[name], enumerable: true }); - }, "__export"); - var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames2(from)) - if (!__hasOwnProp2.call(to, key) && key !== except) - __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); + var doRead = state.needReadable; + debug("need readable", doRead); + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug("length less than watermark", doRead); } - return to; - }, "__copyProps"); - var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); - var dist_src_exports = {}; - __export2(dist_src_exports, { - Octokit: /* @__PURE__ */ __name(() => Octokit, "Octokit") - }); - module2.exports = __toCommonJS2(dist_src_exports); - var import_universal_user_agent = require_dist_node(); - var import_before_after_hook = require_before_after_hook(); - var import_request = require_dist_node5(); - var import_graphql = require_dist_node6(); - var import_auth_token = require_dist_node7(); - var VERSION3 = "5.2.0"; - var noop = /* @__PURE__ */ __name(() => { - }, "noop"); - var consoleWarn = console.warn.bind(console); - var consoleError = console.error.bind(console); - var userAgentTrail = `octokit-core.js/${VERSION3} ${(0, import_universal_user_agent.getUserAgent)()}`; - var Octokit = class { - static { - __name(this, "Octokit"); + if (state.ended || state.reading) { + doRead = false; + debug("reading or ended", doRead); + } else if (doRead) { + debug("do read"); + state.reading = true; + state.sync = true; + if (state.length === 0) state.needReadable = true; + this._read(state.highWaterMark); + state.sync = false; + if (!state.reading) n = howMuchToRead(nOrig, state); } - static { - this.VERSION = VERSION3; + var ret; + if (n > 0) ret = fromList(n, state); + else ret = null; + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; } - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - static { - __name(this, "OctokitWithDefaults"); - } - constructor(...args) { - const options = args[0] || {}; - if (typeof defaults === "function") { - super(defaults(options)); - return; - } - super( - Object.assign( - {}, - defaults, - options, - options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null - ) - ); - } - }; - return OctokitWithDefaults; + if (state.length === 0) { + if (!state.ended) state.needReadable = true; + if (nOrig !== n && state.ended) endReadable(this); } - static { - this.plugins = []; + if (ret !== null) this.emit("data", ret); + return ret; + }; + function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } } - /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
- */ - static plugin(...newPlugins) { - const currentPlugins = this.plugins; - const NewOctokit = class extends this { - static { - __name(this, "NewOctokit"); - } - static { - this.plugins = currentPlugins.concat( - newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) - ); + state.ended = true; + emitReadable(stream); + } + __name(onEofChunk, "onEofChunk"); + function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug("emitReadable", state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream); + else emitReadable_(stream); + } + } + __name(emitReadable, "emitReadable"); + function emitReadable_(stream) { + debug("emit readable"); + stream.emit("readable"); + flow(stream); + } + __name(emitReadable_, "emitReadable_"); + function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); + } + } + __name(maybeReadMore, "maybeReadMore"); + function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug("maybeReadMore read 0"); + stream.read(0); + if (len === state.length) + break; + else len = state.length; + } + state.readingMore = false; + } + __name(maybeReadMore_, "maybeReadMore_"); + Readable.prototype._read = function(n) { + this.emit("error", new Error("_read() is not implemented")); + }; + Readable.prototype.pipe = function(dest, pipeOpts) { + var src = this; + var state = this._readableState; + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug("pipe count=%d opts=%j", state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? 
onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn); + else src.once("end", endFn); + dest.on("unpipe", onunpipe); + function onunpipe(readable, unpipeInfo) { + debug("onunpipe"); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); } - }; - return NewOctokit; + } } - constructor(options = {}) { - const hook = new import_before_after_hook.Collection(); - const requestDefaults = { - baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, - headers: {}, - request: Object.assign({}, options.request, { - // @ts-ignore internal usage only, no need to type - hook: hook.bind(null, "request") - }), - mediaType: { - previews: [], - format: "" + __name(onunpipe, "onunpipe"); + function onend() { + debug("onend"); + dest.end(); + } + __name(onend, "onend"); + var ondrain = pipeOnDrain(src); + dest.on("drain", ondrain); + var cleanedUp = false; + function cleanup() { + debug("cleanup"); + dest.removeListener("close", onclose); + dest.removeListener("finish", onfinish); + dest.removeListener("drain", ondrain); + dest.removeListener("error", onerror); + dest.removeListener("unpipe", onunpipe); + src.removeListener("end", onend); + src.removeListener("end", unpipe); + src.removeListener("data", ondata); + cleanedUp = true; + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + __name(cleanup, "cleanup"); + var increasedAwaitDrain = false; + src.on("data", ondata); + function ondata(chunk) { + debug("ondata"); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug("false write response, pause", state.awaitDrain); + state.awaitDrain++; + increasedAwaitDrain = true; } - }; - requestDefaults.headers["user-agent"] = options.userAgent ? 
`${options.userAgent} ${userAgentTrail}` : userAgentTrail; - if (options.baseUrl) { - requestDefaults.baseUrl = options.baseUrl; + src.pause(); } - if (options.previews) { - requestDefaults.mediaType.previews = options.previews; + } + __name(ondata, "ondata"); + function onerror(er) { + debug("onerror", er); + unpipe(); + dest.removeListener("error", onerror); + if (EElistenerCount(dest, "error") === 0) dest.emit("error", er); + } + __name(onerror, "onerror"); + prependListener(dest, "error", onerror); + function onclose() { + dest.removeListener("finish", onfinish); + unpipe(); + } + __name(onclose, "onclose"); + dest.once("close", onclose); + function onfinish() { + debug("onfinish"); + dest.removeListener("close", onclose); + unpipe(); + } + __name(onfinish, "onfinish"); + dest.once("finish", onfinish); + function unpipe() { + debug("unpipe"); + src.unpipe(dest); + } + __name(unpipe, "unpipe"); + dest.emit("pipe", src); + if (!state.flowing) { + debug("pipe resume"); + src.resume(); + } + return dest; + }; + function pipeOnDrain(src) { + return function() { + var state = src._readableState; + debug("pipeOnDrain", state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, "data")) { + state.flowing = true; + flow(src); } - if (options.timeZone) { - requestDefaults.headers["time-zone"] = options.timeZone; + }; + } + __name(pipeOnDrain, "pipeOnDrain"); + Readable.prototype.unpipe = function(dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; + if (state.pipesCount === 0) return this; + if (state.pipesCount === 1) { + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit("unpipe", this, unpipeInfo); + return this; + } + if (!dest) { + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + for (var i = 0; i < len; i++) { + dests[i].emit("unpipe", this, { hasUnpiped: false }); } - this.request = import_request.request.defaults(requestDefaults); - this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); - this.log = Object.assign( - { - debug: noop, - info: noop, - warn: consoleWarn, - error: consoleError - }, - options.log - ); - this.hook = hook; - if (!options.authStrategy) { - if (!options.auth) { - this.auth = async () => ({ - type: "unauthenticated" - }); - } else { - const auth = (0, import_auth_token.createTokenAuth)(options.auth); - hook.wrap("request", auth.hook); - this.auth = auth; + return this; + } + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit("unpipe", this, unpipeInfo); + return this; + }; + Readable.prototype.on = function(ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + if (ev === "data") { + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === "readable") { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); } - } else { - const { authStrategy, ...otherOptions } = options; - const auth = authStrategy( - Object.assign( - { - 
request: this.request, - log: this.log, - // we pass the current octokit instance as well as its constructor options - // to allow for authentication strategies that return a new octokit instance - // that shares the same internal state as the current one. The original - // requirement for this was the "event-octokit" authentication strategy - // of https://github.com/probot/octokit-auth-probot. - octokit: this, - octokitOptions: otherOptions - }, - options.auth - ) - ); - hook.wrap("request", auth.hook); - this.auth = auth; - } - const classConstructor = this.constructor; - for (let i = 0; i < classConstructor.plugins.length; ++i) { - Object.assign(this, classConstructor.plugins[i](this, options)); } } + return res; }; - } -}); - -// ../node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js -var require_dist_node9 = __commonJS({ - "../node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js"(exports2, module2) { - "use strict"; - var __defProp2 = Object.defineProperty; - var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; - var __getOwnPropNames2 = Object.getOwnPropertyNames; - var __hasOwnProp2 = Object.prototype.hasOwnProperty; - var __export2 = /* @__PURE__ */ __name((target, all) => { - for (var name in all) - __defProp2(target, name, { get: all[name], enumerable: true }); - }, "__export"); - var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames2(from)) - if (!__hasOwnProp2.call(to, key) && key !== except) - __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); + Readable.prototype.addListener = Readable.prototype.on; + function nReadingNextTick(self2) { + debug("readable nexttick read 0"); + self2.read(0); + } + __name(nReadingNextTick, "nReadingNextTick"); + Readable.prototype.resume = function() { + var state = this._readableState; + if (!state.flowing) { + debug("resume"); + state.flowing = true; + resume(this, state); } - return to; - }, "__copyProps"); - var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); - var dist_src_exports = {}; - __export2(dist_src_exports, { - legacyRestEndpointMethods: /* @__PURE__ */ __name(() => legacyRestEndpointMethods, "legacyRestEndpointMethods"), - restEndpointMethods: /* @__PURE__ */ __name(() => restEndpointMethods, "restEndpointMethods") - }); - module2.exports = __toCommonJS2(dist_src_exports); - var VERSION3 = "10.4.1"; - var Endpoints = { - actions: { - addCustomLabelsToSelfHostedRunnerForOrg: [ - "POST /orgs/{org}/actions/runners/{runner_id}/labels" - ], - addCustomLabelsToSelfHostedRunnerForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" - ], - addSelectedRepoToOrgSecret: [ - "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" - ], - addSelectedRepoToOrgVariable: [ - "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" - ], - approveWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" - ], - cancelWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" - ], - createEnvironmentVariable: [ - "POST /repositories/{repository_id}/environments/{environment_name}/variables" - ], - createOrUpdateEnvironmentSecret: [ - "PUT 
/repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" - ], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: [ - "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" - ], - createOrgVariable: ["POST /orgs/{org}/actions/variables"], - createRegistrationTokenForOrg: [ - "POST /orgs/{org}/actions/runners/registration-token" - ], - createRegistrationTokenForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/registration-token" - ], - createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/remove-token" - ], - createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], - createWorkflowDispatch: [ - "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" - ], - deleteActionsCacheById: [ - "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" - ], - deleteActionsCacheByKey: [ - "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" - ], - deleteArtifact: [ - "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" - ], - deleteEnvironmentSecret: [ - "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" - ], - deleteEnvironmentVariable: [ - "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" - ], - deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], - deleteRepoSecret: [ - "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" - ], - deleteRepoVariable: [ - "DELETE /repos/{owner}/{repo}/actions/variables/{name}" - ], - deleteSelfHostedRunnerFromOrg: [ - "DELETE /orgs/{org}/actions/runners/{runner_id}" - ], - deleteSelfHostedRunnerFromRepo: [ - "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" - ], - deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: [ - "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" - ], - disableSelectedRepositoryGithubActionsOrganization: [ - "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" - ], - disableWorkflow: [ - "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" - ], - downloadArtifact: [ - "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" - ], - downloadJobLogsForWorkflowRun: [ - "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" - ], - downloadWorkflowRunAttemptLogs: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" - ], - downloadWorkflowRunLogs: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" - ], - enableSelectedRepositoryGithubActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" - ], - enableWorkflow: [ - "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" - ], - forceCancelWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" - ], - generateRunnerJitconfigForOrg: [ - "POST /orgs/{org}/actions/runners/generate-jitconfig" - ], - generateRunnerJitconfigForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" - ], - getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], - getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], - getActionsCacheUsageByRepoForOrg: [ - "GET /orgs/{org}/actions/cache/usage-by-repository" - ], - getActionsCacheUsageForOrg: ["GET 
/orgs/{org}/actions/cache/usage"], - getAllowedActionsOrganization: [ - "GET /orgs/{org}/actions/permissions/selected-actions" - ], - getAllowedActionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" - ], - getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getCustomOidcSubClaimForRepo: [ - "GET /repos/{owner}/{repo}/actions/oidc/customization/sub" - ], - getEnvironmentPublicKey: [ - "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" - ], - getEnvironmentSecret: [ - "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" - ], - getEnvironmentVariable: [ - "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" - ], - getGithubActionsDefaultWorkflowPermissionsOrganization: [ - "GET /orgs/{org}/actions/permissions/workflow" - ], - getGithubActionsDefaultWorkflowPermissionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions/workflow" - ], - getGithubActionsPermissionsOrganization: [ - "GET /orgs/{org}/actions/permissions" - ], - getGithubActionsPermissionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions" - ], - getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], - getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], - getPendingDeploymentsForRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" - ], - getRepoPermissions: [ - "GET /repos/{owner}/{repo}/actions/permissions", - {}, - { renamed: ["actions", "getGithubActionsPermissionsRepository"] } - ], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], - getReviewsForRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" - ], - getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" - ], - getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowAccessToRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions/access" - ], - getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunAttempt: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" - ], - getWorkflowRunUsage: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" - ], - getWorkflowUsage: [ - "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" - ], - listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listEnvironmentSecrets: [ - "GET /repositories/{repository_id}/environments/{environment_name}/secrets" - ], - listEnvironmentVariables: [ - "GET /repositories/{repository_id}/environments/{environment_name}/variables" - ], - listJobsForWorkflowRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" - ], - listJobsForWorkflowRunAttempt: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" - ], - listLabelsForSelfHostedRunnerForOrg: [ - "GET /orgs/{org}/actions/runners/{runner_id}/labels" - ], - listLabelsForSelfHostedRunnerForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" - ], - listOrgSecrets: ["GET 
/orgs/{org}/actions/secrets"], - listOrgVariables: ["GET /orgs/{org}/actions/variables"], - listRepoOrganizationSecrets: [ - "GET /repos/{owner}/{repo}/actions/organization-secrets" - ], - listRepoOrganizationVariables: [ - "GET /repos/{owner}/{repo}/actions/organization-variables" - ], - listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], - listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], - listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], - listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/downloads" - ], - listSelectedReposForOrgSecret: [ - "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" - ], - listSelectedReposForOrgVariable: [ - "GET /orgs/{org}/actions/variables/{name}/repositories" - ], - listSelectedRepositoriesEnabledGithubActionsOrganization: [ - "GET /orgs/{org}/actions/permissions/repositories" - ], - listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], - listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" - ], - listWorkflowRuns: [ - "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" - ], - listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunJobForWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" - ], - reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - reRunWorkflowFailedJobs: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" - ], - removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ - "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" - ], - removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ - "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" - ], - removeCustomLabelFromSelfHostedRunnerForOrg: [ - "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" - ], - removeCustomLabelFromSelfHostedRunnerForRepo: [ - "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" - ], - removeSelectedRepoFromOrgSecret: [ - "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" - ], - removeSelectedRepoFromOrgVariable: [ - "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" - ], - reviewCustomGatesForRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" - ], - reviewPendingDeploymentsForRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" - ], - setAllowedActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/selected-actions" - ], - setAllowedActionsRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" - ], - setCustomLabelsForSelfHostedRunnerForOrg: [ - "PUT /orgs/{org}/actions/runners/{runner_id}/labels" - ], - setCustomLabelsForSelfHostedRunnerForRepo: [ - "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" - ], - setCustomOidcSubClaimForRepo: [ - "PUT /repos/{owner}/{repo}/actions/oidc/customization/sub" - ], - setGithubActionsDefaultWorkflowPermissionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/workflow" - ], - setGithubActionsDefaultWorkflowPermissionsRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions/workflow" - ], - setGithubActionsPermissionsOrganization: [ - "PUT /orgs/{org}/actions/permissions" - ], - setGithubActionsPermissionsRepository: [ - "PUT 
/repos/{owner}/{repo}/actions/permissions" - ], - setSelectedReposForOrgSecret: [ - "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" - ], - setSelectedReposForOrgVariable: [ - "PUT /orgs/{org}/actions/variables/{name}/repositories" - ], - setSelectedRepositoriesEnabledGithubActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/repositories" - ], - setWorkflowAccessToRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions/access" - ], - updateEnvironmentVariable: [ - "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" - ], - updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], - updateRepoVariable: [ - "PATCH /repos/{owner}/{repo}/actions/variables/{name}" - ] - }, - activity: { - checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], - deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: [ - "DELETE /notifications/threads/{thread_id}/subscription" - ], - getFeeds: ["GET /feeds"], - getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], - getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: [ - "GET /notifications/threads/{thread_id}/subscription" - ], - listEventsForAuthenticatedUser: ["GET /users/{username}/events"], - listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: [ - "GET /users/{username}/events/orgs/{org}" - ], - listPublicEvents: ["GET /events"], - listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], - listPublicEventsForUser: ["GET /users/{username}/events/public"], - listPublicOrgEvents: ["GET /orgs/{org}/events"], - listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: [ - "GET /users/{username}/received_events/public" - ], - listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/notifications" - ], - listReposStarredByAuthenticatedUser: ["GET /user/starred"], - listReposStarredByUser: ["GET /users/{username}/starred"], - listReposWatchedByUser: ["GET /users/{username}/subscriptions"], - listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], - listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], - listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], - markNotificationsAsRead: ["PUT /notifications"], - markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], - markThreadAsDone: ["DELETE /notifications/threads/{thread_id}"], - markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], - setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: [ - "PUT /notifications/threads/{thread_id}/subscription" - ], - starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], - unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] - }, - apps: { - addRepoToInstallation: [ - "PUT /user/installations/{installation_id}/repositories/{repository_id}", - {}, - { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] } - ], - addRepoToInstallationForAuthenticatedUser: [ - "PUT /user/installations/{installation_id}/repositories/{repository_id}" - ], - checkToken: ["POST /applications/{client_id}/token"], - createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: [ - "POST /app/installations/{installation_id}/access_tokens" - ], - 
deleteAuthorization: ["DELETE /applications/{client_id}/grant"], - deleteInstallation: ["DELETE /app/installations/{installation_id}"], - deleteToken: ["DELETE /applications/{client_id}/token"], - getAuthenticated: ["GET /app"], - getBySlug: ["GET /apps/{app_slug}"], - getInstallation: ["GET /app/installations/{installation_id}"], - getOrgInstallation: ["GET /orgs/{org}/installation"], - getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], - getSubscriptionPlanForAccount: [ - "GET /marketplace_listing/accounts/{account_id}" - ], - getSubscriptionPlanForAccountStubbed: [ - "GET /marketplace_listing/stubbed/accounts/{account_id}" - ], - getUserInstallation: ["GET /users/{username}/installation"], - getWebhookConfigForApp: ["GET /app/hook/config"], - getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], - listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: [ - "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" - ], - listInstallationReposForAuthenticatedUser: [ - "GET /user/installations/{installation_id}/repositories" - ], - listInstallationRequestsForAuthenticatedApp: [ - "GET /app/installation-requests" - ], - listInstallations: ["GET /app/installations"], - listInstallationsForAuthenticatedUser: ["GET /user/installations"], - listPlans: ["GET /marketplace_listing/plans"], - listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], - listReposAccessibleToInstallation: ["GET /installation/repositories"], - listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: [ - "GET /user/marketplace_purchases/stubbed" - ], - listWebhookDeliveries: ["GET /app/hook/deliveries"], - redeliverWebhookDelivery: [ - "POST /app/hook/deliveries/{delivery_id}/attempts" - ], - removeRepoFromInstallation: [ - "DELETE /user/installations/{installation_id}/repositories/{repository_id}", - {}, - { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } - ], - removeRepoFromInstallationForAuthenticatedUser: [ - "DELETE /user/installations/{installation_id}/repositories/{repository_id}" - ], - resetToken: ["PATCH /applications/{client_id}/token"], - revokeInstallationAccessToken: ["DELETE /installation/token"], - scopeToken: ["POST /applications/{client_id}/token/scoped"], - suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: [ - "DELETE /app/installations/{installation_id}/suspended" - ], - updateWebhookConfigForApp: ["PATCH /app/hook/config"] - }, - billing: { - getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], - getGithubActionsBillingUser: [ - "GET /users/{username}/settings/billing/actions" - ], - getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], - getGithubPackagesBillingUser: [ - "GET /users/{username}/settings/billing/packages" - ], - getSharedStorageBillingOrg: [ - "GET /orgs/{org}/settings/billing/shared-storage" - ], - getSharedStorageBillingUser: [ - "GET /users/{username}/settings/billing/shared-storage" - ] - }, - checks: { - create: ["POST /repos/{owner}/{repo}/check-runs"], - createSuite: ["POST /repos/{owner}/{repo}/check-suites"], - get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], - getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], - listAnnotations: [ - "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" - ], - listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], - 
listForSuite: [ - "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" - ], - listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], - rerequestRun: [ - "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" - ], - rerequestSuite: [ - "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" - ], - setSuitesPreferences: [ - "PATCH /repos/{owner}/{repo}/check-suites/preferences" - ], - update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] - }, - codeScanning: { - deleteAnalysis: [ - "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" - ], - getAlert: [ - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", - {}, - { renamedParameters: { alert_id: "alert_number" } } - ], - getAnalysis: [ - "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" - ], - getCodeqlDatabase: [ - "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" - ], - getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], - getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], - listAlertInstances: [ - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" - ], - listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], - listAlertsInstances: [ - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", - {}, - { renamed: ["codeScanning", "listAlertInstances"] } - ], - listCodeqlDatabases: [ - "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" - ], - listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], - updateAlert: [ - "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" - ], - updateDefaultSetup: [ - "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" - ], - uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] - }, - codesOfConduct: { - getAllCodesOfConduct: ["GET /codes_of_conduct"], - getConductCode: ["GET /codes_of_conduct/{key}"] - }, - codespaces: { - addRepositoryForSecretForAuthenticatedUser: [ - "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" - ], - addSelectedRepoToOrgSecret: [ - "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" - ], - checkPermissionsForDevcontainer: [ - "GET /repos/{owner}/{repo}/codespaces/permissions_check" - ], - codespaceMachinesForAuthenticatedUser: [ - "GET /user/codespaces/{codespace_name}/machines" - ], - createForAuthenticatedUser: ["POST /user/codespaces"], - createOrUpdateOrgSecret: [ - "PUT /orgs/{org}/codespaces/secrets/{secret_name}" - ], - createOrUpdateRepoSecret: [ - "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" - ], - createOrUpdateSecretForAuthenticatedUser: [ - "PUT /user/codespaces/secrets/{secret_name}" - ], - createWithPrForAuthenticatedUser: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" - ], - createWithRepoForAuthenticatedUser: [ - "POST /repos/{owner}/{repo}/codespaces" - ], - deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], - deleteFromOrganization: [ - "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" - ], - deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], - deleteRepoSecret: [ - "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" - ], - deleteSecretForAuthenticatedUser: [ - "DELETE /user/codespaces/secrets/{secret_name}" - ], - exportForAuthenticatedUser: [ 
- "POST /user/codespaces/{codespace_name}/exports" - ], - getCodespacesForUserInOrg: [ - "GET /orgs/{org}/members/{username}/codespaces" - ], - getExportDetailsForAuthenticatedUser: [ - "GET /user/codespaces/{codespace_name}/exports/{export_id}" - ], - getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], - getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], - getPublicKeyForAuthenticatedUser: [ - "GET /user/codespaces/secrets/public-key" - ], - getRepoPublicKey: [ - "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" - ], - getRepoSecret: [ - "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" - ], - getSecretForAuthenticatedUser: [ - "GET /user/codespaces/secrets/{secret_name}" - ], - listDevcontainersInRepositoryForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/codespaces/devcontainers" - ], - listForAuthenticatedUser: ["GET /user/codespaces"], - listInOrganization: [ - "GET /orgs/{org}/codespaces", - {}, - { renamedParameters: { org_id: "org" } } - ], - listInRepositoryForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/codespaces" - ], - listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], - listRepositoriesForSecretForAuthenticatedUser: [ - "GET /user/codespaces/secrets/{secret_name}/repositories" - ], - listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], - listSelectedReposForOrgSecret: [ - "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" - ], - preFlightWithRepoForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/codespaces/new" - ], - publishForAuthenticatedUser: [ - "POST /user/codespaces/{codespace_name}/publish" - ], - removeRepositoryForSecretForAuthenticatedUser: [ - "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" - ], - removeSelectedRepoFromOrgSecret: [ - "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" - ], - repoMachinesForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/codespaces/machines" - ], - setRepositoriesForSecretForAuthenticatedUser: [ - "PUT /user/codespaces/secrets/{secret_name}/repositories" - ], - setSelectedReposForOrgSecret: [ - "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" - ], - startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], - stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], - stopInOrganization: [ - "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" - ], - updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] - }, - copilot: { - addCopilotSeatsForTeams: [ - "POST /orgs/{org}/copilot/billing/selected_teams" - ], - addCopilotSeatsForUsers: [ - "POST /orgs/{org}/copilot/billing/selected_users" - ], - cancelCopilotSeatAssignmentForTeams: [ - "DELETE /orgs/{org}/copilot/billing/selected_teams" - ], - cancelCopilotSeatAssignmentForUsers: [ - "DELETE /orgs/{org}/copilot/billing/selected_users" - ], - getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"], - getCopilotSeatDetailsForUser: [ - "GET /orgs/{org}/members/{username}/copilot" - ], - listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"] - }, - dependabot: { - addSelectedRepoToOrgSecret: [ - "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" - ], - createOrUpdateOrgSecret: [ - "PUT /orgs/{org}/dependabot/secrets/{secret_name}" - ], - createOrUpdateRepoSecret: [ - "PUT 
/repos/{owner}/{repo}/dependabot/secrets/{secret_name}" - ], - deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], - deleteRepoSecret: [ - "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" - ], - getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], - getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], - getRepoPublicKey: [ - "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" - ], - getRepoSecret: [ - "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" - ], - listAlertsForEnterprise: [ - "GET /enterprises/{enterprise}/dependabot/alerts" - ], - listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], - listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], - listSelectedReposForOrgSecret: [ - "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" - ], - removeSelectedRepoFromOrgSecret: [ - "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" - ], - setSelectedReposForOrgSecret: [ - "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" - ], - updateAlert: [ - "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" - ] - }, - dependencyGraph: { - createRepositorySnapshot: [ - "POST /repos/{owner}/{repo}/dependency-graph/snapshots" - ], - diffRange: [ - "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" - ], - exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] - }, - emojis: { get: ["GET /emojis"] }, - gists: { - checkIsStarred: ["GET /gists/{gist_id}/star"], - create: ["POST /gists"], - createComment: ["POST /gists/{gist_id}/comments"], - delete: ["DELETE /gists/{gist_id}"], - deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], - fork: ["POST /gists/{gist_id}/forks"], - get: ["GET /gists/{gist_id}"], - getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], - getRevision: ["GET /gists/{gist_id}/{sha}"], - list: ["GET /gists"], - listComments: ["GET /gists/{gist_id}/comments"], - listCommits: ["GET /gists/{gist_id}/commits"], - listForUser: ["GET /users/{username}/gists"], - listForks: ["GET /gists/{gist_id}/forks"], - listPublic: ["GET /gists/public"], - listStarred: ["GET /gists/starred"], - star: ["PUT /gists/{gist_id}/star"], - unstar: ["DELETE /gists/{gist_id}/star"], - update: ["PATCH /gists/{gist_id}"], - updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] - }, - git: { - createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], - createCommit: ["POST /repos/{owner}/{repo}/git/commits"], - createRef: ["POST /repos/{owner}/{repo}/git/refs"], - createTag: ["POST /repos/{owner}/{repo}/git/tags"], - createTree: ["POST /repos/{owner}/{repo}/git/trees"], - deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], - getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], - getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], - getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], - getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], - getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], - listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], - updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] - }, - gitignore: { - getAllTemplates: ["GET /gitignore/templates"], - getTemplate: ["GET /gitignore/templates/{name}"] - }, - interactions: { - 
getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], - getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], - getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], - getRestrictionsForYourPublicRepos: [ - "GET /user/interaction-limits", - {}, - { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } - ], - removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], - removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], - removeRestrictionsForRepo: [ - "DELETE /repos/{owner}/{repo}/interaction-limits" - ], - removeRestrictionsForYourPublicRepos: [ - "DELETE /user/interaction-limits", - {}, - { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } - ], - setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], - setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], - setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], - setRestrictionsForYourPublicRepos: [ - "PUT /user/interaction-limits", - {}, - { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } - ] - }, - issues: { - addAssignees: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" - ], - addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], - checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], - checkUserCanBeAssignedToIssue: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" - ], - create: ["POST /repos/{owner}/{repo}/issues"], - createComment: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" - ], - createLabel: ["POST /repos/{owner}/{repo}/labels"], - createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: [ - "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" - ], - deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: [ - "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" - ], - get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], - getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], - getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], - getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], - getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], - list: ["GET /issues"], - listAssignees: ["GET /repos/{owner}/{repo}/assignees"], - listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], - listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], - listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], - listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" - ], - listForAuthenticatedUser: ["GET /user/issues"], - listForOrg: ["GET /orgs/{org}/issues"], - listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: [ - "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" - ], - listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" - ], - listMilestones: ["GET /repos/{owner}/{repo}/milestones"], - lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" - ], - removeAssignees: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" - ], - removeLabel: [ - "DELETE 
/repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" - ], - setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], - unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], - update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], - updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], - updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: [ - "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" - ] - }, - licenses: { - get: ["GET /licenses/{license}"], - getAllCommonlyUsed: ["GET /licenses"], - getForRepo: ["GET /repos/{owner}/{repo}/license"] - }, - markdown: { - render: ["POST /markdown"], - renderRaw: [ - "POST /markdown/raw", - { headers: { "content-type": "text/plain; charset=utf-8" } } - ] - }, - meta: { - get: ["GET /meta"], - getAllVersions: ["GET /versions"], - getOctocat: ["GET /octocat"], - getZen: ["GET /zen"], - root: ["GET /"] - }, - migrations: { - cancelImport: [ - "DELETE /repos/{owner}/{repo}/import", - {}, - { - deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import" - } - ], - deleteArchiveForAuthenticatedUser: [ - "DELETE /user/migrations/{migration_id}/archive" - ], - deleteArchiveForOrg: [ - "DELETE /orgs/{org}/migrations/{migration_id}/archive" - ], - downloadArchiveForOrg: [ - "GET /orgs/{org}/migrations/{migration_id}/archive" - ], - getArchiveForAuthenticatedUser: [ - "GET /user/migrations/{migration_id}/archive" - ], - getCommitAuthors: [ - "GET /repos/{owner}/{repo}/import/authors", - {}, - { - deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors" - } - ], - getImportStatus: [ - "GET /repos/{owner}/{repo}/import", - {}, - { - deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status" - } - ], - getLargeFiles: [ - "GET /repos/{owner}/{repo}/import/large_files", - {}, - { - deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files" - } - ], - getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], - getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], - listForAuthenticatedUser: ["GET /user/migrations"], - listForOrg: ["GET /orgs/{org}/migrations"], - listReposForAuthenticatedUser: [ - "GET /user/migrations/{migration_id}/repositories" - ], - listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], - listReposForUser: [ - "GET /user/migrations/{migration_id}/repositories", - {}, - { renamed: ["migrations", "listReposForAuthenticatedUser"] } - ], - mapCommitAuthor: [ - "PATCH /repos/{owner}/{repo}/import/authors/{author_id}", - {}, - { - deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author" - } - ], - setLfsPreference: [ - "PATCH /repos/{owner}/{repo}/import/lfs", - {}, - { - deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference" - } - ], - startForAuthenticatedUser: ["POST /user/migrations"], - startForOrg: ["POST /orgs/{org}/migrations"], - startImport: [ - "PUT /repos/{owner}/{repo}/import", - {}, - { - deprecated: "octokit.rest.migrations.startImport() is deprecated, 
see https://docs.github.com/rest/migrations/source-imports#start-an-import" - } - ], - unlockRepoForAuthenticatedUser: [ - "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" - ], - unlockRepoForOrg: [ - "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" - ], - updateImport: [ - "PATCH /repos/{owner}/{repo}/import", - {}, - { - deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import" - } - ] - }, - oidc: { - getOidcCustomSubTemplateForOrg: [ - "GET /orgs/{org}/actions/oidc/customization/sub" - ], - updateOidcCustomSubTemplateForOrg: [ - "PUT /orgs/{org}/actions/oidc/customization/sub" - ] - }, - orgs: { - addSecurityManagerTeam: [ - "PUT /orgs/{org}/security-managers/teams/{team_slug}" - ], - assignTeamToOrgRole: [ - "PUT /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}" - ], - assignUserToOrgRole: [ - "PUT /orgs/{org}/organization-roles/users/{username}/{role_id}" - ], - blockUser: ["PUT /orgs/{org}/blocks/{username}"], - cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], - checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], - checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], - checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - convertMemberToOutsideCollaborator: [ - "PUT /orgs/{org}/outside_collaborators/{username}" - ], - createCustomOrganizationRole: ["POST /orgs/{org}/organization-roles"], - createInvitation: ["POST /orgs/{org}/invitations"], - createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"], - createOrUpdateCustomPropertiesValuesForRepos: [ - "PATCH /orgs/{org}/properties/values" - ], - createOrUpdateCustomProperty: [ - "PUT /orgs/{org}/properties/schema/{custom_property_name}" - ], - createWebhook: ["POST /orgs/{org}/hooks"], - delete: ["DELETE /orgs/{org}"], - deleteCustomOrganizationRole: [ - "DELETE /orgs/{org}/organization-roles/{role_id}" - ], - deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], - enableOrDisableSecurityProductOnAllOrgRepos: [ - "POST /orgs/{org}/{security_product}/{enablement}" - ], - get: ["GET /orgs/{org}"], - getAllCustomProperties: ["GET /orgs/{org}/properties/schema"], - getCustomProperty: [ - "GET /orgs/{org}/properties/schema/{custom_property_name}" - ], - getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], - getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], - getOrgRole: ["GET /orgs/{org}/organization-roles/{role_id}"], - getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], - getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], - getWebhookDelivery: [ - "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" - ], - list: ["GET /organizations"], - listAppInstallations: ["GET /orgs/{org}/installations"], - listBlockedUsers: ["GET /orgs/{org}/blocks"], - listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"], - listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], - listForAuthenticatedUser: ["GET /user/orgs"], - listForUser: ["GET /users/{username}/orgs"], - listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], - listMembers: ["GET /orgs/{org}/members"], - listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], - listOrgRoleTeams: ["GET /orgs/{org}/organization-roles/{role_id}/teams"], - listOrgRoleUsers: ["GET /orgs/{org}/organization-roles/{role_id}/users"], - listOrgRoles: ["GET /orgs/{org}/organization-roles"], - 
listOrganizationFineGrainedPermissions: [ - "GET /orgs/{org}/organization-fine-grained-permissions" - ], - listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], - listPatGrantRepositories: [ - "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" - ], - listPatGrantRequestRepositories: [ - "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" - ], - listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"], - listPatGrants: ["GET /orgs/{org}/personal-access-tokens"], - listPendingInvitations: ["GET /orgs/{org}/invitations"], - listPublicMembers: ["GET /orgs/{org}/public_members"], - listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], - listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], - listWebhooks: ["GET /orgs/{org}/hooks"], - patchCustomOrganizationRole: [ - "PATCH /orgs/{org}/organization-roles/{role_id}" - ], - pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: [ - "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" - ], - removeCustomProperty: [ - "DELETE /orgs/{org}/properties/schema/{custom_property_name}" - ], - removeMember: ["DELETE /orgs/{org}/members/{username}"], - removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - removeOutsideCollaborator: [ - "DELETE /orgs/{org}/outside_collaborators/{username}" - ], - removePublicMembershipForAuthenticatedUser: [ - "DELETE /orgs/{org}/public_members/{username}" - ], - removeSecurityManagerTeam: [ - "DELETE /orgs/{org}/security-managers/teams/{team_slug}" - ], - reviewPatGrantRequest: [ - "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}" - ], - reviewPatGrantRequestsInBulk: [ - "POST /orgs/{org}/personal-access-token-requests" - ], - revokeAllOrgRolesTeam: [ - "DELETE /orgs/{org}/organization-roles/teams/{team_slug}" - ], - revokeAllOrgRolesUser: [ - "DELETE /orgs/{org}/organization-roles/users/{username}" - ], - revokeOrgRoleTeam: [ - "DELETE /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}" - ], - revokeOrgRoleUser: [ - "DELETE /orgs/{org}/organization-roles/users/{username}/{role_id}" - ], - setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - setPublicMembershipForAuthenticatedUser: [ - "PUT /orgs/{org}/public_members/{username}" - ], - unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], - update: ["PATCH /orgs/{org}"], - updateMembershipForAuthenticatedUser: [ - "PATCH /user/memberships/orgs/{org}" - ], - updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"], - updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"], - updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], - updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] - }, - packages: { - deletePackageForAuthenticatedUser: [ - "DELETE /user/packages/{package_type}/{package_name}" - ], - deletePackageForOrg: [ - "DELETE /orgs/{org}/packages/{package_type}/{package_name}" - ], - deletePackageForUser: [ - "DELETE /users/{username}/packages/{package_type}/{package_name}" - ], - deletePackageVersionForAuthenticatedUser: [ - "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - deletePackageVersionForOrg: [ - "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - deletePackageVersionForUser: [ - "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - getAllPackageVersionsForAPackageOwnedByAnOrg: [ - "GET 
/orgs/{org}/packages/{package_type}/{package_name}/versions", - {}, - { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } - ], - getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}/versions", - {}, - { - renamed: [ - "packages", - "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" - ] - } - ], - getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}/versions" - ], - getAllPackageVersionsForPackageOwnedByOrg: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" - ], - getAllPackageVersionsForPackageOwnedByUser: [ - "GET /users/{username}/packages/{package_type}/{package_name}/versions" - ], - getPackageForAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}" - ], - getPackageForOrganization: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}" - ], - getPackageForUser: [ - "GET /users/{username}/packages/{package_type}/{package_name}" - ], - getPackageVersionForAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - getPackageVersionForOrganization: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - getPackageVersionForUser: [ - "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - listDockerMigrationConflictingPackagesForAuthenticatedUser: [ - "GET /user/docker/conflicts" - ], - listDockerMigrationConflictingPackagesForOrganization: [ - "GET /orgs/{org}/docker/conflicts" - ], - listDockerMigrationConflictingPackagesForUser: [ - "GET /users/{username}/docker/conflicts" - ], - listPackagesForAuthenticatedUser: ["GET /user/packages"], - listPackagesForOrganization: ["GET /orgs/{org}/packages"], - listPackagesForUser: ["GET /users/{username}/packages"], - restorePackageForAuthenticatedUser: [ - "POST /user/packages/{package_type}/{package_name}/restore{?token}" - ], - restorePackageForOrg: [ - "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" - ], - restorePackageForUser: [ - "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" - ], - restorePackageVersionForAuthenticatedUser: [ - "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" - ], - restorePackageVersionForOrg: [ - "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" - ], - restorePackageVersionForUser: [ - "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" - ] - }, - projects: { - addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], - createCard: ["POST /projects/columns/{column_id}/cards"], - createColumn: ["POST /projects/{project_id}/columns"], - createForAuthenticatedUser: ["POST /user/projects"], - createForOrg: ["POST /orgs/{org}/projects"], - createForRepo: ["POST /repos/{owner}/{repo}/projects"], - delete: ["DELETE /projects/{project_id}"], - deleteCard: ["DELETE /projects/columns/cards/{card_id}"], - deleteColumn: ["DELETE /projects/columns/{column_id}"], - get: ["GET /projects/{project_id}"], - getCard: ["GET /projects/columns/cards/{card_id}"], - getColumn: ["GET /projects/columns/{column_id}"], - getPermissionForUser: [ - "GET /projects/{project_id}/collaborators/{username}/permission" - ], - listCards: ["GET /projects/columns/{column_id}/cards"], - listCollaborators: ["GET 
/projects/{project_id}/collaborators"], - listColumns: ["GET /projects/{project_id}/columns"], - listForOrg: ["GET /orgs/{org}/projects"], - listForRepo: ["GET /repos/{owner}/{repo}/projects"], - listForUser: ["GET /users/{username}/projects"], - moveCard: ["POST /projects/columns/cards/{card_id}/moves"], - moveColumn: ["POST /projects/columns/{column_id}/moves"], - removeCollaborator: [ - "DELETE /projects/{project_id}/collaborators/{username}" - ], - update: ["PATCH /projects/{project_id}"], - updateCard: ["PATCH /projects/columns/cards/{card_id}"], - updateColumn: ["PATCH /projects/columns/{column_id}"] - }, - pulls: { - checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - create: ["POST /repos/{owner}/{repo}/pulls"], - createReplyForReviewComment: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" - ], - createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" - ], - deletePendingReview: [ - "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" - ], - deleteReviewComment: [ - "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" - ], - dismissReview: [ - "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" - ], - get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getReview: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" - ], - getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - list: ["GET /repos/{owner}/{repo}/pulls"], - listCommentsForReview: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" - ], - listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], - listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" - ], - listReviewComments: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" - ], - listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], - listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: [ - "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" - ], - requestReviewers: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" - ], - submitReview: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" - ], - update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: [ - "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" - ], - updateReview: [ - "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" - ], - updateReviewComment: [ - "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" - ] - }, - rateLimit: { get: ["GET /rate_limit"] }, - reactions: { - createForCommitComment: [ - "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" - ], - createForIssue: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" - ], - createForIssueComment: [ - "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" - ], - createForPullRequestReviewComment: [ - "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" - ], - createForRelease: [ - "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" - ], - createForTeamDiscussionCommentInOrg: [ - "POST 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" - ], - createForTeamDiscussionInOrg: [ - "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" - ], - deleteForCommitComment: [ - "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" - ], - deleteForIssue: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" - ], - deleteForIssueComment: [ - "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" - ], - deleteForPullRequestComment: [ - "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" - ], - deleteForRelease: [ - "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" - ], - deleteForTeamDiscussion: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" - ], - deleteForTeamDiscussionComment: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" - ], - listForCommitComment: [ - "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" - ], - listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], - listForIssueComment: [ - "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" - ], - listForPullRequestReviewComment: [ - "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" - ], - listForRelease: [ - "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" - ], - listForTeamDiscussionCommentInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" - ], - listForTeamDiscussionInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" - ] - }, - repos: { - acceptInvitation: [ - "PATCH /user/repository_invitations/{invitation_id}", - {}, - { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } - ], - acceptInvitationForAuthenticatedUser: [ - "PATCH /user/repository_invitations/{invitation_id}" - ], - addAppAccessRestrictions: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - {}, - { mapToData: "apps" } - ], - addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addStatusCheckContexts: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - {}, - { mapToData: "contexts" } - ], - addTeamAccessRestrictions: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - {}, - { mapToData: "teams" } - ], - addUserAccessRestrictions: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - {}, - { mapToData: "users" } - ], - cancelPagesDeployment: [ - "POST /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}/cancel" - ], - checkAutomatedSecurityFixes: [ - "GET /repos/{owner}/{repo}/automated-security-fixes" - ], - checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: [ - "GET /repos/{owner}/{repo}/vulnerability-alerts" - ], - codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], - compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - compareCommitsWithBasehead: [ - "GET /repos/{owner}/{repo}/compare/{basehead}" - ], - createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], - createCommitComment: [ - "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" - ], - createCommitSignatureProtection: [ - "POST 
/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" - ], - createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], - createDeployKey: ["POST /repos/{owner}/{repo}/keys"], - createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentBranchPolicy: [ - "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" - ], - createDeploymentProtectionRule: [ - "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" - ], - createDeploymentStatus: [ - "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" - ], - createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], - createForAuthenticatedUser: ["POST /user/repos"], - createFork: ["POST /repos/{owner}/{repo}/forks"], - createInOrg: ["POST /orgs/{org}/repos"], - createOrUpdateCustomPropertiesValues: [ - "PATCH /repos/{owner}/{repo}/properties/values" - ], - createOrUpdateEnvironment: [ - "PUT /repos/{owner}/{repo}/environments/{environment_name}" - ], - createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], - createOrgRuleset: ["POST /orgs/{org}/rulesets"], - createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployments"], - createPagesSite: ["POST /repos/{owner}/{repo}/pages"], - createRelease: ["POST /repos/{owner}/{repo}/releases"], - createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], - createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], - createUsingTemplate: [ - "POST /repos/{template_owner}/{template_repo}/generate" - ], - createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: [ - "DELETE /user/repository_invitations/{invitation_id}", - {}, - { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } - ], - declineInvitationForAuthenticatedUser: [ - "DELETE /user/repository_invitations/{invitation_id}" - ], - delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" - ], - deleteAdminBranchProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" - ], - deleteAnEnvironment: [ - "DELETE /repos/{owner}/{repo}/environments/{environment_name}" - ], - deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], - deleteBranchProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" - ], - deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" - ], - deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: [ - "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" - ], - deleteDeploymentBranchPolicy: [ - "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" - ], - deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteInvitation: [ - "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" - ], - deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], - deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], - deletePullRequestReviewProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" - ], - deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: [ - "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" - ], - deleteRepoRuleset: ["DELETE 
/repos/{owner}/{repo}/rulesets/{ruleset_id}"], - deleteTagProtection: [ - "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" - ], - deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: [ - "DELETE /repos/{owner}/{repo}/automated-security-fixes" - ], - disableDeploymentProtectionRule: [ - "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" - ], - disablePrivateVulnerabilityReporting: [ - "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting" - ], - disableVulnerabilityAlerts: [ - "DELETE /repos/{owner}/{repo}/vulnerability-alerts" - ], - downloadArchive: [ - "GET /repos/{owner}/{repo}/zipball/{ref}", - {}, - { renamed: ["repos", "downloadZipballArchive"] } - ], - downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], - downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], - enableAutomatedSecurityFixes: [ - "PUT /repos/{owner}/{repo}/automated-security-fixes" - ], - enablePrivateVulnerabilityReporting: [ - "PUT /repos/{owner}/{repo}/private-vulnerability-reporting" - ], - enableVulnerabilityAlerts: [ - "PUT /repos/{owner}/{repo}/vulnerability-alerts" - ], - generateReleaseNotes: [ - "POST /repos/{owner}/{repo}/releases/generate-notes" - ], - get: ["GET /repos/{owner}/{repo}"], - getAccessRestrictions: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" - ], - getAdminBranchProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" - ], - getAllDeploymentProtectionRules: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" - ], - getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], - getAllStatusCheckContexts: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" - ], - getAllTopics: ["GET /repos/{owner}/{repo}/topics"], - getAppsWithAccessToProtectedBranch: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" - ], - getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], - getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], - getBranchProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection" - ], - getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], - getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], - getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], - getCollaboratorPermissionLevel: [ - "GET /repos/{owner}/{repo}/collaborators/{username}/permission" - ], - getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], - getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], - getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], - getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" - ], - getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], - getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], - getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], - getCustomDeploymentProtectionRule: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" - ], - getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"], - getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], - getDeployment: ["GET 
/repos/{owner}/{repo}/deployments/{deployment_id}"], - getDeploymentBranchPolicy: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" - ], - getDeploymentStatus: [ - "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" - ], - getEnvironment: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}" - ], - getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], - getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], - getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"], - getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"], - getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], - getOrgRulesets: ["GET /orgs/{org}/rulesets"], - getPages: ["GET /repos/{owner}/{repo}/pages"], - getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], - getPagesDeployment: [ - "GET /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}" - ], - getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], - getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], - getPullRequestReviewProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" - ], - getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], - getReadme: ["GET /repos/{owner}/{repo}/readme"], - getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], - getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], - getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], - getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getRepoRuleSuite: [ - "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}" - ], - getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"], - getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], - getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], - getStatusChecksProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" - ], - getTeamsWithAccessToProtectedBranch: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" - ], - getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], - getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - getUsersWithAccessToProtectedBranch: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" - ], - getViews: ["GET /repos/{owner}/{repo}/traffic/views"], - getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], - getWebhookConfigForRepo: [ - "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" - ], - getWebhookDelivery: [ - "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" - ], - listActivities: ["GET /repos/{owner}/{repo}/activity"], - listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], - listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: [ - "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" - ], - listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: [ - "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" - ], - listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: [ - "GET /repos/{owner}/{repo}/commits/{ref}/statuses" - ], - listCommits: ["GET /repos/{owner}/{repo}/commits"], - listContributors: ["GET /repos/{owner}/{repo}/contributors"], - listCustomDeploymentRuleIntegrations: [ - "GET 
/repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" - ], - listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentBranchPolicies: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" - ], - listDeploymentStatuses: [ - "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" - ], - listDeployments: ["GET /repos/{owner}/{repo}/deployments"], - listForAuthenticatedUser: ["GET /user/repos"], - listForOrg: ["GET /orgs/{org}/repos"], - listForUser: ["GET /users/{username}/repos"], - listForks: ["GET /repos/{owner}/{repo}/forks"], - listInvitations: ["GET /repos/{owner}/{repo}/invitations"], - listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], - listLanguages: ["GET /repos/{owner}/{repo}/languages"], - listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], - listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: [ - "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" - ], - listReleaseAssets: [ - "GET /repos/{owner}/{repo}/releases/{release_id}/assets" - ], - listReleases: ["GET /repos/{owner}/{repo}/releases"], - listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], - listTags: ["GET /repos/{owner}/{repo}/tags"], - listTeams: ["GET /repos/{owner}/{repo}/teams"], - listWebhookDeliveries: [ - "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" - ], - listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], - merge: ["POST /repos/{owner}/{repo}/merges"], - mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], - pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: [ - "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" - ], - removeAppAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - {}, - { mapToData: "apps" } - ], - removeCollaborator: [ - "DELETE /repos/{owner}/{repo}/collaborators/{username}" - ], - removeStatusCheckContexts: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - {}, - { mapToData: "contexts" } - ], - removeStatusCheckProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" - ], - removeTeamAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - {}, - { mapToData: "teams" } - ], - removeUserAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - {}, - { mapToData: "users" } - ], - renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], - replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], - requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - setAdminBranchProtection: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" - ], - setAppAccessRestrictions: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - {}, - { mapToData: "apps" } - ], - setStatusCheckContexts: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - {}, - { mapToData: "contexts" } - ], - setTeamAccessRestrictions: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - {}, - { mapToData: "teams" } - ], - setUserAccessRestrictions: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - {}, - { mapToData: "users" } - ], - testPushWebhook: ["POST 
/repos/{owner}/{repo}/hooks/{hook_id}/tests"], - transfer: ["POST /repos/{owner}/{repo}/transfer"], - update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection" - ], - updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], - updateDeploymentBranchPolicy: [ - "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" - ], - updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - updateInvitation: [ - "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" - ], - updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], - updatePullRequestReviewProtection: [ - "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" - ], - updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: [ - "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" - ], - updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], - updateStatusCheckPotection: [ - "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", - {}, - { renamed: ["repos", "updateStatusCheckProtection"] } - ], - updateStatusCheckProtection: [ - "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" - ], - updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - updateWebhookConfigForRepo: [ - "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" - ], - uploadReleaseAsset: [ - "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", - { baseUrl: "https://uploads.github.com" } - ] - }, - search: { - code: ["GET /search/code"], - commits: ["GET /search/commits"], - issuesAndPullRequests: ["GET /search/issues"], - labels: ["GET /search/labels"], - repos: ["GET /search/repositories"], - topics: ["GET /search/topics"], - users: ["GET /search/users"] - }, - secretScanning: { - getAlert: [ - "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" - ], - listAlertsForEnterprise: [ - "GET /enterprises/{enterprise}/secret-scanning/alerts" - ], - listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], - listLocationsForAlert: [ - "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" - ], - updateAlert: [ - "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" - ] - }, - securityAdvisories: { - createFork: [ - "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/forks" - ], - createPrivateVulnerabilityReport: [ - "POST /repos/{owner}/{repo}/security-advisories/reports" - ], - createRepositoryAdvisory: [ - "POST /repos/{owner}/{repo}/security-advisories" - ], - createRepositoryAdvisoryCveRequest: [ - "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve" - ], - getGlobalAdvisory: ["GET /advisories/{ghsa_id}"], - getRepositoryAdvisory: [ - "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" - ], - listGlobalAdvisories: ["GET /advisories"], - listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"], - listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], - updateRepositoryAdvisory: [ - "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" - ] - }, - teams: { - addOrUpdateMembershipForUserInOrg: [ - "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" - ], - addOrUpdateProjectPermissionsInOrg: [ - "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" - ], - 
addOrUpdateRepoPermissionsInOrg: [ - "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" - ], - checkPermissionsForProjectInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" - ], - checkPermissionsForRepoInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" - ], - create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: [ - "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" - ], - createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" - ], - deleteDiscussionInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" - ], - deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], - getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" - ], - getDiscussionInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" - ], - getMembershipForUserInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" - ], - list: ["GET /orgs/{org}/teams"], - listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" - ], - listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], - listForAuthenticatedUser: ["GET /user/teams"], - listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/invitations" - ], - listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], - listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - removeMembershipForUserInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" - ], - removeProjectInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" - ], - removeRepoInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" - ], - updateDiscussionCommentInOrg: [ - "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" - ], - updateDiscussionInOrg: [ - "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" - ], - updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] - }, - users: { - addEmailForAuthenticated: [ - "POST /user/emails", - {}, - { renamed: ["users", "addEmailForAuthenticatedUser"] } - ], - addEmailForAuthenticatedUser: ["POST /user/emails"], - addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], - block: ["PUT /user/blocks/{username}"], - checkBlocked: ["GET /user/blocks/{username}"], - checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], - checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: [ - "POST /user/gpg_keys", - {}, - { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } - ], - createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], - createPublicSshKeyForAuthenticated: [ - "POST /user/keys", - {}, - { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } - ], - createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], - createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], - deleteEmailForAuthenticated: [ - "DELETE /user/emails", - {}, - { renamed: ["users", "deleteEmailForAuthenticatedUser"] } - ], - 
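The `renamed` decorations in the map (e.g. `addEmailForAuthenticated` -> `addEmailForAuthenticatedUser`) keep deprecated method names callable while logging a warning on each call. A standalone sketch of that forwarding pattern, with illustrative names (`makeRenamedAlias` is not part of the bundle):

// Sketch: a deprecated alias that warns, then forwards to the new method.
function makeRenamedAlias(log, newImpl, oldName, newName) {
  return (...args) => {
    log.warn(`octokit.${oldName}() has been renamed to octokit.${newName}()`);
    return newImpl(...args);
  };
}

const addEmailForAuthenticatedUser = (params) => ({ status: 201, params }); // stand-in impl
const addEmailForAuthenticated = makeRenamedAlias(
  console,
  addEmailForAuthenticatedUser,
  "users.addEmailForAuthenticated",
  "users.addEmailForAuthenticatedUser"
);
addEmailForAuthenticated({ emails: ["octocat@example.com"] }); // warns, then forwards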
deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: [ - "DELETE /user/gpg_keys/{gpg_key_id}", - {}, - { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } - ], - deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: [ - "DELETE /user/keys/{key_id}", - {}, - { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } - ], - deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], - deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], - deleteSshSigningKeyForAuthenticatedUser: [ - "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" - ], - follow: ["PUT /user/following/{username}"], - getAuthenticated: ["GET /user"], - getByUsername: ["GET /users/{username}"], - getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: [ - "GET /user/gpg_keys/{gpg_key_id}", - {}, - { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } - ], - getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: [ - "GET /user/keys/{key_id}", - {}, - { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } - ], - getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], - getSshSigningKeyForAuthenticatedUser: [ - "GET /user/ssh_signing_keys/{ssh_signing_key_id}" - ], - list: ["GET /users"], - listBlockedByAuthenticated: [ - "GET /user/blocks", - {}, - { renamed: ["users", "listBlockedByAuthenticatedUser"] } - ], - listBlockedByAuthenticatedUser: ["GET /user/blocks"], - listEmailsForAuthenticated: [ - "GET /user/emails", - {}, - { renamed: ["users", "listEmailsForAuthenticatedUser"] } - ], - listEmailsForAuthenticatedUser: ["GET /user/emails"], - listFollowedByAuthenticated: [ - "GET /user/following", - {}, - { renamed: ["users", "listFollowedByAuthenticatedUser"] } - ], - listFollowedByAuthenticatedUser: ["GET /user/following"], - listFollowersForAuthenticatedUser: ["GET /user/followers"], - listFollowersForUser: ["GET /users/{username}/followers"], - listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: [ - "GET /user/gpg_keys", - {}, - { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } - ], - listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], - listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: [ - "GET /user/public_emails", - {}, - { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } - ], - listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], - listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: [ - "GET /user/keys", - {}, - { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } - ], - listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], - listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], - listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], - listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], - listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], - setPrimaryEmailVisibilityForAuthenticated: [ - "PATCH /user/email/visibility", - {}, - { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } - ], - setPrimaryEmailVisibilityForAuthenticatedUser: [ - "PATCH /user/email/visibility" - ], - unblock: ["DELETE /user/blocks/{username}"], - unfollow: ["DELETE /user/following/{username}"], - updateAuthenticated: ["PATCH /user"] - } - }; - var 
endpoints_default = Endpoints; - var endpointMethodsMap = /* @__PURE__ */ new Map(); - for (const [scope, endpoints] of Object.entries(endpoints_default)) { - for (const [methodName, endpoint] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign( - { - method, - url - }, - defaults - ); - if (!endpointMethodsMap.has(scope)) { - endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); - } - endpointMethodsMap.get(scope).set(methodName, { - scope, - methodName, - endpointDefaults, - decorations - }); - } - } - var handler = { - has({ scope }, methodName) { - return endpointMethodsMap.get(scope).has(methodName); - }, - getOwnPropertyDescriptor(target, methodName) { - return { - value: this.get(target, methodName), - // ensures method is in the cache - configurable: true, - writable: true, - enumerable: true - }; - }, - defineProperty(target, methodName, descriptor) { - Object.defineProperty(target.cache, methodName, descriptor); - return true; - }, - deleteProperty(target, methodName) { - delete target.cache[methodName]; - return true; - }, - ownKeys({ scope }) { - return [...endpointMethodsMap.get(scope).keys()]; - }, - set(target, methodName, value) { - return target.cache[methodName] = value; - }, - get({ octokit, scope, cache }, methodName) { - if (cache[methodName]) { - return cache[methodName]; - } - const method = endpointMethodsMap.get(scope).get(methodName); - if (!method) { - return void 0; - } - const { endpointDefaults, decorations } = method; - if (decorations) { - cache[methodName] = decorate( - octokit, - scope, - methodName, - endpointDefaults, - decorations - ); - } else { - cache[methodName] = octokit.request.defaults(endpointDefaults); - } - return cache[methodName]; - } - }; - function endpointsToMethods(octokit) { - const newMethods = {}; - for (const scope of endpointMethodsMap.keys()) { - newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); - } - return newMethods; - } - __name(endpointsToMethods, "endpointsToMethods"); - function decorate(octokit, scope, methodName, defaults, decorations) { - const requestWithDefaults = octokit.request.defaults(defaults); - function withDecorations(...args) { - let options = requestWithDefaults.endpoint.merge(...args); - if (decorations.mapToData) { - options = Object.assign({}, options, { - data: options[decorations.mapToData], - [decorations.mapToData]: void 0 - }); - return requestWithDefaults(options); - } - if (decorations.renamed) { - const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn( - `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` - ); - } - if (decorations.deprecated) { - octokit.log.warn(decorations.deprecated); - } - if (decorations.renamedParameters) { - const options2 = requestWithDefaults.endpoint.merge(...args); - for (const [name, alias] of Object.entries( - decorations.renamedParameters - )) { - if (name in options2) { - octokit.log.warn( - `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead` - ); - if (!(alias in options2)) { - options2[alias] = options2[name]; - } - delete options2[name]; - } - } - return requestWithDefaults(options2); - } - return requestWithDefaults(...args); - } - __name(withDecorations, "withDecorations"); - return Object.assign(withDecorations, requestWithDefaults); - } - __name(decorate, "decorate"); - function restEndpointMethods(octokit) { - const api = endpointsToMethods(octokit); - return { - rest: api - }; - } - __name(restEndpointMethods, "restEndpointMethods"); - restEndpointMethods.VERSION = VERSION3; - function legacyRestEndpointMethods(octokit) { - const api = endpointsToMethods(octokit); - return { - ...api, - rest: api - }; - } - __name(legacyRestEndpointMethods, "legacyRestEndpointMethods"); - legacyRestEndpointMethods.VERSION = VERSION3; - } -}); - -// ../node_modules/@actions/github/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js -var require_dist_node10 = __commonJS({ - "../node_modules/@actions/github/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js"(exports2, module2) { - "use strict"; - var __defProp2 = Object.defineProperty; - var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; - var __getOwnPropNames2 = Object.getOwnPropertyNames; - var __hasOwnProp2 = Object.prototype.hasOwnProperty; - var __export2 = /* @__PURE__ */ __name((target, all) => { - for (var name in all) - __defProp2(target, name, { get: all[name], enumerable: true }); - }, "__export"); - var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames2(from)) - if (!__hasOwnProp2.call(to, key) && key !== except) - __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); - } - return to; - }, "__copyProps"); - var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); - var dist_src_exports = {}; - __export2(dist_src_exports, { - composePaginateRest: /* @__PURE__ */ __name(() => composePaginateRest, "composePaginateRest"), - isPaginatingEndpoint: /* @__PURE__ */ __name(() => isPaginatingEndpoint, "isPaginatingEndpoint"), - paginateRest: /* @__PURE__ */ __name(() => paginateRest, "paginateRest"), - paginatingEndpoints: /* @__PURE__ */ __name(() => paginatingEndpoints, "paginatingEndpoints") - }); - module2.exports = __toCommonJS2(dist_src_exports); - var VERSION3 = "9.2.1"; - function normalizePaginatedListResponse(response) { - if (!response.data) { - return { - ...response, - data: [] - }; - } - const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); - if (!responseNeedsNormalization) - return response; - const incompleteResults = response.data.incomplete_results; - const repositorySelection = response.data.repository_selection; - const totalCount = response.data.total_count; - delete response.data.incomplete_results; - delete response.data.repository_selection; - delete response.data.total_count; - const namespaceKey = Object.keys(response.data)[0]; - const data = response.data[namespaceKey]; - response.data = data; - if (typeof incompleteResults !== "undefined") { - response.data.incomplete_results = incompleteResults; - } - if (typeof repositorySelection !== "undefined") { - response.data.repository_selection = repositorySelection; - } - response.data.total_count = totalCount; - return 
response; - } - __name(normalizePaginatedListResponse, "normalizePaginatedListResponse"); - function iterator(octokit, route, parameters) { - const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); - const requestMethod = typeof route === "function" ? route : octokit.request; - const method = options.method; - const headers = options.headers; - let url = options.url; - return { - [Symbol.asyncIterator]: () => ({ - async next() { - if (!url) - return { done: true }; - try { - const response = await requestMethod({ method, url, headers }); - const normalizedResponse = normalizePaginatedListResponse(response); - url = ((normalizedResponse.headers.link || "").match( - /<([^>]+)>;\s*rel="next"/ - ) || [])[1]; - return { value: normalizedResponse }; - } catch (error) { - if (error.status !== 409) - throw error; - url = ""; - return { - value: { - status: 200, - headers: {}, - data: [] - } - }; - } - } - }) - }; - } - __name(iterator, "iterator"); - function paginate(octokit, route, parameters, mapFn) { - if (typeof parameters === "function") { - mapFn = parameters; - parameters = void 0; - } - return gather( - octokit, - [], - iterator(octokit, route, parameters)[Symbol.asyncIterator](), - mapFn - ); - } - __name(paginate, "paginate"); - function gather(octokit, results, iterator2, mapFn) { - return iterator2.next().then((result) => { - if (result.done) { - return results; - } - let earlyExit = false; - function done() { - earlyExit = true; - } - __name(done, "done"); - results = results.concat( - mapFn ? mapFn(result.value, done) : result.value.data - ); - if (earlyExit) { - return results; - } - return gather(octokit, results, iterator2, mapFn); - }); - } - __name(gather, "gather"); - var composePaginateRest = Object.assign(paginate, { - iterator - }); - var paginatingEndpoints = [ - "GET /advisories", - "GET /app/hook/deliveries", - "GET /app/installation-requests", - "GET /app/installations", - "GET /assignments/{assignment_id}/accepted_assignments", - "GET /classrooms", - "GET /classrooms/{classroom_id}/assignments", - "GET /enterprises/{enterprise}/dependabot/alerts", - "GET /enterprises/{enterprise}/secret-scanning/alerts", - "GET /events", - "GET /gists", - "GET /gists/public", - "GET /gists/starred", - "GET /gists/{gist_id}/comments", - "GET /gists/{gist_id}/commits", - "GET /gists/{gist_id}/forks", - "GET /installation/repositories", - "GET /issues", - "GET /licenses", - "GET /marketplace_listing/plans", - "GET /marketplace_listing/plans/{plan_id}/accounts", - "GET /marketplace_listing/stubbed/plans", - "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", - "GET /networks/{owner}/{repo}/events", - "GET /notifications", - "GET /organizations", - "GET /orgs/{org}/actions/cache/usage-by-repository", - "GET /orgs/{org}/actions/permissions/repositories", - "GET /orgs/{org}/actions/runners", - "GET /orgs/{org}/actions/secrets", - "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", - "GET /orgs/{org}/actions/variables", - "GET /orgs/{org}/actions/variables/{name}/repositories", - "GET /orgs/{org}/blocks", - "GET /orgs/{org}/code-scanning/alerts", - "GET /orgs/{org}/codespaces", - "GET /orgs/{org}/codespaces/secrets", - "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", - "GET /orgs/{org}/copilot/billing/seats", - "GET /orgs/{org}/dependabot/alerts", - "GET /orgs/{org}/dependabot/secrets", - "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", - "GET /orgs/{org}/events", - "GET 
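The iterator above advances by parsing the RFC 5988 `link` response header until no `rel="next"` URL remains. A self-contained sketch of that loop against stubbed responses (the stub stands in for a real Octokit client; `collectAll` is an illustrative name):

// Two fake pages; the first links to the second, the second is last.
const pages = [
  { data: [1, 2], headers: { link: '<https://api.github.com/items?page=2>; rel="next"' } },
  { data: [3], headers: {} }
];
let calls = 0;
const request = async () => pages[calls++]; // stubbed requestMethod

async function collectAll() {
  const results = [];
  let url = "https://api.github.com/items?page=1";
  while (url) {
    const response = await request({ method: "GET", url });
    results.push(...response.data);
    // Same regex the iterator uses to find the next page, if any.
    url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
  }
  return results;
}

collectAll().then((all) => console.log(all)); // [1, 2, 3]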
/orgs/{org}/failed_invitations", - "GET /orgs/{org}/hooks", - "GET /orgs/{org}/hooks/{hook_id}/deliveries", - "GET /orgs/{org}/installations", - "GET /orgs/{org}/invitations", - "GET /orgs/{org}/invitations/{invitation_id}/teams", - "GET /orgs/{org}/issues", - "GET /orgs/{org}/members", - "GET /orgs/{org}/members/{username}/codespaces", - "GET /orgs/{org}/migrations", - "GET /orgs/{org}/migrations/{migration_id}/repositories", - "GET /orgs/{org}/organization-roles/{role_id}/teams", - "GET /orgs/{org}/organization-roles/{role_id}/users", - "GET /orgs/{org}/outside_collaborators", - "GET /orgs/{org}/packages", - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", - "GET /orgs/{org}/personal-access-token-requests", - "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", - "GET /orgs/{org}/personal-access-tokens", - "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories", - "GET /orgs/{org}/projects", - "GET /orgs/{org}/properties/values", - "GET /orgs/{org}/public_members", - "GET /orgs/{org}/repos", - "GET /orgs/{org}/rulesets", - "GET /orgs/{org}/rulesets/rule-suites", - "GET /orgs/{org}/secret-scanning/alerts", - "GET /orgs/{org}/security-advisories", - "GET /orgs/{org}/teams", - "GET /orgs/{org}/teams/{team_slug}/discussions", - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", - "GET /orgs/{org}/teams/{team_slug}/invitations", - "GET /orgs/{org}/teams/{team_slug}/members", - "GET /orgs/{org}/teams/{team_slug}/projects", - "GET /orgs/{org}/teams/{team_slug}/repos", - "GET /orgs/{org}/teams/{team_slug}/teams", - "GET /projects/columns/{column_id}/cards", - "GET /projects/{project_id}/collaborators", - "GET /projects/{project_id}/columns", - "GET /repos/{owner}/{repo}/actions/artifacts", - "GET /repos/{owner}/{repo}/actions/caches", - "GET /repos/{owner}/{repo}/actions/organization-secrets", - "GET /repos/{owner}/{repo}/actions/organization-variables", - "GET /repos/{owner}/{repo}/actions/runners", - "GET /repos/{owner}/{repo}/actions/runs", - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", - "GET /repos/{owner}/{repo}/actions/secrets", - "GET /repos/{owner}/{repo}/actions/variables", - "GET /repos/{owner}/{repo}/actions/workflows", - "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", - "GET /repos/{owner}/{repo}/activity", - "GET /repos/{owner}/{repo}/assignees", - "GET /repos/{owner}/{repo}/branches", - "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", - "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", - "GET /repos/{owner}/{repo}/code-scanning/alerts", - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", - "GET /repos/{owner}/{repo}/code-scanning/analyses", - "GET /repos/{owner}/{repo}/codespaces", - "GET /repos/{owner}/{repo}/codespaces/devcontainers", - "GET /repos/{owner}/{repo}/codespaces/secrets", - "GET /repos/{owner}/{repo}/collaborators", - "GET /repos/{owner}/{repo}/comments", - "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", - "GET /repos/{owner}/{repo}/commits", - "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", - "GET 
/repos/{owner}/{repo}/commits/{commit_sha}/pulls", - "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", - "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", - "GET /repos/{owner}/{repo}/commits/{ref}/status", - "GET /repos/{owner}/{repo}/commits/{ref}/statuses", - "GET /repos/{owner}/{repo}/contributors", - "GET /repos/{owner}/{repo}/dependabot/alerts", - "GET /repos/{owner}/{repo}/dependabot/secrets", - "GET /repos/{owner}/{repo}/deployments", - "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", - "GET /repos/{owner}/{repo}/environments", - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", - "GET /repos/{owner}/{repo}/events", - "GET /repos/{owner}/{repo}/forks", - "GET /repos/{owner}/{repo}/hooks", - "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", - "GET /repos/{owner}/{repo}/invitations", - "GET /repos/{owner}/{repo}/issues", - "GET /repos/{owner}/{repo}/issues/comments", - "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", - "GET /repos/{owner}/{repo}/issues/events", - "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", - "GET /repos/{owner}/{repo}/issues/{issue_number}/events", - "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", - "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", - "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", - "GET /repos/{owner}/{repo}/keys", - "GET /repos/{owner}/{repo}/labels", - "GET /repos/{owner}/{repo}/milestones", - "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", - "GET /repos/{owner}/{repo}/notifications", - "GET /repos/{owner}/{repo}/pages/builds", - "GET /repos/{owner}/{repo}/projects", - "GET /repos/{owner}/{repo}/pulls", - "GET /repos/{owner}/{repo}/pulls/comments", - "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", - "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", - "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", - "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", - "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", - "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", - "GET /repos/{owner}/{repo}/releases", - "GET /repos/{owner}/{repo}/releases/{release_id}/assets", - "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", - "GET /repos/{owner}/{repo}/rules/branches/{branch}", - "GET /repos/{owner}/{repo}/rulesets", - "GET /repos/{owner}/{repo}/rulesets/rule-suites", - "GET /repos/{owner}/{repo}/secret-scanning/alerts", - "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", - "GET /repos/{owner}/{repo}/security-advisories", - "GET /repos/{owner}/{repo}/stargazers", - "GET /repos/{owner}/{repo}/subscribers", - "GET /repos/{owner}/{repo}/tags", - "GET /repos/{owner}/{repo}/teams", - "GET /repos/{owner}/{repo}/topics", - "GET /repositories", - "GET /repositories/{repository_id}/environments/{environment_name}/secrets", - "GET /repositories/{repository_id}/environments/{environment_name}/variables", - "GET /search/code", - "GET /search/commits", - "GET /search/issues", - "GET /search/labels", - "GET /search/repositories", - "GET /search/topics", - "GET /search/users", - "GET /teams/{team_id}/discussions", - "GET /teams/{team_id}/discussions/{discussion_number}/comments", - "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", - "GET 
/teams/{team_id}/discussions/{discussion_number}/reactions", - "GET /teams/{team_id}/invitations", - "GET /teams/{team_id}/members", - "GET /teams/{team_id}/projects", - "GET /teams/{team_id}/repos", - "GET /teams/{team_id}/teams", - "GET /user/blocks", - "GET /user/codespaces", - "GET /user/codespaces/secrets", - "GET /user/emails", - "GET /user/followers", - "GET /user/following", - "GET /user/gpg_keys", - "GET /user/installations", - "GET /user/installations/{installation_id}/repositories", - "GET /user/issues", - "GET /user/keys", - "GET /user/marketplace_purchases", - "GET /user/marketplace_purchases/stubbed", - "GET /user/memberships/orgs", - "GET /user/migrations", - "GET /user/migrations/{migration_id}/repositories", - "GET /user/orgs", - "GET /user/packages", - "GET /user/packages/{package_type}/{package_name}/versions", - "GET /user/public_emails", - "GET /user/repos", - "GET /user/repository_invitations", - "GET /user/social_accounts", - "GET /user/ssh_signing_keys", - "GET /user/starred", - "GET /user/subscriptions", - "GET /user/teams", - "GET /users", - "GET /users/{username}/events", - "GET /users/{username}/events/orgs/{org}", - "GET /users/{username}/events/public", - "GET /users/{username}/followers", - "GET /users/{username}/following", - "GET /users/{username}/gists", - "GET /users/{username}/gpg_keys", - "GET /users/{username}/keys", - "GET /users/{username}/orgs", - "GET /users/{username}/packages", - "GET /users/{username}/projects", - "GET /users/{username}/received_events", - "GET /users/{username}/received_events/public", - "GET /users/{username}/repos", - "GET /users/{username}/social_accounts", - "GET /users/{username}/ssh_signing_keys", - "GET /users/{username}/starred", - "GET /users/{username}/subscriptions" - ]; - function isPaginatingEndpoint(arg) { - if (typeof arg === "string") { - return paginatingEndpoints.includes(arg); - } else { - return false; - } - } - __name(isPaginatingEndpoint, "isPaginatingEndpoint"); - function paginateRest(octokit) { - return { - paginate: Object.assign(paginate.bind(null, octokit), { - iterator: iterator.bind(null, octokit) - }) - }; - } - __name(paginateRest, "paginateRest"); - paginateRest.VERSION = VERSION3; - } -}); - -// ../node_modules/@actions/github/lib/utils.js -var require_utils4 = __commonJS({ - "../node_modules/@actions/github/lib/utils.js"(exports2) { - "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }; - } - Object.defineProperty(o, k2, desc); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? 
function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getOctokitOptions = exports2.GitHub = exports2.defaults = exports2.context = void 0; - var Context = __importStar3(require_context()); - var Utils = __importStar3(require_utils3()); - var core_1 = require_dist_node8(); - var plugin_rest_endpoint_methods_1 = require_dist_node9(); - var plugin_paginate_rest_1 = require_dist_node10(); - exports2.context = new Context.Context(); - var baseUrl = Utils.getApiBaseUrl(); - exports2.defaults = { - baseUrl, - request: { - agent: Utils.getProxyAgent(baseUrl), - fetch: Utils.getProxyFetch(baseUrl) - } - }; - exports2.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports2.defaults); - function getOctokitOptions(token, options) { - const opts = Object.assign({}, options || {}); - const auth = Utils.getAuthString(token, opts); - if (auth) { - opts.auth = auth; - } - return opts; - } - __name(getOctokitOptions, "getOctokitOptions"); - exports2.getOctokitOptions = getOctokitOptions; - } -}); - -// ../node_modules/@actions/github/lib/github.js -var require_github = __commonJS({ - "../node_modules/@actions/github/lib/github.js"(exports2) { - "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }; - } - Object.defineProperty(o, k2, desc); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? 
function(o, v) {
-      Object.defineProperty(o, "default", { enumerable: true, value: v });
-    } : function(o, v) {
-      o["default"] = v;
-    });
-    var __importStar3 = exports2 && exports2.__importStar || function(mod) {
-      if (mod && mod.__esModule) return mod;
-      var result = {};
-      if (mod != null) {
-        for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k);
-      }
-      __setModuleDefault3(result, mod);
-      return result;
-    };
-    Object.defineProperty(exports2, "__esModule", { value: true });
-    exports2.getOctokit = exports2.context = void 0;
-    var Context = __importStar3(require_context());
-    var utils_12 = require_utils4();
-    exports2.context = new Context.Context();
-    function getOctokit(token, options, ...additionalPlugins) {
-      const GitHubWithPlugins = utils_12.GitHub.plugin(...additionalPlugins);
-      return new GitHubWithPlugins((0, utils_12.getOctokitOptions)(token, options));
-    }
-    __name(getOctokit, "getOctokit");
-    exports2.getOctokit = getOctokit;
-  }
-});
-
-// ../common/cli.json
-var version, checksum;
-var init_cli = __esm({
-  "../common/cli.json"() {
-    version = "2024.3.2";
-    checksum = {
-      windows_x86_64: "d2260f5d2cfd18c0795adcaf7ff57f77d2e7f9d0ce888531c443923388262eec",
-      linux_arm64: "8086568b6c62fb7f109c33d8f04f68cf060db4cd260a46d5f581fb093474ecc7",
-      darwin_arm64: "03962c7c513b6a1ca67164bbcb65f8673a60cff6fe13f618598439b45d352084",
-      darwin_x86_64: "e70904e0c4fc1d56a8d5f1e2682ccbe168b41b973dac2c5fe42f9b31837dbd86",
-      windows_arm64: "940e9a84b03db0b5e67f93299879e804984b3893577a58b2424dbc5c2db39cbe",
-      linux_x86_64: "a7a47898971fe2c7db08d10afc40292736df33e9b33d9d84b8672f3ec246da04"
-    };
-  }
-});
-
-// ../node_modules/process-nextick-args/index.js
-var require_process_nextick_args = __commonJS({
-  "../node_modules/process-nextick-args/index.js"(exports2, module2) {
-    "use strict";
-    if (typeof process === "undefined" || !process.version || process.version.indexOf("v0.") === 0 || process.version.indexOf("v1.") === 0 && process.version.indexOf("v1.8.") !== 0) {
-      module2.exports = { nextTick };
-    } else {
-      module2.exports = process;
-    }
-    function nextTick(fn, arg1, arg2, arg3) {
-      if (typeof fn !== "function") {
-        throw new TypeError('"callback" argument must be a function');
-      }
-      var len = arguments.length;
-      var args, i;
-      switch (len) {
-        case 0:
-        case 1:
-          return process.nextTick(fn);
-        case 2:
-          return process.nextTick(/* @__PURE__ */ __name(function afterTickOne() {
-            fn.call(null, arg1);
-          }, "afterTickOne"));
-        case 3:
-          return process.nextTick(/* @__PURE__ */ __name(function afterTickTwo() {
-            fn.call(null, arg1, arg2);
-          }, "afterTickTwo"));
-        case 4:
-          return process.nextTick(/* @__PURE__ */ __name(function afterTickThree() {
-            fn.call(null, arg1, arg2, arg3);
-          }, "afterTickThree"));
-        default:
-          args = new Array(len - 1);
-          i = 0;
-          while (i < args.length) {
-            args[i++] = arguments[i];
-          }
-          return process.nextTick(/* @__PURE__ */ __name(function afterTick() {
-            fn.apply(null, args);
-          }, "afterTick"));
-      }
-    }
-    __name(nextTick, "nextTick");
-  }
-});
-
-// ../node_modules/jszip/node_modules/isarray/index.js
-var require_isarray = __commonJS({
-  "../node_modules/jszip/node_modules/isarray/index.js"(exports2, module2) {
-    var toString = {}.toString;
-    module2.exports = Array.isArray || function(arr) {
-      return toString.call(arr) == "[object Array]";
-    };
-  }
-});
-
-// ../node_modules/jszip/node_modules/readable-stream/lib/internal/streams/stream.js
-var require_stream = __commonJS({
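The `version`/`checksum` table in cli.json above pins the Qodana CLI build per platform/arch key. A sketch of how such a table is typically consumed when verifying a downloaded archive (illustrative only; `verifyChecksum` is a hypothetical helper, the action's actual download code lives elsewhere in this bundle):

// Compute SHA-256 of a downloaded archive and compare to the pinned value.
const { createHash } = require("crypto");
const { readFileSync } = require("fs");

function verifyChecksum(archivePath, expected) {
  const actual = createHash("sha256").update(readFileSync(archivePath)).digest("hex");
  if (actual !== expected) {
    throw new Error(`Checksum mismatch: expected ${expected}, got ${actual}`);
  }
}

// e.g. verifyChecksum("qodana.tar.gz", checksum["linux_x86_64"]);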
"../node_modules/jszip/node_modules/readable-stream/lib/internal/streams/stream.js"(exports2, module2) { - module2.exports = require("stream"); - } -}); - -// ../node_modules/jszip/node_modules/safe-buffer/index.js -var require_safe_buffer = __commonJS({ - "../node_modules/jszip/node_modules/safe-buffer/index.js"(exports2, module2) { - var buffer = require("buffer"); - var Buffer2 = buffer.Buffer; - function copyProps(src, dst) { - for (var key in src) { - dst[key] = src[key]; - } - } - __name(copyProps, "copyProps"); - if (Buffer2.from && Buffer2.alloc && Buffer2.allocUnsafe && Buffer2.allocUnsafeSlow) { - module2.exports = buffer; - } else { - copyProps(buffer, exports2); - exports2.Buffer = SafeBuffer; - } - function SafeBuffer(arg, encodingOrOffset, length) { - return Buffer2(arg, encodingOrOffset, length); - } - __name(SafeBuffer, "SafeBuffer"); - copyProps(Buffer2, SafeBuffer); - SafeBuffer.from = function(arg, encodingOrOffset, length) { - if (typeof arg === "number") { - throw new TypeError("Argument must not be a number"); - } - return Buffer2(arg, encodingOrOffset, length); - }; - SafeBuffer.alloc = function(size, fill, encoding) { - if (typeof size !== "number") { - throw new TypeError("Argument must be a number"); - } - var buf = Buffer2(size); - if (fill !== void 0) { - if (typeof encoding === "string") { - buf.fill(fill, encoding); - } else { - buf.fill(fill); - } - } else { - buf.fill(0); - } - return buf; - }; - SafeBuffer.allocUnsafe = function(size) { - if (typeof size !== "number") { - throw new TypeError("Argument must be a number"); - } - return Buffer2(size); - }; - SafeBuffer.allocUnsafeSlow = function(size) { - if (typeof size !== "number") { - throw new TypeError("Argument must be a number"); - } - return buffer.SlowBuffer(size); - }; - } -}); - -// ../node_modules/core-util-is/lib/util.js -var require_util8 = __commonJS({ - "../node_modules/core-util-is/lib/util.js"(exports2) { - function isArray(arg) { - if (Array.isArray) { - return Array.isArray(arg); - } - return objectToString(arg) === "[object Array]"; - } - __name(isArray, "isArray"); - exports2.isArray = isArray; - function isBoolean(arg) { - return typeof arg === "boolean"; - } - __name(isBoolean, "isBoolean"); - exports2.isBoolean = isBoolean; - function isNull(arg) { - return arg === null; - } - __name(isNull, "isNull"); - exports2.isNull = isNull; - function isNullOrUndefined(arg) { - return arg == null; - } - __name(isNullOrUndefined, "isNullOrUndefined"); - exports2.isNullOrUndefined = isNullOrUndefined; - function isNumber(arg) { - return typeof arg === "number"; - } - __name(isNumber, "isNumber"); - exports2.isNumber = isNumber; - function isString(arg) { - return typeof arg === "string"; - } - __name(isString, "isString"); - exports2.isString = isString; - function isSymbol(arg) { - return typeof arg === "symbol"; - } - __name(isSymbol, "isSymbol"); - exports2.isSymbol = isSymbol; - function isUndefined(arg) { - return arg === void 0; - } - __name(isUndefined, "isUndefined"); - exports2.isUndefined = isUndefined; - function isRegExp(re2) { - return objectToString(re2) === "[object RegExp]"; - } - __name(isRegExp, "isRegExp"); - exports2.isRegExp = isRegExp; - function isObject(arg) { - return typeof arg === "object" && arg !== null; - } - __name(isObject, "isObject"); - exports2.isObject = isObject; - function isDate(d) { - return objectToString(d) === "[object Date]"; - } - __name(isDate, "isDate"); - exports2.isDate = isDate; - function isError(e) { - return objectToString(e) === "[object 
Error]" || e instanceof Error; - } - __name(isError, "isError"); - exports2.isError = isError; - function isFunction(arg) { - return typeof arg === "function"; - } - __name(isFunction, "isFunction"); - exports2.isFunction = isFunction; - function isPrimitive(arg) { - return arg === null || typeof arg === "boolean" || typeof arg === "number" || typeof arg === "string" || typeof arg === "symbol" || // ES6 symbol - typeof arg === "undefined"; - } - __name(isPrimitive, "isPrimitive"); - exports2.isPrimitive = isPrimitive; - exports2.isBuffer = require("buffer").Buffer.isBuffer; - function objectToString(o) { - return Object.prototype.toString.call(o); - } - __name(objectToString, "objectToString"); - } -}); - -// ../node_modules/inherits/inherits_browser.js -var require_inherits_browser = __commonJS({ - "../node_modules/inherits/inherits_browser.js"(exports2, module2) { - if (typeof Object.create === "function") { - module2.exports = /* @__PURE__ */ __name(function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor; - ctor.prototype = Object.create(superCtor.prototype, { - constructor: { - value: ctor, - enumerable: false, - writable: true, - configurable: true - } - }); - } - }, "inherits"); - } else { - module2.exports = /* @__PURE__ */ __name(function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor; - var TempCtor = /* @__PURE__ */ __name(function() { - }, "TempCtor"); - TempCtor.prototype = superCtor.prototype; - ctor.prototype = new TempCtor(); - ctor.prototype.constructor = ctor; - } - }, "inherits"); - } - } -}); - -// ../node_modules/inherits/inherits.js -var require_inherits = __commonJS({ - "../node_modules/inherits/inherits.js"(exports2, module2) { - try { - util = require("util"); - if (typeof util.inherits !== "function") throw ""; - module2.exports = util.inherits; - } catch (e) { - module2.exports = require_inherits_browser(); - } - var util; - } -}); - -// ../node_modules/jszip/node_modules/readable-stream/lib/internal/streams/BufferList.js -var require_BufferList = __commonJS({ - "../node_modules/jszip/node_modules/readable-stream/lib/internal/streams/BufferList.js"(exports2, module2) { - "use strict"; - function _classCallCheck(instance, Constructor) { - if (!(instance instanceof Constructor)) { - throw new TypeError("Cannot call a class as a function"); - } - } - __name(_classCallCheck, "_classCallCheck"); - var Buffer2 = require_safe_buffer().Buffer; - var util = require("util"); - function copyBuffer(src, target, offset) { - src.copy(target, offset); - } - __name(copyBuffer, "copyBuffer"); - module2.exports = function() { - function BufferList() { - _classCallCheck(this, BufferList); - this.head = null; - this.tail = null; - this.length = 0; - } - __name(BufferList, "BufferList"); - BufferList.prototype.push = /* @__PURE__ */ __name(function push(v) { - var entry = { data: v, next: null }; - if (this.length > 0) this.tail.next = entry; - else this.head = entry; - this.tail = entry; - ++this.length; - }, "push"); - BufferList.prototype.unshift = /* @__PURE__ */ __name(function unshift(v) { - var entry = { data: v, next: this.head }; - if (this.length === 0) this.tail = entry; - this.head = entry; - ++this.length; - }, "unshift"); - BufferList.prototype.shift = /* @__PURE__ */ __name(function shift() { - if (this.length === 0) return; - var ret = this.head.data; - if (this.length === 1) this.head = this.tail = null; - else this.head = this.head.next; - --this.length; - return ret; - }, "shift"); - BufferList.prototype.clear = /* 
@__PURE__ */ __name(function clear() { - this.head = this.tail = null; - this.length = 0; - }, "clear"); - BufferList.prototype.join = /* @__PURE__ */ __name(function join(s) { - if (this.length === 0) return ""; - var p = this.head; - var ret = "" + p.data; - while (p = p.next) { - ret += s + p.data; - } - return ret; - }, "join"); - BufferList.prototype.concat = /* @__PURE__ */ __name(function concat(n) { - if (this.length === 0) return Buffer2.alloc(0); - var ret = Buffer2.allocUnsafe(n >>> 0); - var p = this.head; - var i = 0; - while (p) { - copyBuffer(p.data, ret, i); - i += p.data.length; - p = p.next; - } - return ret; - }, "concat"); - return BufferList; - }(); - if (util && util.inspect && util.inspect.custom) { - module2.exports.prototype[util.inspect.custom] = function() { - var obj = util.inspect({ length: this.length }); - return this.constructor.name + " " + obj; - }; - } - } -}); - -// ../node_modules/jszip/node_modules/readable-stream/lib/internal/streams/destroy.js -var require_destroy = __commonJS({ - "../node_modules/jszip/node_modules/readable-stream/lib/internal/streams/destroy.js"(exports2, module2) { - "use strict"; - var pna = require_process_nextick_args(); - function destroy(err, cb) { - var _this = this; - var readableDestroyed = this._readableState && this._readableState.destroyed; - var writableDestroyed = this._writableState && this._writableState.destroyed; - if (readableDestroyed || writableDestroyed) { - if (cb) { - cb(err); - } else if (err) { - if (!this._writableState) { - pna.nextTick(emitErrorNT, this, err); - } else if (!this._writableState.errorEmitted) { - this._writableState.errorEmitted = true; - pna.nextTick(emitErrorNT, this, err); - } - } - return this; - } - if (this._readableState) { - this._readableState.destroyed = true; - } - if (this._writableState) { - this._writableState.destroyed = true; - } - this._destroy(err || null, function(err2) { - if (!cb && err2) { - if (!_this._writableState) { - pna.nextTick(emitErrorNT, _this, err2); - } else if (!_this._writableState.errorEmitted) { - _this._writableState.errorEmitted = true; - pna.nextTick(emitErrorNT, _this, err2); - } - } else if (cb) { - cb(err2); - } - }); - return this; - } - __name(destroy, "destroy"); - function undestroy() { - if (this._readableState) { - this._readableState.destroyed = false; - this._readableState.reading = false; - this._readableState.ended = false; - this._readableState.endEmitted = false; - } - if (this._writableState) { - this._writableState.destroyed = false; - this._writableState.ended = false; - this._writableState.ending = false; - this._writableState.finalCalled = false; - this._writableState.prefinished = false; - this._writableState.finished = false; - this._writableState.errorEmitted = false; - } - } - __name(undestroy, "undestroy"); - function emitErrorNT(self2, err) { - self2.emit("error", err); - } - __name(emitErrorNT, "emitErrorNT"); - module2.exports = { - destroy, - undestroy - }; - } -}); - -// ../node_modules/util-deprecate/node.js -var require_node = __commonJS({ - "../node_modules/util-deprecate/node.js"(exports2, module2) { - module2.exports = require("util").deprecate; - } -}); - -// ../node_modules/jszip/node_modules/readable-stream/lib/_stream_writable.js -var require_stream_writable = __commonJS({ - "../node_modules/jszip/node_modules/readable-stream/lib/_stream_writable.js"(exports2, module2) { - "use strict"; - var pna = require_process_nextick_args(); - module2.exports = Writable; - function CorkedRequest(state) { - var _this = 
this; - this.next = null; - this.entry = null; - this.finish = function() { - onCorkedFinish(_this, state); - }; - } - __name(CorkedRequest, "CorkedRequest"); - var asyncWrite = !process.browser && ["v0.10", "v0.9."].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; - var Duplex; - Writable.WritableState = WritableState; - var util = Object.create(require_util8()); - util.inherits = require_inherits(); - var internalUtil = { - deprecate: require_node() - }; - var Stream = require_stream(); - var Buffer2 = require_safe_buffer().Buffer; - var OurUint8Array = (typeof global !== "undefined" ? global : typeof window !== "undefined" ? window : typeof self !== "undefined" ? self : {}).Uint8Array || function() { - }; - function _uint8ArrayToBuffer(chunk) { - return Buffer2.from(chunk); - } - __name(_uint8ArrayToBuffer, "_uint8ArrayToBuffer"); - function _isUint8Array(obj) { - return Buffer2.isBuffer(obj) || obj instanceof OurUint8Array; - } - __name(_isUint8Array, "_isUint8Array"); - var destroyImpl = require_destroy(); - util.inherits(Writable, Stream); - function nop() { - } - __name(nop, "nop"); - function WritableState(options, stream) { - Duplex = Duplex || require_stream_duplex(); - options = options || {}; - var isDuplex = stream instanceof Duplex; - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; - var hwm = options.highWaterMark; - var writableHwm = options.writableHighWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; - if (hwm || hwm === 0) this.highWaterMark = hwm; - else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm; - else this.highWaterMark = defaultHwm; - this.highWaterMark = Math.floor(this.highWaterMark); - this.finalCalled = false; - this.needDrain = false; - this.ending = false; - this.ended = false; - this.finished = false; - this.destroyed = false; - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; - this.defaultEncoding = options.defaultEncoding || "utf8"; - this.length = 0; - this.writing = false; - this.corked = 0; - this.sync = true; - this.bufferProcessing = false; - this.onwrite = function(er) { - onwrite(stream, er); - }; - this.writecb = null; - this.writelen = 0; - this.bufferedRequest = null; - this.lastBufferedRequest = null; - this.pendingcb = 0; - this.prefinished = false; - this.errorEmitted = false; - this.bufferedRequestCount = 0; - this.corkedRequestsFree = new CorkedRequest(this); - } - __name(WritableState, "WritableState"); - WritableState.prototype.getBuffer = /* @__PURE__ */ __name(function getBuffer() { - var current = this.bufferedRequest; - var out = []; - while (current) { - out.push(current); - current = current.next; - } - return out; - }, "getBuffer"); - (function() { - try { - Object.defineProperty(WritableState.prototype, "buffer", { - get: internalUtil.deprecate(function() { - return this.getBuffer(); - }, "_writableState.buffer is deprecated. 
Use _writableState.getBuffer instead.", "DEP0003") - }); - } catch (_2) { - } - })(); - var realHasInstance; - if (typeof Symbol === "function" && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === "function") { - realHasInstance = Function.prototype[Symbol.hasInstance]; - Object.defineProperty(Writable, Symbol.hasInstance, { - value: /* @__PURE__ */ __name(function(object) { - if (realHasInstance.call(this, object)) return true; - if (this !== Writable) return false; - return object && object._writableState instanceof WritableState; - }, "value") - }); - } else { - realHasInstance = /* @__PURE__ */ __name(function(object) { - return object instanceof this; - }, "realHasInstance"); - } - function Writable(options) { - Duplex = Duplex || require_stream_duplex(); - if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { - return new Writable(options); - } - this._writableState = new WritableState(options, this); - this.writable = true; - if (options) { - if (typeof options.write === "function") this._write = options.write; - if (typeof options.writev === "function") this._writev = options.writev; - if (typeof options.destroy === "function") this._destroy = options.destroy; - if (typeof options.final === "function") this._final = options.final; - } - Stream.call(this); - } - __name(Writable, "Writable"); - Writable.prototype.pipe = function() { - this.emit("error", new Error("Cannot pipe, not readable")); - }; - function writeAfterEnd(stream, cb) { - var er = new Error("write after end"); - stream.emit("error", er); - pna.nextTick(cb, er); - } - __name(writeAfterEnd, "writeAfterEnd"); - function validChunk(stream, state, chunk, cb) { - var valid = true; - var er = false; - if (chunk === null) { - er = new TypeError("May not write null values to stream"); - } else if (typeof chunk !== "string" && chunk !== void 0 && !state.objectMode) { - er = new TypeError("Invalid non-string/buffer chunk"); - } - if (er) { - stream.emit("error", er); - pna.nextTick(cb, er); - valid = false; - } - return valid; - } - __name(validChunk, "validChunk"); - Writable.prototype.write = function(chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - var isBuf = !state.objectMode && _isUint8Array(chunk); - if (isBuf && !Buffer2.isBuffer(chunk)) { - chunk = _uint8ArrayToBuffer(chunk); - } - if (typeof encoding === "function") { - cb = encoding; - encoding = null; - } - if (isBuf) encoding = "buffer"; - else if (!encoding) encoding = state.defaultEncoding; - if (typeof cb !== "function") cb = nop; - if (state.ended) writeAfterEnd(this, cb); - else if (isBuf || validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); - } - return ret; - }; - Writable.prototype.cork = function() { - var state = this._writableState; - state.corked++; - }; - Writable.prototype.uncork = function() { - var state = this._writableState; - if (state.corked) { - state.corked--; - if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); - } - }; - Writable.prototype.setDefaultEncoding = /* @__PURE__ */ __name(function setDefaultEncoding(encoding) { - if (typeof encoding === "string") encoding = encoding.toLowerCase(); - if (!(["hex", "utf8", "utf-8", "ascii", "binary", "base64", "ucs2", "ucs-2", "utf16le", "utf-16le", "raw"].indexOf((encoding + "").toLowerCase()) > -1)) throw new TypeError("Unknown encoding: " + encoding); - this._writableState.defaultEncoding = 
encoding; - return this; - }, "setDefaultEncoding"); - function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === "string") { - chunk = Buffer2.from(chunk, encoding); - } - return chunk; - } - __name(decodeChunk, "decodeChunk"); - Object.defineProperty(Writable.prototype, "writableHighWaterMark", { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: /* @__PURE__ */ __name(function() { - return this._writableState.highWaterMark; - }, "get") - }); - function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { - if (!isBuf) { - var newChunk = decodeChunk(state, chunk, encoding); - if (chunk !== newChunk) { - isBuf = true; - encoding = "buffer"; - chunk = newChunk; - } - } - var len = state.objectMode ? 1 : chunk.length; - state.length += len; - var ret = state.length < state.highWaterMark; - if (!ret) state.needDrain = true; - if (state.writing || state.corked) { - var last = state.lastBufferedRequest; - state.lastBufferedRequest = { - chunk, - encoding, - isBuf, - callback: cb, - next: null - }; - if (last) { - last.next = state.lastBufferedRequest; - } else { - state.bufferedRequest = state.lastBufferedRequest; - } - state.bufferedRequestCount += 1; - } else { - doWrite(stream, state, false, len, chunk, encoding, cb); - } - return ret; - } - __name(writeOrBuffer, "writeOrBuffer"); - function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (writev) stream._writev(chunk, state.onwrite); - else stream._write(chunk, encoding, state.onwrite); - state.sync = false; - } - __name(doWrite, "doWrite"); - function onwriteError(stream, state, sync, er, cb) { - --state.pendingcb; - if (sync) { - pna.nextTick(cb, er); - pna.nextTick(finishMaybe, stream, state); - stream._writableState.errorEmitted = true; - stream.emit("error", er); - } else { - cb(er); - stream._writableState.errorEmitted = true; - stream.emit("error", er); - finishMaybe(stream, state); - } - } - __name(onwriteError, "onwriteError"); - function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; - } - __name(onwriteStateUpdate, "onwriteStateUpdate"); - function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; - onwriteStateUpdate(state); - if (er) onwriteError(stream, state, sync, er, cb); - else { - var finished = needFinish(state); - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); - } - if (sync) { - asyncWrite(afterWrite, stream, state, finished, cb); - } else { - afterWrite(stream, state, finished, cb); - } - } - } - __name(onwrite, "onwrite"); - function afterWrite(stream, state, finished, cb) { - if (!finished) onwriteDrain(stream, state); - state.pendingcb--; - cb(); - finishMaybe(stream, state); - } - __name(afterWrite, "afterWrite"); - function onwriteDrain(stream, state) { - if (state.length === 0 && state.needDrain) { - state.needDrain = false; - stream.emit("drain"); - } - } - __name(onwriteDrain, "onwriteDrain"); - function clearBuffer(stream, state) { - state.bufferProcessing = true; - var entry = state.bufferedRequest; - if (stream._writev && entry && entry.next) { - var l = state.bufferedRequestCount; - var buffer = new Array(l); - 
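The buffering logic around this point is what makes `write()` return `false` once the buffered length reaches `highWaterMark`, and what lets `cork()`/`uncork()` flush buffered chunks through a single `_writev()` call. A small demonstration against Node's own `stream.Writable` (a sketch of the contract, not this vendored copy):

const { Writable } = require("stream");

const sink = new Writable({
  highWaterMark: 4, // tiny buffer so backpressure triggers quickly
  write(chunk, encoding, callback) {
    setTimeout(callback, 10); // simulate a slow destination
  },
  writev(chunks, callback) {
    console.log(`flushed ${chunks.length} corked chunks at once`);
    callback();
  }
});

sink.cork();
sink.write("a");
sink.write("b");
process.nextTick(() => sink.uncork()); // both chunks reach _writev together

console.log(sink.write("cccc")); // false: buffered length now >= highWaterMark
sink.once("drain", () => console.log("drained; safe to write again"));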
var holder = state.corkedRequestsFree; - holder.entry = entry; - var count = 0; - var allBuffers = true; - while (entry) { - buffer[count] = entry; - if (!entry.isBuf) allBuffers = false; - entry = entry.next; - count += 1; - } - buffer.allBuffers = allBuffers; - doWrite(stream, state, true, state.length, buffer, "", holder.finish); - state.pendingcb++; - state.lastBufferedRequest = null; - if (holder.next) { - state.corkedRequestsFree = holder.next; - holder.next = null; - } else { - state.corkedRequestsFree = new CorkedRequest(state); - } - state.bufferedRequestCount = 0; - } else { - while (entry) { - var chunk = entry.chunk; - var encoding = entry.encoding; - var cb = entry.callback; - var len = state.objectMode ? 1 : chunk.length; - doWrite(stream, state, false, len, chunk, encoding, cb); - entry = entry.next; - state.bufferedRequestCount--; - if (state.writing) { - break; - } - } - if (entry === null) state.lastBufferedRequest = null; - } - state.bufferedRequest = entry; - state.bufferProcessing = false; - } - __name(clearBuffer, "clearBuffer"); - Writable.prototype._write = function(chunk, encoding, cb) { - cb(new Error("_write() is not implemented")); - }; - Writable.prototype._writev = null; - Writable.prototype.end = function(chunk, encoding, cb) { - var state = this._writableState; - if (typeof chunk === "function") { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === "function") { - cb = encoding; - encoding = null; - } - if (chunk !== null && chunk !== void 0) this.write(chunk, encoding); - if (state.corked) { - state.corked = 1; - this.uncork(); - } - if (!state.ending) endWritable(this, state, cb); - }; - function needFinish(state) { - return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; - } - __name(needFinish, "needFinish"); - function callFinal(stream, state) { - stream._final(function(err) { - state.pendingcb--; - if (err) { - stream.emit("error", err); - } - state.prefinished = true; - stream.emit("prefinish"); - finishMaybe(stream, state); - }); - } - __name(callFinal, "callFinal"); - function prefinish(stream, state) { - if (!state.prefinished && !state.finalCalled) { - if (typeof stream._final === "function") { - state.pendingcb++; - state.finalCalled = true; - pna.nextTick(callFinal, stream, state); - } else { - state.prefinished = true; - stream.emit("prefinish"); - } - } - } - __name(prefinish, "prefinish"); - function finishMaybe(stream, state) { - var need = needFinish(state); - if (need) { - prefinish(stream, state); - if (state.pendingcb === 0) { - state.finished = true; - stream.emit("finish"); - } - } - return need; - } - __name(finishMaybe, "finishMaybe"); - function endWritable(stream, state, cb) { - state.ending = true; - finishMaybe(stream, state); - if (cb) { - if (state.finished) pna.nextTick(cb); - else stream.once("finish", cb); - } - state.ended = true; - stream.writable = false; - } - __name(endWritable, "endWritable"); - function onCorkedFinish(corkReq, state, err) { - var entry = corkReq.entry; - corkReq.entry = null; - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; - } - state.corkedRequestsFree.next = corkReq; - } - __name(onCorkedFinish, "onCorkedFinish"); - Object.defineProperty(Writable.prototype, "destroyed", { - get: /* @__PURE__ */ __name(function() { - if (this._writableState === void 0) { - return false; - } - return this._writableState.destroyed; - }, "get"), - set: /* @__PURE__ */ 
__name(function(value) { - if (!this._writableState) { - return; - } - this._writableState.destroyed = value; - }, "set") - }); - Writable.prototype.destroy = destroyImpl.destroy; - Writable.prototype._undestroy = destroyImpl.undestroy; - Writable.prototype._destroy = function(err, cb) { - this.end(); - cb(err); - }; - } -}); - -// ../node_modules/jszip/node_modules/readable-stream/lib/_stream_duplex.js -var require_stream_duplex = __commonJS({ - "../node_modules/jszip/node_modules/readable-stream/lib/_stream_duplex.js"(exports2, module2) { - "use strict"; - var pna = require_process_nextick_args(); - var objectKeys = Object.keys || function(obj) { - var keys2 = []; - for (var key in obj) { - keys2.push(key); - } - return keys2; - }; - module2.exports = Duplex; - var util = Object.create(require_util8()); - util.inherits = require_inherits(); - var Readable = require_stream_readable(); - var Writable = require_stream_writable(); - util.inherits(Duplex, Readable); - { - keys = objectKeys(Writable.prototype); - for (v = 0; v < keys.length; v++) { - method = keys[v]; - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; - } - } - var keys; - var method; - var v; - function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); - Readable.call(this, options); - Writable.call(this, options); - if (options && options.readable === false) this.readable = false; - if (options && options.writable === false) this.writable = false; - this.allowHalfOpen = true; - if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; - this.once("end", onend); - } - __name(Duplex, "Duplex"); - Object.defineProperty(Duplex.prototype, "writableHighWaterMark", { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: /* @__PURE__ */ __name(function() { - return this._writableState.highWaterMark; - }, "get") - }); - function onend() { - if (this.allowHalfOpen || this._writableState.ended) return; - pna.nextTick(onEndNT, this); - } - __name(onend, "onend"); - function onEndNT(self2) { - self2.end(); - } - __name(onEndNT, "onEndNT"); - Object.defineProperty(Duplex.prototype, "destroyed", { - get: /* @__PURE__ */ __name(function() { - if (this._readableState === void 0 || this._writableState === void 0) { - return false; - } - return this._readableState.destroyed && this._writableState.destroyed; - }, "get"), - set: /* @__PURE__ */ __name(function(value) { - if (this._readableState === void 0 || this._writableState === void 0) { - return; - } - this._readableState.destroyed = value; - this._writableState.destroyed = value; - }, "set") - }); - Duplex.prototype._destroy = function(err, cb) { - this.push(null); - this.end(); - pna.nextTick(cb, err); - }; - } -}); - -// ../node_modules/jszip/node_modules/string_decoder/lib/string_decoder.js -var require_string_decoder = __commonJS({ - "../node_modules/jszip/node_modules/string_decoder/lib/string_decoder.js"(exports2) { - "use strict"; - var Buffer2 = require_safe_buffer().Buffer; - var isEncoding = Buffer2.isEncoding || function(encoding) { - encoding = "" + encoding; - switch (encoding && encoding.toLowerCase()) { - case "hex": - case "utf8": - case "utf-8": - case "ascii": - case "binary": - case "base64": - case "ucs2": - case "ucs-2": - case "utf16le": - case "utf-16le": - case "raw": - return true; - default: - return false; - } - }; - function _normalizeEncoding(enc) { - if (!enc) return 
"utf8"; - var retried; - while (true) { - switch (enc) { - case "utf8": - case "utf-8": - return "utf8"; - case "ucs2": - case "ucs-2": - case "utf16le": - case "utf-16le": - return "utf16le"; - case "latin1": - case "binary": - return "latin1"; - case "base64": - case "ascii": - case "hex": - return enc; - default: - if (retried) return; - enc = ("" + enc).toLowerCase(); - retried = true; - } - } - } - __name(_normalizeEncoding, "_normalizeEncoding"); - function normalizeEncoding(enc) { - var nenc = _normalizeEncoding(enc); - if (typeof nenc !== "string" && (Buffer2.isEncoding === isEncoding || !isEncoding(enc))) throw new Error("Unknown encoding: " + enc); - return nenc || enc; - } - __name(normalizeEncoding, "normalizeEncoding"); - exports2.StringDecoder = StringDecoder; - function StringDecoder(encoding) { - this.encoding = normalizeEncoding(encoding); - var nb; - switch (this.encoding) { - case "utf16le": - this.text = utf16Text; - this.end = utf16End; - nb = 4; - break; - case "utf8": - this.fillLast = utf8FillLast; - nb = 4; - break; - case "base64": - this.text = base64Text; - this.end = base64End; - nb = 3; - break; - default: - this.write = simpleWrite; - this.end = simpleEnd; - return; - } - this.lastNeed = 0; - this.lastTotal = 0; - this.lastChar = Buffer2.allocUnsafe(nb); - } - __name(StringDecoder, "StringDecoder"); - StringDecoder.prototype.write = function(buf) { - if (buf.length === 0) return ""; - var r; - var i; - if (this.lastNeed) { - r = this.fillLast(buf); - if (r === void 0) return ""; - i = this.lastNeed; - this.lastNeed = 0; - } else { - i = 0; - } - if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); - return r || ""; - }; - StringDecoder.prototype.end = utf8End; - StringDecoder.prototype.text = utf8Text; - StringDecoder.prototype.fillLast = function(buf) { - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); - } - buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); - this.lastNeed -= buf.length; - }; - function utf8CheckByte(byte) { - if (byte <= 127) return 0; - else if (byte >> 5 === 6) return 2; - else if (byte >> 4 === 14) return 3; - else if (byte >> 3 === 30) return 4; - return byte >> 6 === 2 ? 
-1 : -2; - } - __name(utf8CheckByte, "utf8CheckByte"); - function utf8CheckIncomplete(self2, buf, i) { - var j = buf.length - 1; - if (j < i) return 0; - var nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self2.lastNeed = nb - 1; - return nb; - } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) self2.lastNeed = nb - 2; - return nb; - } - if (--j < i || nb === -2) return 0; - nb = utf8CheckByte(buf[j]); - if (nb >= 0) { - if (nb > 0) { - if (nb === 2) nb = 0; - else self2.lastNeed = nb - 3; - } - return nb; - } - return 0; - } - __name(utf8CheckIncomplete, "utf8CheckIncomplete"); - function utf8CheckExtraBytes(self2, buf, p) { - if ((buf[0] & 192) !== 128) { - self2.lastNeed = 0; - return "\uFFFD"; - } - if (self2.lastNeed > 1 && buf.length > 1) { - if ((buf[1] & 192) !== 128) { - self2.lastNeed = 1; - return "\uFFFD"; - } - if (self2.lastNeed > 2 && buf.length > 2) { - if ((buf[2] & 192) !== 128) { - self2.lastNeed = 2; - return "\uFFFD"; - } - } - } - } - __name(utf8CheckExtraBytes, "utf8CheckExtraBytes"); - function utf8FillLast(buf) { - var p = this.lastTotal - this.lastNeed; - var r = utf8CheckExtraBytes(this, buf, p); - if (r !== void 0) return r; - if (this.lastNeed <= buf.length) { - buf.copy(this.lastChar, p, 0, this.lastNeed); - return this.lastChar.toString(this.encoding, 0, this.lastTotal); - } - buf.copy(this.lastChar, p, 0, buf.length); - this.lastNeed -= buf.length; - } - __name(utf8FillLast, "utf8FillLast"); - function utf8Text(buf, i) { - var total = utf8CheckIncomplete(this, buf, i); - if (!this.lastNeed) return buf.toString("utf8", i); - this.lastTotal = total; - var end = buf.length - (total - this.lastNeed); - buf.copy(this.lastChar, 0, end); - return buf.toString("utf8", i, end); - } - __name(utf8Text, "utf8Text"); - function utf8End(buf) { - var r = buf && buf.length ? this.write(buf) : ""; - if (this.lastNeed) return r + "\uFFFD"; - return r; - } - __name(utf8End, "utf8End"); - function utf16Text(buf, i) { - if ((buf.length - i) % 2 === 0) { - var r = buf.toString("utf16le", i); - if (r) { - var c = r.charCodeAt(r.length - 1); - if (c >= 55296 && c <= 56319) { - this.lastNeed = 2; - this.lastTotal = 4; - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; - return r.slice(0, -1); - } - } - return r; - } - this.lastNeed = 1; - this.lastTotal = 2; - this.lastChar[0] = buf[buf.length - 1]; - return buf.toString("utf16le", i, buf.length - 1); - } - __name(utf16Text, "utf16Text"); - function utf16End(buf) { - var r = buf && buf.length ? this.write(buf) : ""; - if (this.lastNeed) { - var end = this.lastTotal - this.lastNeed; - return r + this.lastChar.toString("utf16le", 0, end); - } - return r; - } - __name(utf16End, "utf16End"); - function base64Text(buf, i) { - var n = (buf.length - i) % 3; - if (n === 0) return buf.toString("base64", i); - this.lastNeed = 3 - n; - this.lastTotal = 3; - if (n === 1) { - this.lastChar[0] = buf[buf.length - 1]; - } else { - this.lastChar[0] = buf[buf.length - 2]; - this.lastChar[1] = buf[buf.length - 1]; - } - return buf.toString("base64", i, buf.length - n); - } - __name(base64Text, "base64Text"); - function base64End(buf) { - var r = buf && buf.length ? 
this.write(buf) : ""; - if (this.lastNeed) return r + this.lastChar.toString("base64", 0, 3 - this.lastNeed); - return r; - } - __name(base64End, "base64End"); - function simpleWrite(buf) { - return buf.toString(this.encoding); - } - __name(simpleWrite, "simpleWrite"); - function simpleEnd(buf) { - return buf && buf.length ? this.write(buf) : ""; - } - __name(simpleEnd, "simpleEnd"); - } -}); - -// ../node_modules/jszip/node_modules/readable-stream/lib/_stream_readable.js -var require_stream_readable = __commonJS({ - "../node_modules/jszip/node_modules/readable-stream/lib/_stream_readable.js"(exports2, module2) { - "use strict"; - var pna = require_process_nextick_args(); - module2.exports = Readable; - var isArray = require_isarray(); - var Duplex; - Readable.ReadableState = ReadableState; - var EE = require("events").EventEmitter; - var EElistenerCount = /* @__PURE__ */ __name(function(emitter, type) { - return emitter.listeners(type).length; - }, "EElistenerCount"); - var Stream = require_stream(); - var Buffer2 = require_safe_buffer().Buffer; - var OurUint8Array = (typeof global !== "undefined" ? global : typeof window !== "undefined" ? window : typeof self !== "undefined" ? self : {}).Uint8Array || function() { - }; - function _uint8ArrayToBuffer(chunk) { - return Buffer2.from(chunk); - } - __name(_uint8ArrayToBuffer, "_uint8ArrayToBuffer"); - function _isUint8Array(obj) { - return Buffer2.isBuffer(obj) || obj instanceof OurUint8Array; - } - __name(_isUint8Array, "_isUint8Array"); - var util = Object.create(require_util8()); - util.inherits = require_inherits(); - var debugUtil = require("util"); - var debug = void 0; - if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog("stream"); - } else { - debug = /* @__PURE__ */ __name(function() { - }, "debug"); - } - var BufferList = require_BufferList(); - var destroyImpl = require_destroy(); - var StringDecoder; - util.inherits(Readable, Stream); - var kProxyEvents = ["error", "close", "destroy", "pause", "resume"]; - function prependListener(emitter, event, fn) { - if (typeof emitter.prependListener === "function") return emitter.prependListener(event, fn); - if (!emitter._events || !emitter._events[event]) emitter.on(event, fn); - else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn); - else emitter._events[event] = [fn, emitter._events[event]]; - } - __name(prependListener, "prependListener"); - function ReadableState(options, stream) { - Duplex = Duplex || require_stream_duplex(); - options = options || {}; - var isDuplex = stream instanceof Duplex; - this.objectMode = !!options.objectMode; - if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; - var hwm = options.highWaterMark; - var readableHwm = options.readableHighWaterMark; - var defaultHwm = this.objectMode ? 
16 : 16 * 1024; - if (hwm || hwm === 0) this.highWaterMark = hwm; - else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm; - else this.highWaterMark = defaultHwm; - this.highWaterMark = Math.floor(this.highWaterMark); - this.buffer = new BufferList(); - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; - this.sync = true; - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; - this.destroyed = false; - this.defaultEncoding = options.defaultEncoding || "utf8"; - this.awaitDrain = 0; - this.readingMore = false; - this.decoder = null; - this.encoding = null; - if (options.encoding) { - if (!StringDecoder) StringDecoder = require_string_decoder().StringDecoder; - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; - } - } - __name(ReadableState, "ReadableState"); - function Readable(options) { - Duplex = Duplex || require_stream_duplex(); - if (!(this instanceof Readable)) return new Readable(options); - this._readableState = new ReadableState(options, this); - this.readable = true; - if (options) { - if (typeof options.read === "function") this._read = options.read; - if (typeof options.destroy === "function") this._destroy = options.destroy; - } - Stream.call(this); - } - __name(Readable, "Readable"); - Object.defineProperty(Readable.prototype, "destroyed", { - get: /* @__PURE__ */ __name(function() { - if (this._readableState === void 0) { - return false; - } - return this._readableState.destroyed; - }, "get"), - set: /* @__PURE__ */ __name(function(value) { - if (!this._readableState) { - return; - } - this._readableState.destroyed = value; - }, "set") - }); - Readable.prototype.destroy = destroyImpl.destroy; - Readable.prototype._undestroy = destroyImpl.undestroy; - Readable.prototype._destroy = function(err, cb) { - this.push(null); - cb(err); - }; - Readable.prototype.push = function(chunk, encoding) { - var state = this._readableState; - var skipChunkCheck; - if (!state.objectMode) { - if (typeof chunk === "string") { - encoding = encoding || state.defaultEncoding; - if (encoding !== state.encoding) { - chunk = Buffer2.from(chunk, encoding); - encoding = ""; - } - skipChunkCheck = true; - } - } else { - skipChunkCheck = true; - } - return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); - }; - Readable.prototype.unshift = function(chunk) { - return readableAddChunk(this, chunk, null, true, false); - }; - function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { - var state = stream._readableState; - if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); - } else { - var er; - if (!skipChunkCheck) er = chunkInvalid(state, chunk); - if (er) { - stream.emit("error", er); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (typeof chunk !== "string" && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer2.prototype) { - chunk = _uint8ArrayToBuffer(chunk); - } - if (addToFront) { - if (state.endEmitted) stream.emit("error", new Error("stream.unshift() after end event")); - else addChunk(stream, state, chunk, true); - } else if (state.ended) { - stream.emit("error", new Error("stream.push() after EOF")); - } else { - state.reading = false; - if (state.decoder && !encoding) { - chunk = state.decoder.write(chunk); - if (state.objectMode || chunk.length !== 0) 
addChunk(stream, state, chunk, false); - else maybeReadMore(stream, state); - } else { - addChunk(stream, state, chunk, false); - } - } - } else if (!addToFront) { - state.reading = false; - } - } - return needMoreData(state); - } - __name(readableAddChunk, "readableAddChunk"); - function addChunk(stream, state, chunk, addToFront) { - if (state.flowing && state.length === 0 && !state.sync) { - stream.emit("data", chunk); - stream.read(0); - } else { - state.length += state.objectMode ? 1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk); - else state.buffer.push(chunk); - if (state.needReadable) emitReadable(stream); - } - maybeReadMore(stream, state); - } - __name(addChunk, "addChunk"); - function chunkInvalid(state, chunk) { - var er; - if (!_isUint8Array(chunk) && typeof chunk !== "string" && chunk !== void 0 && !state.objectMode) { - er = new TypeError("Invalid non-string/buffer chunk"); - } - return er; - } - __name(chunkInvalid, "chunkInvalid"); - function needMoreData(state) { - return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); - } - __name(needMoreData, "needMoreData"); - Readable.prototype.isPaused = function() { - return this._readableState.flowing === false; - }; - Readable.prototype.setEncoding = function(enc) { - if (!StringDecoder) StringDecoder = require_string_decoder().StringDecoder; - this._readableState.decoder = new StringDecoder(enc); - this._readableState.encoding = enc; - return this; - }; - var MAX_HWM = 8388608; - function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - n = MAX_HWM; - } else { - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; - } - return n; - } - __name(computeNewHighWaterMark, "computeNewHighWaterMark"); - function howMuchToRead(n, state) { - if (n <= 0 || state.length === 0 && state.ended) return 0; - if (state.objectMode) return 1; - if (n !== n) { - if (state.flowing && state.length) return state.buffer.head.data.length; - else return state.length; - } - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - if (n <= state.length) return n; - if (!state.ended) { - state.needReadable = true; - return 0; - } - return state.length; - } - __name(howMuchToRead, "howMuchToRead"); - Readable.prototype.read = function(n) { - debug("read", n); - n = parseInt(n, 10); - var state = this._readableState; - var nOrig = n; - if (n !== 0) state.emittedReadable = false; - if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { - debug("read: emitReadable", state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this); - else emitReadable(this); - return null; - } - n = howMuchToRead(n, state); - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); - return null; - } - var doRead = state.needReadable; - debug("need readable", doRead); - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug("length less than watermark", doRead); - } - if (state.ended || state.reading) { - doRead = false; - debug("reading or ended", doRead); - } else if (doRead) { - debug("do read"); - state.reading = true; - state.sync = true; - if (state.length === 0) state.needReadable = true; - this._read(state.highWaterMark); - state.sync = false; - if (!state.reading) n = howMuchToRead(nOrig, state); - } - var ret; - if (n > 0) ret = fromList(n, state); - else ret = null; - if (ret === null) { - state.needReadable = true; - n = 
0; - } else { - state.length -= n; - } - if (state.length === 0) { - if (!state.ended) state.needReadable = true; - if (nOrig !== n && state.ended) endReadable(this); - } - if (ret !== null) this.emit("data", ret); - return ret; - }; - function onEofChunk(stream, state) { - if (state.ended) return; - if (state.decoder) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunk.length; - } - } - state.ended = true; - emitReadable(stream); - } - __name(onEofChunk, "onEofChunk"); - function emitReadable(stream) { - var state = stream._readableState; - state.needReadable = false; - if (!state.emittedReadable) { - debug("emitReadable", state.flowing); - state.emittedReadable = true; - if (state.sync) pna.nextTick(emitReadable_, stream); - else emitReadable_(stream); - } - } - __name(emitReadable, "emitReadable"); - function emitReadable_(stream) { - debug("emit readable"); - stream.emit("readable"); - flow(stream); - } - __name(emitReadable_, "emitReadable_"); - function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - pna.nextTick(maybeReadMore_, stream, state); - } - } - __name(maybeReadMore, "maybeReadMore"); - function maybeReadMore_(stream, state) { - var len = state.length; - while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { - debug("maybeReadMore read 0"); - stream.read(0); - if (len === state.length) - break; - else len = state.length; - } - state.readingMore = false; - } - __name(maybeReadMore_, "maybeReadMore_"); - Readable.prototype._read = function(n) { - this.emit("error", new Error("_read() is not implemented")); - }; - Readable.prototype.pipe = function(dest, pipeOpts) { - var src = this; - var state = this._readableState; - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; - case 1: - state.pipes = [state.pipes, dest]; - break; - default: - state.pipes.push(dest); - break; - } - state.pipesCount += 1; - debug("pipe count=%d opts=%j", state.pipesCount, pipeOpts); - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - var endFn = doEnd ? 
onend : unpipe; - if (state.endEmitted) pna.nextTick(endFn); - else src.once("end", endFn); - dest.on("unpipe", onunpipe); - function onunpipe(readable, unpipeInfo) { - debug("onunpipe"); - if (readable === src) { - if (unpipeInfo && unpipeInfo.hasUnpiped === false) { - unpipeInfo.hasUnpiped = true; - cleanup(); - } - } - } - __name(onunpipe, "onunpipe"); - function onend() { - debug("onend"); - dest.end(); - } - __name(onend, "onend"); - var ondrain = pipeOnDrain(src); - dest.on("drain", ondrain); - var cleanedUp = false; - function cleanup() { - debug("cleanup"); - dest.removeListener("close", onclose); - dest.removeListener("finish", onfinish); - dest.removeListener("drain", ondrain); - dest.removeListener("error", onerror); - dest.removeListener("unpipe", onunpipe); - src.removeListener("end", onend); - src.removeListener("end", unpipe); - src.removeListener("data", ondata); - cleanedUp = true; - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); - } - __name(cleanup, "cleanup"); - var increasedAwaitDrain = false; - src.on("data", ondata); - function ondata(chunk) { - debug("ondata"); - increasedAwaitDrain = false; - var ret = dest.write(chunk); - if (false === ret && !increasedAwaitDrain) { - if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { - debug("false write response, pause", state.awaitDrain); - state.awaitDrain++; - increasedAwaitDrain = true; - } - src.pause(); - } - } - __name(ondata, "ondata"); - function onerror(er) { - debug("onerror", er); - unpipe(); - dest.removeListener("error", onerror); - if (EElistenerCount(dest, "error") === 0) dest.emit("error", er); - } - __name(onerror, "onerror"); - prependListener(dest, "error", onerror); - function onclose() { - dest.removeListener("finish", onfinish); - unpipe(); - } - __name(onclose, "onclose"); - dest.once("close", onclose); - function onfinish() { - debug("onfinish"); - dest.removeListener("close", onclose); - unpipe(); - } - __name(onfinish, "onfinish"); - dest.once("finish", onfinish); - function unpipe() { - debug("unpipe"); - src.unpipe(dest); - } - __name(unpipe, "unpipe"); - dest.emit("pipe", src); - if (!state.flowing) { - debug("pipe resume"); - src.resume(); - } - return dest; - }; - function pipeOnDrain(src) { - return function() { - var state = src._readableState; - debug("pipeOnDrain", state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; - if (state.awaitDrain === 0 && EElistenerCount(src, "data")) { - state.flowing = true; - flow(src); - } - }; - } - __name(pipeOnDrain, "pipeOnDrain"); - Readable.prototype.unpipe = function(dest) { - var state = this._readableState; - var unpipeInfo = { hasUnpiped: false }; - if (state.pipesCount === 0) return this; - if (state.pipesCount === 1) { - if (dest && dest !== state.pipes) return this; - if (!dest) dest = state.pipes; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit("unpipe", this, unpipeInfo); - return this; - } - if (!dest) { - var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - for (var i = 0; i < len; i++) { - dests[i].emit("unpipe", this, { hasUnpiped: false }); - } - return this; - } - var index = indexOf(state.pipes, dest); - if (index === -1) return this; - state.pipes.splice(index, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; - dest.emit("unpipe", this, unpipeInfo); - 
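// --- Illustrative sketch (editor's annotation, not part of the vendored patch):
// the awaitDrain counter maintained by ondata()/pipeOnDrain() above is what
// makes pipe() pause the source when dest.write() returns false and resume it
// on 'drain'. The equivalent manual wiring, using only Node's public API:
const { Readable, Writable } = require("stream");
const src = Readable.from(["x".repeat(32), "y".repeat(32)]);
const slow = new Writable({
  highWaterMark: 8, // tiny buffer so write() returns false quickly
  write(chunk, enc, cb) { setTimeout(cb, 10); } // slow consumer
});
src.on("data", (chunk) => {
  if (!slow.write(chunk)) {                    // same check ondata() performs
    src.pause();                               // state.awaitDrain++ in pipe()
    slow.once("drain", () => src.resume());    // pipeOnDrain() equivalent
  }
});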
return this; - }; - Readable.prototype.on = function(ev, fn) { - var res = Stream.prototype.on.call(this, ev, fn); - if (ev === "data") { - if (this._readableState.flowing !== false) this.resume(); - } else if (ev === "readable") { - var state = this._readableState; - if (!state.endEmitted && !state.readableListening) { - state.readableListening = state.needReadable = true; - state.emittedReadable = false; - if (!state.reading) { - pna.nextTick(nReadingNextTick, this); - } else if (state.length) { - emitReadable(this); - } - } - } - return res; - }; - Readable.prototype.addListener = Readable.prototype.on; - function nReadingNextTick(self2) { - debug("readable nexttick read 0"); - self2.read(0); - } - __name(nReadingNextTick, "nReadingNextTick"); - Readable.prototype.resume = function() { - var state = this._readableState; - if (!state.flowing) { - debug("resume"); - state.flowing = true; - resume(this, state); - } - return this; - }; - function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - pna.nextTick(resume_, stream, state); - } - } - __name(resume, "resume"); - function resume_(stream, state) { - if (!state.reading) { - debug("resume read 0"); - stream.read(0); - } - state.resumeScheduled = false; - state.awaitDrain = 0; - stream.emit("resume"); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); - } - __name(resume_, "resume_"); - Readable.prototype.pause = function() { - debug("call pause flowing=%j", this._readableState.flowing); - if (false !== this._readableState.flowing) { - debug("pause"); - this._readableState.flowing = false; - this.emit("pause"); - } - return this; - }; - function flow(stream) { - var state = stream._readableState; - debug("flow", state.flowing); - while (state.flowing && stream.read() !== null) { - } - } - __name(flow, "flow"); - Readable.prototype.wrap = function(stream) { - var _this = this; - var state = this._readableState; - var paused = false; - stream.on("end", function() { - debug("wrapped end"); - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) _this.push(chunk); - } - _this.push(null); - }); - stream.on("data", function(chunk) { - debug("wrapped data"); - if (state.decoder) chunk = state.decoder.write(chunk); - if (state.objectMode && (chunk === null || chunk === void 0)) return; - else if (!state.objectMode && (!chunk || !chunk.length)) return; - var ret = _this.push(chunk); - if (!ret) { - paused = true; - stream.pause(); - } - }); - for (var i in stream) { - if (this[i] === void 0 && typeof stream[i] === "function") { - this[i] = /* @__PURE__ */ function(method) { - return function() { - return stream[method].apply(stream, arguments); - }; - }(i); - } - } - for (var n = 0; n < kProxyEvents.length; n++) { - stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); - } - this._read = function(n2) { - debug("wrapped _read", n2); - if (paused) { - paused = false; - stream.resume(); - } - }; - return this; - }; - Object.defineProperty(Readable.prototype, "readableHighWaterMark", { - // making it explicit this property is not enumerable - // because otherwise some prototype manipulation in - // userland will fail - enumerable: false, - get: /* @__PURE__ */ __name(function() { - return this._readableState.highWaterMark; - }, "get") + return this; + }; + function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } + } + __name(resume, "resume"); + 
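// --- Illustrative sketch (editor's annotation, not part of the vendored patch):
// resume() above only *schedules* resume_() via pna.nextTick, so a stream that
// enters flowing mode does not emit 'data' synchronously — reads begin one
// tick after the listener is attached:
const { Readable } = require("stream");
const r = Readable.from(["one", "two"]);
r.on("data", (c) => console.log("data:", c.toString()));
// flowing is already true here, but no 'data' event has fired yet;
// the first read happens on the next tick.
console.log("listener attached; flowing =", r.readableFlowing);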
function resume_(stream, state) { + if (!state.reading) { + debug("resume read 0"); + stream.read(0); + } + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit("resume"); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); + } + __name(resume_, "resume_"); + Readable.prototype.pause = function() { + debug("call pause flowing=%j", this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug("pause"); + this._readableState.flowing = false; + this.emit("pause"); + } + return this; + }; + function flow(stream) { + var state = stream._readableState; + debug("flow", state.flowing); + while (state.flowing && stream.read() !== null) { + } + } + __name(flow, "flow"); + Readable.prototype.wrap = function(stream) { + var _this = this; + var state = this._readableState; + var paused = false; + stream.on("end", function() { + debug("wrapped end"); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + _this.push(null); + }); + stream.on("data", function(chunk) { + debug("wrapped data"); + if (state.decoder) chunk = state.decoder.write(chunk); + if (state.objectMode && (chunk === null || chunk === void 0)) return; + else if (!state.objectMode && (!chunk || !chunk.length)) return; + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + for (var i in stream) { + if (this[i] === void 0 && typeof stream[i] === "function") { + this[i] = /* @__PURE__ */ function(method) { + return function() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + this._read = function(n2) { + debug("wrapped _read", n2); + if (paused) { + paused = false; + stream.resume(); + } + }; + return this; + }; + Object.defineProperty(Readable.prototype, "readableHighWaterMark", { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: /* @__PURE__ */ __name(function() { + return this._readableState.highWaterMark; + }, "get") }); Readable._fromList = fromList; function fromList(n, state) { @@ -26750,7 +22599,7 @@ var require_support = __commonJS({ var require_base64 = __commonJS({ "../node_modules/jszip/lib/base64.js"(exports2) { "use strict"; - var utils = require_utils5(); + var utils = require_utils3(); var support = require_support(); var _keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/="; exports2.encode = function(input) { @@ -27421,7 +23270,7 @@ var require_setImmediate = __commonJS({ }); // ../node_modules/jszip/lib/utils.js -var require_utils5 = __commonJS({ +var require_utils3 = __commonJS({ "../node_modules/jszip/lib/utils.js"(exports2) { "use strict"; var support = require_support(); @@ -27982,7 +23831,7 @@ var require_GenericWorker = __commonJS({ var require_utf8 = __commonJS({ "../node_modules/jszip/lib/utf8.js"(exports2) { "use strict"; - var utils = require_utils5(); + var utils = require_utils3(); var support = require_support(); var nodejsUtils = require_nodejsUtils(); var GenericWorker = require_GenericWorker(); @@ -28175,7 +24024,7 @@ var require_ConvertWorker = __commonJS({ "../node_modules/jszip/lib/stream/ConvertWorker.js"(exports2, module2) { "use strict"; var GenericWorker = require_GenericWorker(); - var utils = require_utils5(); + var utils = require_utils3(); function 
ConvertWorker(destType) { GenericWorker.call(this, "ConvertWorker to " + destType); this.destType = destType; @@ -28197,7 +24046,7 @@ var require_NodejsStreamOutputAdapter = __commonJS({ "../node_modules/jszip/lib/nodejs/NodejsStreamOutputAdapter.js"(exports2, module2) { "use strict"; var Readable = require_readable2().Readable; - var utils = require_utils5(); + var utils = require_utils3(); utils.inherits(NodejsStreamOutputAdapter, Readable); function NodejsStreamOutputAdapter(helper, options, updateCb) { Readable.call(this, options); @@ -28228,7 +24077,7 @@ var require_NodejsStreamOutputAdapter = __commonJS({ var require_StreamHelper = __commonJS({ "../node_modules/jszip/lib/stream/StreamHelper.js"(exports2, module2) { "use strict"; - var utils = require_utils5(); + var utils = require_utils3(); var ConvertWorker = require_ConvertWorker(); var GenericWorker = require_GenericWorker(); var base64 = require_base64(); @@ -28409,7 +24258,7 @@ var require_defaults = __commonJS({ var require_DataWorker = __commonJS({ "../node_modules/jszip/lib/stream/DataWorker.js"(exports2, module2) { "use strict"; - var utils = require_utils5(); + var utils = require_utils3(); var GenericWorker = require_GenericWorker(); var DEFAULT_BLOCK_SIZE = 16 * 1024; function DataWorker(dataP) { @@ -28498,7 +24347,7 @@ var require_DataWorker = __commonJS({ var require_crc32 = __commonJS({ "../node_modules/jszip/lib/crc32.js"(exports2, module2) { "use strict"; - var utils = require_utils5(); + var utils = require_utils3(); function makeTable() { var c, table = []; for (var n = 0; n < 256; n++) { @@ -28550,7 +24399,7 @@ var require_Crc32Probe = __commonJS({ "use strict"; var GenericWorker = require_GenericWorker(); var crc32 = require_crc32(); - var utils = require_utils5(); + var utils = require_utils3(); function Crc32Probe() { GenericWorker.call(this, "Crc32Probe"); this.withStreamInfo("crc32", 0); @@ -28569,7 +24418,7 @@ var require_Crc32Probe = __commonJS({ var require_DataLengthProbe = __commonJS({ "../node_modules/jszip/lib/stream/DataLengthProbe.js"(exports2, module2) { "use strict"; - var utils = require_utils5(); + var utils = require_utils3(); var GenericWorker = require_GenericWorker(); function DataLengthProbe(propName) { GenericWorker.call(this, "DataLengthProbe for " + propName); @@ -33063,7 +28912,7 @@ var require_flate = __commonJS({ "use strict"; var USE_TYPEDARRAY = typeof Uint8Array !== "undefined" && typeof Uint16Array !== "undefined" && typeof Uint32Array !== "undefined"; var pako = require_pako(); - var utils = require_utils5(); + var utils = require_utils3(); var GenericWorker = require_GenericWorker(); var ARRAY_TYPE = USE_TYPEDARRAY ? 
"uint8array" : "array"; exports2.magic = "\b\0"; @@ -33152,7 +29001,7 @@ var require_signature = __commonJS({ var require_ZipFileWorker = __commonJS({ "../node_modules/jszip/lib/generate/ZipFileWorker.js"(exports2, module2) { "use strict"; - var utils = require_utils5(); + var utils = require_utils3(); var GenericWorker = require_GenericWorker(); var utf8 = require_utf8(); var crc32 = require_crc32(); @@ -33478,7 +29327,7 @@ var require_generate = __commonJS({ var require_NodejsStreamInputAdapter = __commonJS({ "../node_modules/jszip/lib/nodejs/NodejsStreamInputAdapter.js"(exports2, module2) { "use strict"; - var utils = require_utils5(); + var utils = require_utils3(); var GenericWorker = require_GenericWorker(); function NodejsStreamInputAdapter(filename, stream) { GenericWorker.call(this, "Nodejs stream input adapter for " + filename); @@ -33539,7 +29388,7 @@ var require_object = __commonJS({ "../node_modules/jszip/lib/object.js"(exports2, module2) { "use strict"; var utf8 = require_utf8(); - var utils = require_utils5(); + var utils = require_utils3(); var GenericWorker = require_GenericWorker(); var StreamHelper = require_StreamHelper(); var defaults = require_defaults(); @@ -33812,7 +29661,7 @@ var require_object = __commonJS({ var require_DataReader = __commonJS({ "../node_modules/jszip/lib/reader/DataReader.js"(exports2, module2) { "use strict"; - var utils = require_utils5(); + var utils = require_utils3(); function DataReader(data) { this.data = data; this.length = data.length; @@ -33936,7 +29785,7 @@ var require_ArrayReader = __commonJS({ "../node_modules/jszip/lib/reader/ArrayReader.js"(exports2, module2) { "use strict"; var DataReader = require_DataReader(); - var utils = require_utils5(); + var utils = require_utils3(); function ArrayReader(data) { DataReader.call(this, data); for (var i = 0; i < this.data.length; i++) { @@ -33979,7 +29828,7 @@ var require_StringReader = __commonJS({ "../node_modules/jszip/lib/reader/StringReader.js"(exports2, module2) { "use strict"; var DataReader = require_DataReader(); - var utils = require_utils5(); + var utils = require_utils3(); function StringReader(data) { DataReader.call(this, data); } @@ -34010,7 +29859,7 @@ var require_Uint8ArrayReader = __commonJS({ "../node_modules/jszip/lib/reader/Uint8ArrayReader.js"(exports2, module2) { "use strict"; var ArrayReader = require_ArrayReader(); - var utils = require_utils5(); + var utils = require_utils3(); function Uint8ArrayReader(data) { ArrayReader.call(this, data); } @@ -34034,7 +29883,7 @@ var require_NodeBufferReader = __commonJS({ "../node_modules/jszip/lib/reader/NodeBufferReader.js"(exports2, module2) { "use strict"; var Uint8ArrayReader = require_Uint8ArrayReader(); - var utils = require_utils5(); + var utils = require_utils3(); function NodeBufferReader(data) { Uint8ArrayReader.call(this, data); } @@ -34054,7 +29903,7 @@ var require_NodeBufferReader = __commonJS({ var require_readerFor = __commonJS({ "../node_modules/jszip/lib/reader/readerFor.js"(exports2, module2) { "use strict"; - var utils = require_utils5(); + var utils = require_utils3(); var support = require_support(); var ArrayReader = require_ArrayReader(); var StringReader = require_StringReader(); @@ -34082,7 +29931,7 @@ var require_zipEntry = __commonJS({ "../node_modules/jszip/lib/zipEntry.js"(exports2, module2) { "use strict"; var readerFor = require_readerFor(); - var utils = require_utils5(); + var utils = require_utils3(); var CompressedObject = require_compressedObject(); var crc32fn = require_crc32(); var utf8 = 
require_utf8(); @@ -34301,7 +30150,7 @@ var require_zipEntries = __commonJS({ "../node_modules/jszip/lib/zipEntries.js"(exports2, module2) { "use strict"; var readerFor = require_readerFor(); - var utils = require_utils5(); + var utils = require_utils3(); var sig = require_signature(); var ZipEntry = require_zipEntry(); var support = require_support(); @@ -34499,7 +30348,7 @@ var require_zipEntries = __commonJS({ var require_load = __commonJS({ "../node_modules/jszip/lib/load.js"(exports2, module2) { "use strict"; - var utils = require_utils5(); + var utils = require_utils3(); var external = require_external(); var utf8 = require_utf8(); var ZipEntries = require_zipEntries(); @@ -55537,7 +51386,7 @@ var require_lib6 = __commonJS({ }); // ../node_modules/whatwg-url/lib/utils.js -var require_utils6 = __commonJS({ +var require_utils4 = __commonJS({ "../node_modules/whatwg-url/lib/utils.js"(exports2, module2) { "use strict"; module2.exports.mixin = /* @__PURE__ */ __name(function mixin(target, source) { @@ -57009,7 +52858,7 @@ var require_URL = __commonJS({ "../node_modules/whatwg-url/lib/URL.js"(exports2, module2) { "use strict"; var conversions = require_lib6(); - var utils = require_utils6(); + var utils = require_utils4(); var Impl = require_URL_impl(); var impl = utils.implSymbol; function URL3(url) { @@ -70442,27 +66291,558 @@ var require_dist4 = __commonJS({ name: "String" } }, - contentLanguage: { - serializedName: "content-language", - xmlName: "content-language", + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + accessTier: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "String" + } + }, + accessTierInferred: { + serializedName: "x-ms-access-tier-inferred", + xmlName: "x-ms-access-tier-inferred", + type: { + name: "Boolean" + } + }, + archiveStatus: { + serializedName: "x-ms-archive-status", + xmlName: "x-ms-archive-status", + type: { + name: "String" + } + }, + accessTierChangedOn: { + serializedName: "x-ms-access-tier-change-time", + xmlName: 
"x-ms-access-tier-change-time", + type: { + name: "DateTimeRfc1123" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean" + } + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number" + } + }, + expiresOn: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + }, + rehydratePriority: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + var BlobGetPropertiesExceptionHeaders = { + serializedName: "Blob_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + var BlobDeleteHeaders = { + serializedName: "Blob_deleteHeaders", + type: { + name: "Composite", + className: "BlobDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + var BlobDeleteExceptionHeaders = { + serializedName: "Blob_deleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + var BlobUndeleteHeaders = { + serializedName: "Blob_undeleteHeaders", + type: { + name: "Composite", + className: "BlobUndeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + 
type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + var BlobUndeleteExceptionHeaders = { + serializedName: "Blob_undeleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobUndeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + var BlobSetExpiryHeaders = { + serializedName: "Blob_setExpiryHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } + }; + var BlobSetExpiryExceptionHeaders = { + serializedName: "Blob_setExpiryExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + var BlobSetHttpHeadersHeaders = { + serializedName: "Blob_setHttpHeadersHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + var BlobSetHttpHeadersExceptionHeaders = { + serializedName: "Blob_setHttpHeadersExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + var BlobSetImmutabilityPolicyHeaders = { + serializedName: "Blob_setImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: 
"String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { name: "String" } }, - cacheControl: { - serializedName: "cache-control", - xmlName: "cache-control", + date: { + serializedName: "date", + xmlName: "date", type: { - name: "String" + name: "DateTimeRfc1123" } }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + immutabilityPolicyExpiry: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", type: { - name: "Number" + name: "DateTimeRfc1123" } }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + } + } + } + }; + var BlobSetImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } + }; + var BlobDeleteImmutabilityPolicyHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyHeaders", + modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -70490,140 +66870,160 @@ var require_dist4 = __commonJS({ type: { name: "DateTimeRfc1123" } - }, - acceptRanges: { - serializedName: "accept-ranges", - xmlName: "accept-ranges", + } + } + } + }; + var BlobDeleteImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { name: "String" } - }, - blobCommittedBlockCount: { - serializedName: "x-ms-blob-committed-block-count", - xmlName: "x-ms-blob-committed-block-count", - type: { - name: "Number" - } - }, - isServerEncrypted: { - serializedName: "x-ms-server-encrypted", - xmlName: "x-ms-server-encrypted", + } + } + } + }; + var BlobSetLegalHoldHeaders = { + serializedName: "Blob_setLegalHoldHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { - name: "Boolean" + name: "String" } }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { name: "String" } }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { name: "String" } }, - accessTier: { - serializedName: "x-ms-access-tier", - xmlName: "x-ms-access-tier", + date: { + serializedName: "date", + xmlName: "date", type: { - name: "String" + name: "DateTimeRfc1123" } }, - accessTierInferred: { - serializedName: "x-ms-access-tier-inferred", - xmlName: "x-ms-access-tier-inferred", + legalHold: { + serializedName: "x-ms-legal-hold", + 
xmlName: "x-ms-legal-hold", type: { name: "Boolean" } - }, - archiveStatus: { - serializedName: "x-ms-archive-status", - xmlName: "x-ms-archive-status", + } + } + } + }; + var BlobSetLegalHoldExceptionHeaders = { + serializedName: "Blob_setLegalHoldExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { name: "String" } - }, - accessTierChangedOn: { - serializedName: "x-ms-access-tier-change-time", - xmlName: "x-ms-access-tier-change-time", - type: { - name: "DateTimeRfc1123" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", + } + } + } + }; + var BlobSetMetadataHeaders = { + serializedName: "Blob_setMetadataHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", type: { name: "String" } }, - isCurrentVersion: { - serializedName: "x-ms-is-current-version", - xmlName: "x-ms-is-current-version", + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", type: { - name: "Boolean" + name: "DateTimeRfc1123" } }, - tagCount: { - serializedName: "x-ms-tag-count", - xmlName: "x-ms-tag-count", + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { - name: "Number" + name: "String" } }, - expiresOn: { - serializedName: "x-ms-expiry-time", - xmlName: "x-ms-expiry-time", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { - name: "DateTimeRfc1123" + name: "String" } }, - isSealed: { - serializedName: "x-ms-blob-sealed", - xmlName: "x-ms-blob-sealed", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { - name: "Boolean" + name: "String" } }, - rehydratePriority: { - serializedName: "x-ms-rehydrate-priority", - xmlName: "x-ms-rehydrate-priority", + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", type: { - name: "Enum", - allowedValues: ["High", "Standard"] + name: "String" } }, - lastAccessed: { - serializedName: "x-ms-last-access-time", - xmlName: "x-ms-last-access-time", + date: { + serializedName: "date", + xmlName: "date", type: { name: "DateTimeRfc1123" } }, - immutabilityPolicyExpiresOn: { - serializedName: "x-ms-immutability-policy-until-date", - xmlName: "x-ms-immutability-policy-until-date", + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", type: { - name: "DateTimeRfc1123" + name: "Boolean" } }, - immutabilityPolicyMode: { - serializedName: "x-ms-immutability-policy-mode", - xmlName: "x-ms-immutability-policy-mode", + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", type: { - name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] + name: "String" } }, - legalHold: { - serializedName: "x-ms-legal-hold", - xmlName: "x-ms-legal-hold", + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", type: { - name: "Boolean" + name: "String" } }, errorCode: { @@ -70636,11 +67036,11 @@ var require_dist4 = __commonJS({ } } }; - var BlobGetPropertiesExceptionHeaders = { - serializedName: "Blob_getPropertiesExceptionHeaders", + var BlobSetMetadataExceptionHeaders = { + serializedName: "Blob_setMetadataExceptionHeaders", type: { name: "Composite", - className: 
"BlobGetPropertiesExceptionHeaders", + className: "BlobSetMetadataExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -70652,12 +67052,33 @@ var require_dist4 = __commonJS({ } } }; - var BlobDeleteHeaders = { - serializedName: "Blob_deleteHeaders", + var BlobAcquireLeaseHeaders = { + serializedName: "Blob_acquireLeaseHeaders", type: { name: "Composite", - className: "BlobDeleteHeaders", + className: "BlobAcquireLeaseHeaders", modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -70685,22 +67106,15 @@ var require_dist4 = __commonJS({ type: { name: "DateTimeRfc1123" } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } } } } }; - var BlobDeleteExceptionHeaders = { - serializedName: "Blob_deleteExceptionHeaders", + var BlobAcquireLeaseExceptionHeaders = { + serializedName: "Blob_acquireLeaseExceptionHeaders", type: { name: "Composite", - className: "BlobDeleteExceptionHeaders", + className: "BlobAcquireLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -70712,12 +67126,26 @@ var require_dist4 = __commonJS({ } } }; - var BlobUndeleteHeaders = { - serializedName: "Blob_undeleteHeaders", + var BlobReleaseLeaseHeaders = { + serializedName: "Blob_releaseLeaseHeaders", type: { name: "Composite", - className: "BlobUndeleteHeaders", + className: "BlobReleaseLeaseHeaders", modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -70745,22 +67173,15 @@ var require_dist4 = __commonJS({ type: { name: "DateTimeRfc1123" } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } } } } }; - var BlobUndeleteExceptionHeaders = { - serializedName: "Blob_undeleteExceptionHeaders", + var BlobReleaseLeaseExceptionHeaders = { + serializedName: "Blob_releaseLeaseExceptionHeaders", type: { name: "Composite", - className: "BlobUndeleteExceptionHeaders", + className: "BlobReleaseLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -70772,11 +67193,11 @@ var require_dist4 = __commonJS({ } } }; - var BlobSetExpiryHeaders = { - serializedName: "Blob_setExpiryHeaders", + var BlobRenewLeaseHeaders = { + serializedName: "Blob_renewLeaseHeaders", type: { name: "Composite", - className: "BlobSetExpiryHeaders", + className: "BlobRenewLeaseHeaders", modelProperties: { etag: { serializedName: "etag", @@ -70792,6 +67213,13 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -70823,11 +67251,11 @@ var require_dist4 = __commonJS({ } } }; - var BlobSetExpiryExceptionHeaders = { - serializedName: 
"Blob_setExpiryExceptionHeaders", + var BlobRenewLeaseExceptionHeaders = { + serializedName: "Blob_renewLeaseExceptionHeaders", type: { name: "Composite", - className: "BlobSetExpiryExceptionHeaders", + className: "BlobRenewLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -70839,11 +67267,11 @@ var require_dist4 = __commonJS({ } } }; - var BlobSetHttpHeadersHeaders = { - serializedName: "Blob_setHttpHeadersHeaders", + var BlobChangeLeaseHeaders = { + serializedName: "Blob_changeLeaseHeaders", type: { name: "Composite", - className: "BlobSetHttpHeadersHeaders", + className: "BlobChangeLeaseHeaders", modelProperties: { etag: { serializedName: "etag", @@ -70859,13 +67287,6 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - name: "Number" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -70880,6 +67301,13 @@ var require_dist4 = __commonJS({ name: "String" } }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", @@ -70893,22 +67321,15 @@ var require_dist4 = __commonJS({ type: { name: "DateTimeRfc1123" } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } } } } }; - var BlobSetHttpHeadersExceptionHeaders = { - serializedName: "Blob_setHttpHeadersExceptionHeaders", + var BlobChangeLeaseExceptionHeaders = { + serializedName: "Blob_changeLeaseExceptionHeaders", type: { name: "Composite", - className: "BlobSetHttpHeadersExceptionHeaders", + className: "BlobChangeLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -70920,12 +67341,33 @@ var require_dist4 = __commonJS({ } } }; - var BlobSetImmutabilityPolicyHeaders = { - serializedName: "Blob_setImmutabilityPolicyHeaders", + var BlobBreakLeaseHeaders = { + serializedName: "Blob_breakLeaseHeaders", type: { name: "Composite", - className: "BlobSetImmutabilityPolicyHeaders", + className: "BlobBreakLeaseHeaders", modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -70953,30 +67395,15 @@ var require_dist4 = __commonJS({ type: { name: "DateTimeRfc1123" } - }, - immutabilityPolicyExpiry: { - serializedName: "x-ms-immutability-policy-until-date", - xmlName: "x-ms-immutability-policy-until-date", - type: { - name: "DateTimeRfc1123" - } - }, - immutabilityPolicyMode: { - serializedName: "x-ms-immutability-policy-mode", - xmlName: "x-ms-immutability-policy-mode", - type: { - name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } } } } }; - var BlobSetImmutabilityPolicyExceptionHeaders = { - serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", + var BlobBreakLeaseExceptionHeaders = { + serializedName: "Blob_breakLeaseExceptionHeaders", type: { name: "Composite", - className: "BlobSetImmutabilityPolicyExceptionHeaders", + className: "BlobBreakLeaseExceptionHeaders", 
modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -70988,12 +67415,33 @@ var require_dist4 = __commonJS({ } } }; - var BlobDeleteImmutabilityPolicyHeaders = { - serializedName: "Blob_deleteImmutabilityPolicyHeaders", + var BlobCreateSnapshotHeaders = { + serializedName: "Blob_createSnapshotHeaders", type: { name: "Composite", - className: "BlobDeleteImmutabilityPolicyHeaders", + className: "BlobCreateSnapshotHeaders", modelProperties: { + snapshot: { + serializedName: "x-ms-snapshot", + xmlName: "x-ms-snapshot", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -71015,21 +67463,42 @@ var require_dist4 = __commonJS({ name: "String" } }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } } } } }; - var BlobDeleteImmutabilityPolicyExceptionHeaders = { - serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", + var BlobCreateSnapshotExceptionHeaders = { + serializedName: "Blob_createSnapshotExceptionHeaders", type: { name: "Composite", - className: "BlobDeleteImmutabilityPolicyExceptionHeaders", + className: "BlobCreateSnapshotExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -71041,12 +67510,26 @@ var require_dist4 = __commonJS({ } } }; - var BlobSetLegalHoldHeaders = { - serializedName: "Blob_setLegalHoldHeaders", + var BlobStartCopyFromURLHeaders = { + serializedName: "Blob_startCopyFromURLHeaders", type: { name: "Composite", - className: "BlobSetLegalHoldHeaders", + className: "BlobStartCopyFromURLHeaders", modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -71068,6 +67551,13 @@ var require_dist4 = __commonJS({ name: "String" } }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, date: { serializedName: "date", xmlName: "date", @@ -71075,21 +67565,36 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, - legalHold: { - serializedName: "x-ms-legal-hold", - xmlName: "x-ms-legal-hold", + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Boolean" + name: "String" } } } } }; - var BlobSetLegalHoldExceptionHeaders = { - serializedName: "Blob_setLegalHoldExceptionHeaders", + var BlobStartCopyFromURLExceptionHeaders = { + 
serializedName: "Blob_startCopyFromURLExceptionHeaders", type: { name: "Composite", - className: "BlobSetLegalHoldExceptionHeaders", + className: "BlobStartCopyFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -71101,11 +67606,11 @@ var require_dist4 = __commonJS({ } } }; - var BlobSetMetadataHeaders = { - serializedName: "Blob_setMetadataHeaders", + var BlobCopyFromURLHeaders = { + serializedName: "Blob_copyFromURLHeaders", type: { name: "Composite", - className: "BlobSetMetadataHeaders", + className: "BlobCopyFromURLHeaders", modelProperties: { etag: { serializedName: "etag", @@ -71156,20 +67661,35 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", type: { - name: "Boolean" + name: "String" } }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + copyStatus: { + defaultValue: "success", + isConstant: true, + serializedName: "x-ms-copy-status", type: { name: "String" } }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", @@ -71187,11 +67707,11 @@ var require_dist4 = __commonJS({ } } }; - var BlobSetMetadataExceptionHeaders = { - serializedName: "Blob_setMetadataExceptionHeaders", + var BlobCopyFromURLExceptionHeaders = { + serializedName: "Blob_copyFromURLExceptionHeaders", type: { name: "Composite", - className: "BlobSetMetadataExceptionHeaders", + className: "BlobCopyFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -71203,33 +67723,12 @@ var require_dist4 = __commonJS({ } } }; - var BlobAcquireLeaseHeaders = { - serializedName: "Blob_acquireLeaseHeaders", + var BlobAbortCopyFromURLHeaders = { + serializedName: "Blob_abortCopyFromURLHeaders", type: { name: "Composite", - className: "BlobAcquireLeaseHeaders", + className: "BlobAbortCopyFromURLHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", - type: { - name: "String" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -71257,15 +67756,22 @@ var require_dist4 = __commonJS({ type: { name: "DateTimeRfc1123" } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } } } } }; - var BlobAcquireLeaseExceptionHeaders = { - serializedName: "Blob_acquireLeaseExceptionHeaders", + var BlobAbortCopyFromURLExceptionHeaders = { + serializedName: "Blob_abortCopyFromURLExceptionHeaders", type: { name: "Composite", - className: "BlobAcquireLeaseExceptionHeaders", + className: "BlobAbortCopyFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -71277,26 +67783,12 @@ var require_dist4 = __commonJS({ } } }; - var BlobReleaseLeaseHeaders = { - serializedName: "Blob_releaseLeaseHeaders", + var 
BlobSetTierHeaders = { + serializedName: "Blob_setTierHeaders", type: { name: "Composite", - className: "BlobReleaseLeaseHeaders", + className: "BlobSetTierHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -71318,21 +67810,21 @@ var require_dist4 = __commonJS({ name: "String" } }, - date: { - serializedName: "date", - xmlName: "date", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "DateTimeRfc1123" + name: "String" } } } } }; - var BlobReleaseLeaseExceptionHeaders = { - serializedName: "Blob_releaseLeaseExceptionHeaders", + var BlobSetTierExceptionHeaders = { + serializedName: "Blob_setTierExceptionHeaders", type: { name: "Composite", - className: "BlobReleaseLeaseExceptionHeaders", + className: "BlobSetTierExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -71344,33 +67836,12 @@ var require_dist4 = __commonJS({ } } }; - var BlobRenewLeaseHeaders = { - serializedName: "Blob_renewLeaseHeaders", + var BlobGetAccountInfoHeaders = { + serializedName: "Blob_getAccountInfoHeaders", type: { name: "Composite", - className: "BlobRenewLeaseHeaders", + className: "BlobGetAccountInfoHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", - type: { - name: "String" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -71398,15 +67869,43 @@ var require_dist4 = __commonJS({ type: { name: "DateTimeRfc1123" } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } } } } }; - var BlobRenewLeaseExceptionHeaders = { - serializedName: "Blob_renewLeaseExceptionHeaders", + var BlobGetAccountInfoExceptionHeaders = { + serializedName: "Blob_getAccountInfoExceptionHeaders", type: { name: "Composite", - className: "BlobRenewLeaseExceptionHeaders", + className: "BlobGetAccountInfoExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -71418,12 +67917,48 @@ var require_dist4 = __commonJS({ } } }; - var BlobChangeLeaseHeaders = { - serializedName: "Blob_changeLeaseHeaders", + var BlobQueryHeaders = { + serializedName: "Blob_queryHeaders", type: { name: "Composite", - className: "BlobChangeLeaseHeaders", + className: "BlobQueryHeaders", modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", 
+ type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String" + } + }, etag: { serializedName: "etag", xmlName: "etag", @@ -71431,13 +67966,129 @@ var require_dist4 = __commonJS({ name: "String" } }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletionTime: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", type: { name: "DateTimeRfc1123" } }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -71452,16 +68103,16 @@ var require_dist4 = __commonJS({ name: "String" } }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { name: "String" } }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", type: { name: "String" } @@ -71472,15 +68123,64 @@ var require_dist4 = __commonJS({ type: { name: "DateTimeRfc1123" } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + 
type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } } } } }; - var BlobChangeLeaseExceptionHeaders = { - serializedName: "Blob_changeLeaseExceptionHeaders", + var BlobQueryExceptionHeaders = { + serializedName: "Blob_queryExceptionHeaders", type: { name: "Composite", - className: "BlobChangeLeaseExceptionHeaders", + className: "BlobQueryExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -71492,33 +68192,12 @@ var require_dist4 = __commonJS({ } } }; - var BlobBreakLeaseHeaders = { - serializedName: "Blob_breakLeaseHeaders", + var BlobGetTagsHeaders = { + serializedName: "Blob_getTagsHeaders", type: { name: "Composite", - className: "BlobBreakLeaseHeaders", + className: "BlobGetTagsHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseTime: { - serializedName: "x-ms-lease-time", - xmlName: "x-ms-lease-time", - type: { - name: "Number" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -71546,15 +68225,22 @@ var require_dist4 = __commonJS({ type: { name: "DateTimeRfc1123" } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } } } } }; - var BlobBreakLeaseExceptionHeaders = { - serializedName: "Blob_breakLeaseExceptionHeaders", + var BlobGetTagsExceptionHeaders = { + serializedName: "Blob_getTagsExceptionHeaders", type: { name: "Composite", - className: "BlobBreakLeaseExceptionHeaders", + className: "BlobGetTagsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -71566,33 +68252,12 @@ var require_dist4 = __commonJS({ } } }; - var BlobCreateSnapshotHeaders = { - serializedName: "Blob_createSnapshotHeaders", + var BlobSetTagsHeaders = { + serializedName: "Blob_setTagsHeaders", type: { name: "Composite", - className: "BlobCreateSnapshotHeaders", + className: "BlobSetTagsHeaders", modelProperties: { - snapshot: { - serializedName: "x-ms-snapshot", - xmlName: "x-ms-snapshot", - type: { - name: "String" - } - }, - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -71614,13 +68279,6 @@ var require_dist4 = __commonJS({ name: "String" } }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, date: { serializedName: 
"date", xmlName: "date", @@ -71628,13 +68286,6 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -71645,11 +68296,11 @@ var require_dist4 = __commonJS({ } } }; - var BlobCreateSnapshotExceptionHeaders = { - serializedName: "Blob_createSnapshotExceptionHeaders", + var BlobSetTagsExceptionHeaders = { + serializedName: "Blob_setTagsExceptionHeaders", type: { name: "Composite", - className: "BlobCreateSnapshotExceptionHeaders", + className: "BlobSetTagsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -71660,12 +68311,12 @@ var require_dist4 = __commonJS({ } } } - }; - var BlobStartCopyFromURLHeaders = { - serializedName: "Blob_startCopyFromURLHeaders", + }; + var PageBlobCreateHeaders = { + serializedName: "PageBlob_createHeaders", type: { name: "Composite", - className: "BlobStartCopyFromURLHeaders", + className: "PageBlobCreateHeaders", modelProperties: { etag: { serializedName: "etag", @@ -71681,6 +68332,13 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -71716,19 +68374,25 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, - copyStatus: { - serializedName: "x-ms-copy-status", - xmlName: "x-ms-copy-status", + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", type: { - name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] + name: "String" } }, errorCode: { @@ -71741,11 +68405,11 @@ var require_dist4 = __commonJS({ } } }; - var BlobStartCopyFromURLExceptionHeaders = { - serializedName: "Blob_startCopyFromURLExceptionHeaders", + var PageBlobCreateExceptionHeaders = { + serializedName: "PageBlob_createExceptionHeaders", type: { name: "Composite", - className: "BlobStartCopyFromURLExceptionHeaders", + className: "PageBlobCreateExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -71757,11 +68421,11 @@ var require_dist4 = __commonJS({ } } }; - var BlobCopyFromURLHeaders = { - serializedName: "Blob_copyFromURLHeaders", + var PageBlobUploadPagesHeaders = { + serializedName: "PageBlob_uploadPagesHeaders", type: { name: "Composite", - className: "BlobCopyFromURLHeaders", + className: "PageBlobUploadPagesHeaders", modelProperties: { etag: { serializedName: "etag", @@ -71777,6 +68441,27 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + 
name: "Number" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -71798,13 +68483,6 @@ var require_dist4 = __commonJS({ name: "String" } }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, date: { serializedName: "date", xmlName: "date", @@ -71812,35 +68490,20 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", type: { - name: "String" + name: "Boolean" } }, - copyStatus: { - defaultValue: "success", - isConstant: true, - serializedName: "x-ms-copy-status", + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", @@ -71858,11 +68521,11 @@ var require_dist4 = __commonJS({ } } }; - var BlobCopyFromURLExceptionHeaders = { - serializedName: "Blob_copyFromURLExceptionHeaders", + var PageBlobUploadPagesExceptionHeaders = { + serializedName: "PageBlob_uploadPagesExceptionHeaders", type: { name: "Composite", - className: "BlobCopyFromURLExceptionHeaders", + className: "PageBlobUploadPagesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -71874,12 +68537,47 @@ var require_dist4 = __commonJS({ } } }; - var BlobAbortCopyFromURLHeaders = { - serializedName: "Blob_abortCopyFromURLHeaders", + var PageBlobClearPagesHeaders = { + serializedName: "PageBlob_clearPagesHeaders", type: { name: "Composite", - className: "BlobAbortCopyFromURLHeaders", + className: "PageBlobClearPagesHeaders", modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -71918,11 +68616,11 @@ var require_dist4 = __commonJS({ } } }; - var BlobAbortCopyFromURLExceptionHeaders = { - serializedName: "Blob_abortCopyFromURLExceptionHeaders", + var PageBlobClearPagesExceptionHeaders = { + serializedName: "PageBlob_clearPagesExceptionHeaders", type: { name: "Composite", - className: "BlobAbortCopyFromURLExceptionHeaders", + className: "PageBlobClearPagesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -71934,19 +68632,47 @@ var require_dist4 = __commonJS({ } } }; - var BlobSetTierHeaders = { - serializedName: "Blob_setTierHeaders", + var PageBlobUploadPagesFromURLHeaders = { + serializedName: "PageBlob_uploadPagesFromURLHeaders", type: { name: "Composite", - 
className: "BlobSetTierHeaders", + className: "PageBlobUploadPagesFromURLHeaders", modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + etag: { + serializedName: "etag", + xmlName: "etag", type: { name: "String" } }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", @@ -71961,6 +68687,34 @@ var require_dist4 = __commonJS({ name: "String" } }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -71971,11 +68725,11 @@ var require_dist4 = __commonJS({ } } }; - var BlobSetTierExceptionHeaders = { - serializedName: "Blob_setTierExceptionHeaders", + var PageBlobUploadPagesFromURLExceptionHeaders = { + serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", type: { name: "Composite", - className: "BlobSetTierExceptionHeaders", + className: "PageBlobUploadPagesFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -71987,12 +68741,33 @@ var require_dist4 = __commonJS({ } } }; - var BlobGetAccountInfoHeaders = { - serializedName: "Blob_getAccountInfoHeaders", + var PageBlobGetPageRangesHeaders = { + serializedName: "PageBlob_getPageRangesHeaders", type: { name: "Composite", - className: "BlobGetAccountInfoHeaders", + className: "PageBlobGetPageRangesHeaders", modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -72021,42 +68796,21 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, - skuName: { - serializedName: "x-ms-sku-name", - xmlName: "x-ms-sku-name", - type: { - name: "Enum", - allowedValues: [ - "Standard_LRS", - "Standard_GRS", - "Standard_RAGRS", - "Standard_ZRS", - "Premium_LRS" - ] - } - }, - accountKind: { - serializedName: "x-ms-account-kind", - xmlName: "x-ms-account-kind", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Enum", - allowedValues: [ - "Storage", - "BlobStorage", - "StorageV2", - "FileStorage", - "BlockBlobStorage" - ] + name: "String" } } } } }; - var 
BlobGetAccountInfoExceptionHeaders = { - serializedName: "Blob_getAccountInfoExceptionHeaders", + var PageBlobGetPageRangesExceptionHeaders = { + serializedName: "PageBlob_getPageRangesExceptionHeaders", type: { name: "Composite", - className: "BlobGetAccountInfoExceptionHeaders", + className: "PageBlobGetPageRangesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -72068,11 +68822,11 @@ var require_dist4 = __commonJS({ } } }; - var BlobQueryHeaders = { - serializedName: "Blob_queryHeaders", + var PageBlobGetPageRangesDiffHeaders = { + serializedName: "PageBlob_getPageRangesDiffHeaders", type: { name: "Composite", - className: "BlobQueryHeaders", + className: "PageBlobGetPageRangesDiffHeaders", modelProperties: { lastModified: { serializedName: "last-modified", @@ -72081,35 +68835,6 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, - metadata: { - serializedName: "x-ms-meta", - xmlName: "x-ms-meta", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } - }, - contentLength: { - serializedName: "content-length", - xmlName: "content-length", - type: { - name: "Number" - } - }, - contentType: { - serializedName: "content-type", - xmlName: "content-type", - type: { - name: "String" - } - }, - contentRange: { - serializedName: "content-range", - xmlName: "content-range", - type: { - name: "String" - } - }, etag: { serializedName: "etag", xmlName: "etag", @@ -72117,127 +68842,92 @@ var require_dist4 = __commonJS({ name: "String" } }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - contentEncoding: { - serializedName: "content-encoding", - xmlName: "content-encoding", + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", type: { - name: "String" + name: "Number" } }, - cacheControl: { - serializedName: "cache-control", - xmlName: "cache-control", + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { name: "String" } }, - contentDisposition: { - serializedName: "content-disposition", - xmlName: "content-disposition", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { name: "String" } }, - contentLanguage: { - serializedName: "content-language", - xmlName: "content-language", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { name: "String" } }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - name: "Number" - } - }, - blobType: { - serializedName: "x-ms-blob-type", - xmlName: "x-ms-blob-type", - type: { - name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] - } - }, - copyCompletionTime: { - serializedName: "x-ms-copy-completion-time", - xmlName: "x-ms-copy-completion-time", + date: { + serializedName: "date", + xmlName: "date", type: { name: "DateTimeRfc1123" } }, - copyStatusDescription: { - serializedName: "x-ms-copy-status-description", - xmlName: "x-ms-copy-status-description", - type: { - name: "String" - } - }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { name: "String" } - }, - copyProgress: { - serializedName: "x-ms-copy-progress", - xmlName: "x-ms-copy-progress", + } + } + } + }; + var PageBlobGetPageRangesDiffExceptionHeaders = { + serializedName: 
"PageBlob_getPageRangesDiffExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { name: "String" } - }, - copySource: { - serializedName: "x-ms-copy-source", - xmlName: "x-ms-copy-source", + } + } + } + }; + var PageBlobResizeHeaders = { + serializedName: "PageBlob_resizeHeaders", + type: { + name: "Composite", + className: "PageBlobResizeHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", type: { name: "String" } }, - copyStatus: { - serializedName: "x-ms-copy-status", - xmlName: "x-ms-copy-status", - type: { - name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } - }, - leaseDuration: { - serializedName: "x-ms-lease-duration", - xmlName: "x-ms-lease-duration", - type: { - name: "Enum", - allowedValues: ["infinite", "fixed"] - } - }, - leaseState: { - serializedName: "x-ms-lease-state", - xmlName: "x-ms-lease-state", + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", type: { - name: "Enum", - allowedValues: [ - "available", - "leased", - "expired", - "breaking", - "broken" - ] + name: "DateTimeRfc1123" } }, - leaseStatus: { - serializedName: "x-ms-lease-status", - xmlName: "x-ms-lease-status", + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", type: { - name: "Enum", - allowedValues: ["locked", "unlocked"] + name: "Number" } }, clientRequestId: { @@ -72248,66 +68938,24 @@ var require_dist4 = __commonJS({ } }, requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - acceptRanges: { - serializedName: "accept-ranges", - xmlName: "accept-ranges", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - blobCommittedBlockCount: { - serializedName: "x-ms-blob-committed-block-count", - xmlName: "x-ms-blob-committed-block-count", - type: { - name: "Number" - } - }, - isServerEncrypted: { - serializedName: "x-ms-server-encrypted", - xmlName: "x-ms-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { name: "String" } }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { name: "String" } }, - blobContentMD5: { - serializedName: "x-ms-blob-content-md5", - xmlName: "x-ms-blob-content-md5", + date: { + serializedName: "date", + xmlName: "date", type: { - name: "ByteArray" + name: "DateTimeRfc1123" } }, errorCode: { @@ -72316,22 +68964,15 @@ var require_dist4 = __commonJS({ type: { name: "String" } - }, - contentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } } } } }; - var BlobQueryExceptionHeaders = { - serializedName: "Blob_queryExceptionHeaders", + var PageBlobResizeExceptionHeaders = { + serializedName: "PageBlob_resizeExceptionHeaders", type: { name: "Composite", - className: "BlobQueryExceptionHeaders", + className: "PageBlobResizeExceptionHeaders", 
modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -72343,12 +68984,33 @@ var require_dist4 = __commonJS({ } } }; - var BlobGetTagsHeaders = { - serializedName: "Blob_getTagsHeaders", + var PageBlobUpdateSequenceNumberHeaders = { + serializedName: "PageBlob_updateSequenceNumberHeaders", type: { name: "Composite", - className: "BlobGetTagsHeaders", + className: "PageBlobUpdateSequenceNumberHeaders", modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -72387,11 +69049,11 @@ var require_dist4 = __commonJS({ } } }; - var BlobGetTagsExceptionHeaders = { - serializedName: "Blob_getTagsExceptionHeaders", + var PageBlobUpdateSequenceNumberExceptionHeaders = { + serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", type: { name: "Composite", - className: "BlobGetTagsExceptionHeaders", + className: "PageBlobUpdateSequenceNumberExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -72403,12 +69065,26 @@ var require_dist4 = __commonJS({ } } }; - var BlobSetTagsHeaders = { - serializedName: "Blob_setTagsHeaders", + var PageBlobCopyIncrementalHeaders = { + serializedName: "PageBlob_copyIncrementalHeaders", type: { name: "Composite", - className: "BlobSetTagsHeaders", + className: "PageBlobCopyIncrementalHeaders", modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -72437,6 +69113,21 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -72447,11 +69138,11 @@ var require_dist4 = __commonJS({ } } }; - var BlobSetTagsExceptionHeaders = { - serializedName: "Blob_setTagsExceptionHeaders", + var PageBlobCopyIncrementalExceptionHeaders = { + serializedName: "PageBlob_copyIncrementalExceptionHeaders", type: { name: "Composite", - className: "BlobSetTagsExceptionHeaders", + className: "PageBlobCopyIncrementalExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -72463,11 +69154,11 @@ var require_dist4 = __commonJS({ } } }; - var PageBlobCreateHeaders = { - serializedName: "PageBlob_createHeaders", + var AppendBlobCreateHeaders = { + serializedName: "AppendBlob_createHeaders", type: { name: "Composite", - className: "PageBlobCreateHeaders", + className: "AppendBlobCreateHeaders", modelProperties: { etag: { serializedName: "etag", @@ -72556,11 +69247,11 @@ var require_dist4 = __commonJS({ } } }; - var PageBlobCreateExceptionHeaders = { - serializedName: "PageBlob_createExceptionHeaders", + var AppendBlobCreateExceptionHeaders = { + 
serializedName: "AppendBlob_createExceptionHeaders", type: { name: "Composite", - className: "PageBlobCreateExceptionHeaders", + className: "AppendBlobCreateExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -72572,11 +69263,11 @@ var require_dist4 = __commonJS({ } } }; - var PageBlobUploadPagesHeaders = { - serializedName: "PageBlob_uploadPagesHeaders", + var AppendBlobAppendBlockHeaders = { + serializedName: "AppendBlob_appendBlockHeaders", type: { name: "Composite", - className: "PageBlobUploadPagesHeaders", + className: "AppendBlobAppendBlockHeaders", modelProperties: { etag: { serializedName: "etag", @@ -72606,13 +69297,6 @@ var require_dist4 = __commonJS({ name: "ByteArray" } }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - name: "Number" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -72641,6 +69325,20 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", @@ -72672,11 +69370,11 @@ var require_dist4 = __commonJS({ } } }; - var PageBlobUploadPagesExceptionHeaders = { - serializedName: "PageBlob_uploadPagesExceptionHeaders", + var AppendBlobAppendBlockExceptionHeaders = { + serializedName: "AppendBlob_appendBlockExceptionHeaders", type: { name: "Composite", - className: "PageBlobUploadPagesExceptionHeaders", + className: "AppendBlobAppendBlockExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -72688,11 +69386,11 @@ var require_dist4 = __commonJS({ } } }; - var PageBlobClearPagesHeaders = { - serializedName: "PageBlob_clearPagesHeaders", + var AppendBlobAppendBlockFromUrlHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlHeaders", type: { name: "Composite", - className: "PageBlobClearPagesHeaders", + className: "AppendBlobAppendBlockFromUrlHeaders", modelProperties: { etag: { serializedName: "etag", @@ -72722,20 +69420,6 @@ var require_dist4 = __commonJS({ name: "ByteArray" } }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - name: "Number" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", @@ -72757,6 +69441,41 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + isServerEncrypted: { + serializedName: 
"x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -72767,11 +69486,11 @@ var require_dist4 = __commonJS({ } } }; - var PageBlobClearPagesExceptionHeaders = { - serializedName: "PageBlob_clearPagesExceptionHeaders", + var AppendBlobAppendBlockFromUrlExceptionHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", type: { name: "Composite", - className: "PageBlobClearPagesExceptionHeaders", + className: "AppendBlobAppendBlockFromUrlExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -72783,11 +69502,11 @@ var require_dist4 = __commonJS({ } } }; - var PageBlobUploadPagesFromURLHeaders = { - serializedName: "PageBlob_uploadPagesFromURLHeaders", + var AppendBlobSealHeaders = { + serializedName: "AppendBlob_sealHeaders", type: { name: "Composite", - className: "PageBlobUploadPagesFromURLHeaders", + className: "AppendBlobSealHeaders", modelProperties: { etag: { serializedName: "etag", @@ -72803,25 +69522,11 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { - name: "Number" + name: "String" } }, requestId: { @@ -72845,42 +69550,21 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", type: { name: "Boolean" } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } } } } }; - var PageBlobUploadPagesFromURLExceptionHeaders = { - serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", + var AppendBlobSealExceptionHeaders = { + serializedName: "AppendBlob_sealExceptionHeaders", type: { name: "Composite", - className: "PageBlobUploadPagesFromURLExceptionHeaders", + className: "AppendBlobSealExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -72892,19 +69576,12 @@ var require_dist4 = __commonJS({ } } }; - var PageBlobGetPageRangesHeaders = { - serializedName: "PageBlob_getPageRangesHeaders", + var BlockBlobUploadHeaders = { + serializedName: "BlockBlob_uploadHeaders", type: { name: "Composite", - className: "PageBlobGetPageRangesHeaders", + className: "BlockBlobUploadHeaders", modelProperties: { - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, etag: { serializedName: "etag", xmlName: "etag", @@ -72912,11 +69589,18 @@ var require_dist4 = __commonJS({ name: "String" } }, - blobContentLength: { - serializedName: "x-ms-blob-content-length", - xmlName: 
"x-ms-blob-content-length", + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", type: { - name: "Number" + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" } }, clientRequestId: { @@ -72940,6 +69624,13 @@ var require_dist4 = __commonJS({ name: "String" } }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, date: { serializedName: "date", xmlName: "date", @@ -72947,6 +69638,27 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -72957,11 +69669,11 @@ var require_dist4 = __commonJS({ } } }; - var PageBlobGetPageRangesExceptionHeaders = { - serializedName: "PageBlob_getPageRangesExceptionHeaders", + var BlockBlobUploadExceptionHeaders = { + serializedName: "BlockBlob_uploadExceptionHeaders", type: { name: "Composite", - className: "PageBlobGetPageRangesExceptionHeaders", + className: "BlockBlobUploadExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -72973,19 +69685,12 @@ var require_dist4 = __commonJS({ } } }; - var PageBlobGetPageRangesDiffHeaders = { - serializedName: "PageBlob_getPageRangesDiffHeaders", + var BlockBlobPutBlobFromUrlHeaders = { + serializedName: "BlockBlob_putBlobFromUrlHeaders", type: { name: "Composite", - className: "PageBlobGetPageRangesDiffHeaders", + className: "BlockBlobPutBlobFromUrlHeaders", modelProperties: { - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, etag: { serializedName: "etag", xmlName: "etag", @@ -72993,11 +69698,18 @@ var require_dist4 = __commonJS({ name: "String" } }, - blobContentLength: { - serializedName: "x-ms-blob-content-length", - xmlName: "x-ms-blob-content-length", + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", type: { - name: "Number" + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" } }, clientRequestId: { @@ -73021,6 +69733,13 @@ var require_dist4 = __commonJS({ name: "String" } }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, date: { serializedName: "date", xmlName: "date", @@ -73028,6 +69747,27 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -73038,11 +69778,11 @@ var require_dist4 = __commonJS({ } 
} }; - var PageBlobGetPageRangesDiffExceptionHeaders = { - serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", + var BlockBlobPutBlobFromUrlExceptionHeaders = { + serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", type: { name: "Composite", - className: "PageBlobGetPageRangesDiffExceptionHeaders", + className: "BlockBlobPutBlobFromUrlExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -73054,31 +69794,17 @@ var require_dist4 = __commonJS({ } } }; - var PageBlobResizeHeaders = { - serializedName: "PageBlob_resizeHeaders", + var BlockBlobStageBlockHeaders = { + serializedName: "BlockBlob_stageBlockHeaders", type: { name: "Composite", - className: "PageBlobResizeHeaders", + className: "BlockBlobStageBlockHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", type: { - name: "Number" + name: "ByteArray" } }, clientRequestId: { @@ -73109,6 +69835,34 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -73119,11 +69873,11 @@ var require_dist4 = __commonJS({ } } }; - var PageBlobResizeExceptionHeaders = { - serializedName: "PageBlob_resizeExceptionHeaders", + var BlockBlobStageBlockExceptionHeaders = { + serializedName: "BlockBlob_stageBlockExceptionHeaders", type: { name: "Composite", - className: "PageBlobResizeExceptionHeaders", + className: "BlockBlobStageBlockExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -73135,31 +69889,24 @@ var require_dist4 = __commonJS({ } } }; - var PageBlobUpdateSequenceNumberHeaders = { - serializedName: "PageBlob_updateSequenceNumberHeaders", + var BlockBlobStageBlockFromURLHeaders = { + serializedName: "BlockBlob_stageBlockFromURLHeaders", type: { name: "Composite", - className: "PageBlobUpdateSequenceNumberHeaders", + className: "BlockBlobStageBlockFromURLHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", type: { - name: "DateTimeRfc1123" + name: "ByteArray" } }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", type: { - name: "Number" + name: "ByteArray" } }, clientRequestId: { @@ -73190,6 +69937,27 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, + 
isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -73200,11 +69968,11 @@ var require_dist4 = __commonJS({ } } }; - var PageBlobUpdateSequenceNumberExceptionHeaders = { - serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", + var BlockBlobStageBlockFromURLExceptionHeaders = { + serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", type: { name: "Composite", - className: "PageBlobUpdateSequenceNumberExceptionHeaders", + className: "BlockBlobStageBlockFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -73216,11 +69984,11 @@ var require_dist4 = __commonJS({ } } }; - var PageBlobCopyIncrementalHeaders = { - serializedName: "PageBlob_copyIncrementalHeaders", + var BlockBlobCommitBlockListHeaders = { + serializedName: "BlockBlob_commitBlockListHeaders", type: { name: "Composite", - className: "PageBlobCopyIncrementalHeaders", + className: "BlockBlobCommitBlockListHeaders", modelProperties: { etag: { serializedName: "etag", @@ -73236,6 +70004,20 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", @@ -73257,6 +70039,13 @@ var require_dist4 = __commonJS({ name: "String" } }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, date: { serializedName: "date", xmlName: "date", @@ -73264,19 +70053,25 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, - copyStatus: { - serializedName: "x-ms-copy-status", - xmlName: "x-ms-copy-status", + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", type: { - name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] + name: "String" } }, errorCode: { @@ -73289,11 +70084,11 @@ var require_dist4 = __commonJS({ } } }; - var PageBlobCopyIncrementalExceptionHeaders = { - serializedName: "PageBlob_copyIncrementalExceptionHeaders", + var BlockBlobCommitBlockListExceptionHeaders = { + serializedName: "BlockBlob_commitBlockListExceptionHeaders", type: { name: "Composite", - className: "PageBlobCopyIncrementalExceptionHeaders", + className: "BlockBlobCommitBlockListExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", @@ -73305,12 +70100,19 @@ var require_dist4 = __commonJS({ } } }; - var AppendBlobCreateHeaders = { - serializedName: "AppendBlob_createHeaders", + var 
BlockBlobGetBlockListHeaders = { + serializedName: "BlockBlob_getBlockListHeaders", type: { name: "Composite", - className: "AppendBlobCreateHeaders", + className: "BlockBlobGetBlockListHeaders", modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, etag: { serializedName: "etag", xmlName: "etag", @@ -73318,18 +70120,18 @@ var require_dist4 = __commonJS({ name: "String" } }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + contentType: { + serializedName: "content-type", + xmlName: "content-type", type: { - name: "DateTimeRfc1123" + name: "String" } }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", type: { - name: "ByteArray" + name: "Number" } }, clientRequestId: { @@ -73353,13 +70155,6 @@ var require_dist4 = __commonJS({ name: "String" } }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, date: { serializedName: "date", xmlName: "date", @@ -73367,27 +70162,22 @@ var require_dist4 = __commonJS({ name: "DateTimeRfc1123" } }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { name: "String" } - }, + } + } + } + }; + var BlockBlobGetBlockListExceptionHeaders = { + serializedName: "BlockBlob_getBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListExceptionHeaders", + modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -73398,1238 +70188,945 @@ var require_dist4 = __commonJS({ } } }; - var AppendBlobCreateExceptionHeaders = { - serializedName: "AppendBlob_createExceptionHeaders", - type: { - name: "Composite", - className: "AppendBlobCreateExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + var Mappers = /* @__PURE__ */ Object.freeze({ + __proto__: null, + BlobServiceProperties, + Logging, + RetentionPolicy, + Metrics, + CorsRule, + StaticWebsite, + StorageError, + BlobServiceStatistics, + GeoReplication, + ListContainersSegmentResponse, + ContainerItem, + ContainerProperties, + KeyInfo, + UserDelegationKey, + FilterBlobSegment, + FilterBlobItem, + BlobTags, + BlobTag, + SignedIdentifier, + AccessPolicy, + ListBlobsFlatSegmentResponse, + BlobFlatListSegment, + BlobItemInternal, + BlobName, + BlobPropertiesInternal, + ListBlobsHierarchySegmentResponse, + BlobHierarchyListSegment, + BlobPrefix, + BlockLookupList, + BlockList, + Block, + PageList, + PageRange, + ClearRange, + QueryRequest, + QuerySerialization, + QueryFormat, + DelimitedTextConfiguration, + JsonTextConfiguration, + ArrowConfiguration, + ArrowField, + ServiceSetPropertiesHeaders, + ServiceSetPropertiesExceptionHeaders, + ServiceGetPropertiesHeaders, + ServiceGetPropertiesExceptionHeaders, + ServiceGetStatisticsHeaders, + ServiceGetStatisticsExceptionHeaders, + 
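// Illustrative sketch, not part of the generated bundle: every response-header
// mapper in this file follows one shape, `modelProperties` keyed by a camelCase
// property name, where each entry records the raw header it is read from
// (`serializedName`) and a target type. Assuming that shape, a minimal
// deserializer (sample headers invented for demonstration) could look like:
function deserializeHeaders(mapper, rawHeaders) {
  const result = {};
  for (const [key, prop] of Object.entries(mapper.type.modelProperties)) {
    const raw = rawHeaders[prop.serializedName];
    if (raw === undefined) continue;
    switch (prop.type.name) {
      case "Number":
        result[key] = Number(raw);
        break;
      case "Boolean":
        result[key] = raw === "true";
        break;
      case "DateTimeRfc1123":
        result[key] = new Date(raw);
        break;
      default:
        // String and Enum pass through; ByteArray is left base64-encoded here.
        result[key] = raw;
    }
  }
  return result;
}
// deserializeHeaders(BlockBlobGetBlockListHeaders,
//   { "x-ms-request-id": "r1", "x-ms-blob-content-length": "1024" })
//   => { requestId: "r1", blobContentLength: 1024 }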
ServiceListContainersSegmentHeaders, + ServiceListContainersSegmentExceptionHeaders, + ServiceGetUserDelegationKeyHeaders, + ServiceGetUserDelegationKeyExceptionHeaders, + ServiceGetAccountInfoHeaders, + ServiceGetAccountInfoExceptionHeaders, + ServiceSubmitBatchHeaders, + ServiceSubmitBatchExceptionHeaders, + ServiceFilterBlobsHeaders, + ServiceFilterBlobsExceptionHeaders, + ContainerCreateHeaders, + ContainerCreateExceptionHeaders, + ContainerGetPropertiesHeaders, + ContainerGetPropertiesExceptionHeaders, + ContainerDeleteHeaders, + ContainerDeleteExceptionHeaders, + ContainerSetMetadataHeaders, + ContainerSetMetadataExceptionHeaders, + ContainerGetAccessPolicyHeaders, + ContainerGetAccessPolicyExceptionHeaders, + ContainerSetAccessPolicyHeaders, + ContainerSetAccessPolicyExceptionHeaders, + ContainerRestoreHeaders, + ContainerRestoreExceptionHeaders, + ContainerRenameHeaders, + ContainerRenameExceptionHeaders, + ContainerSubmitBatchHeaders, + ContainerSubmitBatchExceptionHeaders, + ContainerFilterBlobsHeaders, + ContainerFilterBlobsExceptionHeaders, + ContainerAcquireLeaseHeaders, + ContainerAcquireLeaseExceptionHeaders, + ContainerReleaseLeaseHeaders, + ContainerReleaseLeaseExceptionHeaders, + ContainerRenewLeaseHeaders, + ContainerRenewLeaseExceptionHeaders, + ContainerBreakLeaseHeaders, + ContainerBreakLeaseExceptionHeaders, + ContainerChangeLeaseHeaders, + ContainerChangeLeaseExceptionHeaders, + ContainerListBlobFlatSegmentHeaders, + ContainerListBlobFlatSegmentExceptionHeaders, + ContainerListBlobHierarchySegmentHeaders, + ContainerListBlobHierarchySegmentExceptionHeaders, + ContainerGetAccountInfoHeaders, + ContainerGetAccountInfoExceptionHeaders, + BlobDownloadHeaders, + BlobDownloadExceptionHeaders, + BlobGetPropertiesHeaders, + BlobGetPropertiesExceptionHeaders, + BlobDeleteHeaders, + BlobDeleteExceptionHeaders, + BlobUndeleteHeaders, + BlobUndeleteExceptionHeaders, + BlobSetExpiryHeaders, + BlobSetExpiryExceptionHeaders, + BlobSetHttpHeadersHeaders, + BlobSetHttpHeadersExceptionHeaders, + BlobSetImmutabilityPolicyHeaders, + BlobSetImmutabilityPolicyExceptionHeaders, + BlobDeleteImmutabilityPolicyHeaders, + BlobDeleteImmutabilityPolicyExceptionHeaders, + BlobSetLegalHoldHeaders, + BlobSetLegalHoldExceptionHeaders, + BlobSetMetadataHeaders, + BlobSetMetadataExceptionHeaders, + BlobAcquireLeaseHeaders, + BlobAcquireLeaseExceptionHeaders, + BlobReleaseLeaseHeaders, + BlobReleaseLeaseExceptionHeaders, + BlobRenewLeaseHeaders, + BlobRenewLeaseExceptionHeaders, + BlobChangeLeaseHeaders, + BlobChangeLeaseExceptionHeaders, + BlobBreakLeaseHeaders, + BlobBreakLeaseExceptionHeaders, + BlobCreateSnapshotHeaders, + BlobCreateSnapshotExceptionHeaders, + BlobStartCopyFromURLHeaders, + BlobStartCopyFromURLExceptionHeaders, + BlobCopyFromURLHeaders, + BlobCopyFromURLExceptionHeaders, + BlobAbortCopyFromURLHeaders, + BlobAbortCopyFromURLExceptionHeaders, + BlobSetTierHeaders, + BlobSetTierExceptionHeaders, + BlobGetAccountInfoHeaders, + BlobGetAccountInfoExceptionHeaders, + BlobQueryHeaders, + BlobQueryExceptionHeaders, + BlobGetTagsHeaders, + BlobGetTagsExceptionHeaders, + BlobSetTagsHeaders, + BlobSetTagsExceptionHeaders, + PageBlobCreateHeaders, + PageBlobCreateExceptionHeaders, + PageBlobUploadPagesHeaders, + PageBlobUploadPagesExceptionHeaders, + PageBlobClearPagesHeaders, + PageBlobClearPagesExceptionHeaders, + PageBlobUploadPagesFromURLHeaders, + PageBlobUploadPagesFromURLExceptionHeaders, + PageBlobGetPageRangesHeaders, + PageBlobGetPageRangesExceptionHeaders, + 
PageBlobGetPageRangesDiffHeaders, + PageBlobGetPageRangesDiffExceptionHeaders, + PageBlobResizeHeaders, + PageBlobResizeExceptionHeaders, + PageBlobUpdateSequenceNumberHeaders, + PageBlobUpdateSequenceNumberExceptionHeaders, + PageBlobCopyIncrementalHeaders, + PageBlobCopyIncrementalExceptionHeaders, + AppendBlobCreateHeaders, + AppendBlobCreateExceptionHeaders, + AppendBlobAppendBlockHeaders, + AppendBlobAppendBlockExceptionHeaders, + AppendBlobAppendBlockFromUrlHeaders, + AppendBlobAppendBlockFromUrlExceptionHeaders, + AppendBlobSealHeaders, + AppendBlobSealExceptionHeaders, + BlockBlobUploadHeaders, + BlockBlobUploadExceptionHeaders, + BlockBlobPutBlobFromUrlHeaders, + BlockBlobPutBlobFromUrlExceptionHeaders, + BlockBlobStageBlockHeaders, + BlockBlobStageBlockExceptionHeaders, + BlockBlobStageBlockFromURLHeaders, + BlockBlobStageBlockFromURLExceptionHeaders, + BlockBlobCommitBlockListHeaders, + BlockBlobCommitBlockListExceptionHeaders, + BlockBlobGetBlockListHeaders, + BlockBlobGetBlockListExceptionHeaders + }); + var contentType = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } + }; + var blobServiceProperties = { + parameterPath: "blobServiceProperties", + mapper: BlobServiceProperties + }; + var accept = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } + }; + var url = { + parameterPath: "url", + mapper: { + serializedName: "url", + required: true, + xmlName: "url", + type: { + name: "String" + } + }, + skipEncoding: true + }; + var restype = { + parameterPath: "restype", + mapper: { + defaultValue: "service", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } + }; + var comp = { + parameterPath: "comp", + mapper: { + defaultValue: "properties", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + var timeoutInSeconds = { + parameterPath: ["options", "timeoutInSeconds"], + mapper: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "timeout", + xmlName: "timeout", + type: { + name: "Number" + } + } + }; + var version3 = { + parameterPath: "version", + mapper: { + defaultValue: "2023-11-03", + isConstant: true, + serializedName: "x-ms-version", + type: { + name: "String" + } + } + }; + var requestId = { + parameterPath: ["options", "requestId"], + mapper: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + } + }; + var accept1 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } + }; + var comp1 = { + parameterPath: "comp", + mapper: { + defaultValue: "stats", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + var comp2 = { + parameterPath: "comp", + mapper: { + defaultValue: "list", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + var prefix = { + parameterPath: ["options", "prefix"], + mapper: { + serializedName: "prefix", + xmlName: "prefix", + type: { + name: "String" + } + } + }; + var marker = { + parameterPath: ["options", "marker"], + mapper: { + serializedName: "marker", + xmlName: "marker", + type: { + name: "String" + } + } + }; + var maxPageSize = { + parameterPath: ["options", "maxPageSize"], + mapper: { + 
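// Illustrative sketch, not part of the generated bundle: each request parameter
// in this file pairs a `parameterPath` (where the value lives on the caller's
// arguments) with a mapper describing its wire name. Constants such as `comp`
// and `restype` carry `isConstant: true` with a `defaultValue`; optional values
// such as `timeout` are looked up along the path and validated against
// `constraints`. A minimal resolver under those assumptions:
function resolveParameter(param, operationArgs) {
  if (param.mapper.isConstant) {
    return { name: param.mapper.serializedName, value: param.mapper.defaultValue };
  }
  const path = Array.isArray(param.parameterPath) ? param.parameterPath : [param.parameterPath];
  let value = operationArgs;
  for (const segment of path) {
    if (value == null) return undefined;
    value = value[segment];
  }
  if (value === undefined) return undefined;
  const c = param.mapper.constraints;
  if (c && c.InclusiveMinimum !== undefined && value < c.InclusiveMinimum) {
    throw new RangeError(param.mapper.serializedName + " must be >= " + c.InclusiveMinimum);
  }
  return { name: param.mapper.serializedName, value };
}
// resolveParameter(timeoutInSeconds, { options: { timeoutInSeconds: 30 } })
//   => { name: "timeout", value: 30 }
// resolveParameter(comp, {}) => { name: "comp", value: "properties" }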
constraints: { + InclusiveMinimum: 1 + }, + serializedName: "maxresults", + xmlName: "maxresults", + type: { + name: "Number" + } + } + }; + var include = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListContainersIncludeType", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: ["metadata", "deleted", "system"] + } + } + } + }, + collectionFormat: coreHttp.QueryCollectionFormat.Csv + }; + var keyInfo = { + parameterPath: "keyInfo", + mapper: KeyInfo + }; + var comp3 = { + parameterPath: "comp", + mapper: { + defaultValue: "userdelegationkey", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + var restype1 = { + parameterPath: "restype", + mapper: { + defaultValue: "account", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } + }; + var body = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } + } + }; + var comp4 = { + parameterPath: "comp", + mapper: { + defaultValue: "batch", + isConstant: true, + serializedName: "comp", + type: { + name: "String" } } }; - var AppendBlobAppendBlockHeaders = { - serializedName: "AppendBlob_appendBlockHeaders", - type: { - name: "Composite", - className: "AppendBlobAppendBlockHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - blobAppendOffset: { - serializedName: "x-ms-blob-append-offset", - xmlName: "x-ms-blob-append-offset", - type: { - name: "String" - } - }, - blobCommittedBlockCount: { - serializedName: "x-ms-blob-committed-block-count", - xmlName: "x-ms-blob-committed-block-count", - type: { - name: "Number" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + var contentLength = { + parameterPath: "contentLength", + mapper: { + serializedName: "Content-Length", + required: true, + xmlName: "Content-Length", + type: { + name: "Number" } } }; - var AppendBlobAppendBlockExceptionHeaders = { - serializedName: "AppendBlob_appendBlockExceptionHeaders", - type: { - name: "Composite", - 
className: "AppendBlobAppendBlockExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + var multipartContentType = { + parameterPath: "multipartContentType", + mapper: { + serializedName: "Content-Type", + required: true, + xmlName: "Content-Type", + type: { + name: "String" } } }; - var AppendBlobAppendBlockFromUrlHeaders = { - serializedName: "AppendBlob_appendBlockFromUrlHeaders", - type: { - name: "Composite", - className: "AppendBlobAppendBlockFromUrlHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - blobAppendOffset: { - serializedName: "x-ms-blob-append-offset", - xmlName: "x-ms-blob-append-offset", - type: { - name: "String" - } - }, - blobCommittedBlockCount: { - serializedName: "x-ms-blob-committed-block-count", - xmlName: "x-ms-blob-committed-block-count", - type: { - name: "Number" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + var comp5 = { + parameterPath: "comp", + mapper: { + defaultValue: "blobs", + isConstant: true, + serializedName: "comp", + type: { + name: "String" } } }; - var AppendBlobAppendBlockFromUrlExceptionHeaders = { - serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", - type: { - name: "Composite", - className: "AppendBlobAppendBlockFromUrlExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + var where = { + parameterPath: ["options", "where"], + mapper: { + serializedName: "where", + xmlName: "where", + type: { + name: "String" } } }; - var AppendBlobSealHeaders = { - serializedName: "AppendBlob_sealHeaders", - type: { - name: "Composite", - className: "AppendBlobSealHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - 
version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - isSealed: { - serializedName: "x-ms-blob-sealed", - xmlName: "x-ms-blob-sealed", - type: { - name: "Boolean" - } - } + var restype2 = { + parameterPath: "restype", + mapper: { + defaultValue: "container", + isConstant: true, + serializedName: "restype", + type: { + name: "String" } } }; - var AppendBlobSealExceptionHeaders = { - serializedName: "AppendBlob_sealExceptionHeaders", - type: { - name: "Composite", - className: "AppendBlobSealExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + var metadata = { + parameterPath: ["options", "metadata"], + mapper: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + } + }; + var access = { + parameterPath: ["options", "access"], + mapper: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] } } }; - var BlockBlobUploadHeaders = { - serializedName: "BlockBlob_uploadHeaders", - type: { - name: "Composite", - className: "BlockBlobUploadHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + var defaultEncryptionScope = { + parameterPath: [ + "options", + "containerEncryptionScope", + "defaultEncryptionScope" + ], + mapper: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String" + } + } + }; + var preventEncryptionScopeOverride = { + parameterPath: [ + "options", + "containerEncryptionScope", + "preventEncryptionScopeOverride" + ], + mapper: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean" + } + } + }; + var leaseId = { + parameterPath: ["options", 
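// Illustrative sketch, not part of the generated bundle: the `metadata`
// parameter above is a Dictionary mapper with `headerCollectionPrefix:
// "x-ms-meta-"`, i.e. every entry of the caller's metadata object is sent as
// its own prefixed request header rather than as one combined value. Expanding
// it by hand (metadata values invented):
function expandHeaderCollection(dictionaryValue, headerPrefix) {
  const headers = {};
  for (const [name, value] of Object.entries(dictionaryValue || {})) {
    headers[headerPrefix + name] = value;
  }
  return headers;
}
// expandHeaderCollection({ project: "qodana", stage: "ci" }, "x-ms-meta-")
//   => { "x-ms-meta-project": "qodana", "x-ms-meta-stage": "ci" }
// Through the public SDK the same wire shape comes from, for example,
// containerClient.setMetadata({ project: "qodana" }).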
"leaseAccessConditions", "leaseId"], + mapper: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + } + }; + var ifModifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], + mapper: { + serializedName: "If-Modified-Since", + xmlName: "If-Modified-Since", + type: { + name: "DateTimeRfc1123" } } }; - var BlockBlobUploadExceptionHeaders = { - serializedName: "BlockBlob_uploadExceptionHeaders", - type: { - name: "Composite", - className: "BlockBlobUploadExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + var ifUnmodifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], + mapper: { + serializedName: "If-Unmodified-Since", + xmlName: "If-Unmodified-Since", + type: { + name: "DateTimeRfc1123" } } }; - var BlockBlobPutBlobFromUrlHeaders = { - serializedName: "BlockBlob_putBlobFromUrlHeaders", - type: { - name: "Composite", - className: "BlockBlobPutBlobFromUrlHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + var comp6 = { + parameterPath: "comp", + mapper: { + defaultValue: "metadata", + isConstant: true, + serializedName: "comp", + type: { + name: "String" } } }; - var BlockBlobPutBlobFromUrlExceptionHeaders = { - serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", - type: { - name: "Composite", - className: "BlockBlobPutBlobFromUrlExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + var comp7 = { + parameterPath: "comp", + mapper: { + defaultValue: "acl", + isConstant: true, + serializedName: "comp", + type: { + name: "String" } } }; - var BlockBlobStageBlockHeaders = { - serializedName: "BlockBlob_stageBlockHeaders", - type: { - name: "Composite", - className: "BlockBlobStageBlockHeaders", - modelProperties: { - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - 
clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + var containerAcl = { + parameterPath: ["options", "containerAcl"], + mapper: { + serializedName: "containerAcl", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier", + type: { + name: "Sequence", + element: { type: { - name: "String" + name: "Composite", + className: "SignedIdentifier" } } } } }; - var BlockBlobStageBlockExceptionHeaders = { - serializedName: "BlockBlob_stageBlockExceptionHeaders", - type: { - name: "Composite", - className: "BlockBlobStageBlockExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + var comp8 = { + parameterPath: "comp", + mapper: { + defaultValue: "undelete", + isConstant: true, + serializedName: "comp", + type: { + name: "String" } } }; - var BlockBlobStageBlockFromURLHeaders = { - serializedName: "BlockBlob_stageBlockFromURLHeaders", - type: { - name: "Composite", - className: "BlockBlobStageBlockFromURLHeaders", - modelProperties: { - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + var deletedContainerName = { + parameterPath: ["options", "deletedContainerName"], + mapper: { + serializedName: 
"x-ms-deleted-container-name", + xmlName: "x-ms-deleted-container-name", + type: { + name: "String" } } }; - var BlockBlobStageBlockFromURLExceptionHeaders = { - serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", - type: { - name: "Composite", - className: "BlockBlobStageBlockFromURLExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + var deletedContainerVersion = { + parameterPath: ["options", "deletedContainerVersion"], + mapper: { + serializedName: "x-ms-deleted-container-version", + xmlName: "x-ms-deleted-container-version", + type: { + name: "String" } } }; - var BlockBlobCommitBlockListHeaders = { - serializedName: "BlockBlob_commitBlockListHeaders", - type: { - name: "Composite", - className: "BlockBlobCommitBlockListHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + var comp9 = { + parameterPath: "comp", + mapper: { + defaultValue: "rename", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + var sourceContainerName = { + parameterPath: "sourceContainerName", + mapper: { + serializedName: "x-ms-source-container-name", + required: true, + xmlName: "x-ms-source-container-name", + type: { + name: "String" + } + } + }; + var sourceLeaseId = { + parameterPath: ["options", "sourceLeaseId"], + mapper: { + serializedName: "x-ms-source-lease-id", + xmlName: "x-ms-source-lease-id", + type: { + name: "String" + } + } + }; + var comp10 = { + parameterPath: "comp", + mapper: { + defaultValue: "lease", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } + }; + var action = { + parameterPath: "action", + mapper: { + defaultValue: "acquire", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } + }; + var duration = { + parameterPath: ["options", "duration"], + mapper: { + serializedName: "x-ms-lease-duration", + xmlName: 
"x-ms-lease-duration", + type: { + name: "Number" + } + } + }; + var proposedLeaseId = { + parameterPath: ["options", "proposedLeaseId"], + mapper: { + serializedName: "x-ms-proposed-lease-id", + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" } } }; - var BlockBlobCommitBlockListExceptionHeaders = { - serializedName: "BlockBlob_commitBlockListExceptionHeaders", - type: { - name: "Composite", - className: "BlockBlobCommitBlockListExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + var action1 = { + parameterPath: "action", + mapper: { + defaultValue: "release", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" } } }; - var BlockBlobGetBlockListHeaders = { - serializedName: "BlockBlob_getBlockListHeaders", - type: { - name: "Composite", - className: "BlockBlobGetBlockListHeaders", - modelProperties: { - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - contentType: { - serializedName: "content-type", - xmlName: "content-type", - type: { - name: "String" - } - }, - blobContentLength: { - serializedName: "x-ms-blob-content-length", - xmlName: "x-ms-blob-content-length", - type: { - name: "Number" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + var leaseId1 = { + parameterPath: "leaseId", + mapper: { + serializedName: "x-ms-lease-id", + required: true, + xmlName: "x-ms-lease-id", + type: { + name: "String" } } }; - var BlockBlobGetBlockListExceptionHeaders = { - serializedName: "BlockBlob_getBlockListExceptionHeaders", - type: { - name: "Composite", - className: "BlockBlobGetBlockListExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + var action2 = { + parameterPath: "action", + mapper: { + defaultValue: "renew", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" } } }; - var Mappers = /* @__PURE__ */ Object.freeze({ - __proto__: null, - BlobServiceProperties, - Logging, - RetentionPolicy, - Metrics, - CorsRule, - StaticWebsite, - StorageError, - BlobServiceStatistics, - GeoReplication, - ListContainersSegmentResponse, - ContainerItem, - ContainerProperties, - KeyInfo, - UserDelegationKey, - FilterBlobSegment, - FilterBlobItem, - BlobTags, - BlobTag, - SignedIdentifier, - AccessPolicy, - ListBlobsFlatSegmentResponse, - BlobFlatListSegment, - BlobItemInternal, - BlobName, - BlobPropertiesInternal, - ListBlobsHierarchySegmentResponse, - BlobHierarchyListSegment, - BlobPrefix, - BlockLookupList, - BlockList, - Block, - PageList, - PageRange, - ClearRange, - QueryRequest, - QuerySerialization, - QueryFormat, - DelimitedTextConfiguration, - JsonTextConfiguration, - ArrowConfiguration, - ArrowField, - 
ServiceSetPropertiesHeaders, - ServiceSetPropertiesExceptionHeaders, - ServiceGetPropertiesHeaders, - ServiceGetPropertiesExceptionHeaders, - ServiceGetStatisticsHeaders, - ServiceGetStatisticsExceptionHeaders, - ServiceListContainersSegmentHeaders, - ServiceListContainersSegmentExceptionHeaders, - ServiceGetUserDelegationKeyHeaders, - ServiceGetUserDelegationKeyExceptionHeaders, - ServiceGetAccountInfoHeaders, - ServiceGetAccountInfoExceptionHeaders, - ServiceSubmitBatchHeaders, - ServiceSubmitBatchExceptionHeaders, - ServiceFilterBlobsHeaders, - ServiceFilterBlobsExceptionHeaders, - ContainerCreateHeaders, - ContainerCreateExceptionHeaders, - ContainerGetPropertiesHeaders, - ContainerGetPropertiesExceptionHeaders, - ContainerDeleteHeaders, - ContainerDeleteExceptionHeaders, - ContainerSetMetadataHeaders, - ContainerSetMetadataExceptionHeaders, - ContainerGetAccessPolicyHeaders, - ContainerGetAccessPolicyExceptionHeaders, - ContainerSetAccessPolicyHeaders, - ContainerSetAccessPolicyExceptionHeaders, - ContainerRestoreHeaders, - ContainerRestoreExceptionHeaders, - ContainerRenameHeaders, - ContainerRenameExceptionHeaders, - ContainerSubmitBatchHeaders, - ContainerSubmitBatchExceptionHeaders, - ContainerFilterBlobsHeaders, - ContainerFilterBlobsExceptionHeaders, - ContainerAcquireLeaseHeaders, - ContainerAcquireLeaseExceptionHeaders, - ContainerReleaseLeaseHeaders, - ContainerReleaseLeaseExceptionHeaders, - ContainerRenewLeaseHeaders, - ContainerRenewLeaseExceptionHeaders, - ContainerBreakLeaseHeaders, - ContainerBreakLeaseExceptionHeaders, - ContainerChangeLeaseHeaders, - ContainerChangeLeaseExceptionHeaders, - ContainerListBlobFlatSegmentHeaders, - ContainerListBlobFlatSegmentExceptionHeaders, - ContainerListBlobHierarchySegmentHeaders, - ContainerListBlobHierarchySegmentExceptionHeaders, - ContainerGetAccountInfoHeaders, - ContainerGetAccountInfoExceptionHeaders, - BlobDownloadHeaders, - BlobDownloadExceptionHeaders, - BlobGetPropertiesHeaders, - BlobGetPropertiesExceptionHeaders, - BlobDeleteHeaders, - BlobDeleteExceptionHeaders, - BlobUndeleteHeaders, - BlobUndeleteExceptionHeaders, - BlobSetExpiryHeaders, - BlobSetExpiryExceptionHeaders, - BlobSetHttpHeadersHeaders, - BlobSetHttpHeadersExceptionHeaders, - BlobSetImmutabilityPolicyHeaders, - BlobSetImmutabilityPolicyExceptionHeaders, - BlobDeleteImmutabilityPolicyHeaders, - BlobDeleteImmutabilityPolicyExceptionHeaders, - BlobSetLegalHoldHeaders, - BlobSetLegalHoldExceptionHeaders, - BlobSetMetadataHeaders, - BlobSetMetadataExceptionHeaders, - BlobAcquireLeaseHeaders, - BlobAcquireLeaseExceptionHeaders, - BlobReleaseLeaseHeaders, - BlobReleaseLeaseExceptionHeaders, - BlobRenewLeaseHeaders, - BlobRenewLeaseExceptionHeaders, - BlobChangeLeaseHeaders, - BlobChangeLeaseExceptionHeaders, - BlobBreakLeaseHeaders, - BlobBreakLeaseExceptionHeaders, - BlobCreateSnapshotHeaders, - BlobCreateSnapshotExceptionHeaders, - BlobStartCopyFromURLHeaders, - BlobStartCopyFromURLExceptionHeaders, - BlobCopyFromURLHeaders, - BlobCopyFromURLExceptionHeaders, - BlobAbortCopyFromURLHeaders, - BlobAbortCopyFromURLExceptionHeaders, - BlobSetTierHeaders, - BlobSetTierExceptionHeaders, - BlobGetAccountInfoHeaders, - BlobGetAccountInfoExceptionHeaders, - BlobQueryHeaders, - BlobQueryExceptionHeaders, - BlobGetTagsHeaders, - BlobGetTagsExceptionHeaders, - BlobSetTagsHeaders, - BlobSetTagsExceptionHeaders, - PageBlobCreateHeaders, - PageBlobCreateExceptionHeaders, - PageBlobUploadPagesHeaders, - PageBlobUploadPagesExceptionHeaders, - PageBlobClearPagesHeaders, - 
PageBlobClearPagesExceptionHeaders, - PageBlobUploadPagesFromURLHeaders, - PageBlobUploadPagesFromURLExceptionHeaders, - PageBlobGetPageRangesHeaders, - PageBlobGetPageRangesExceptionHeaders, - PageBlobGetPageRangesDiffHeaders, - PageBlobGetPageRangesDiffExceptionHeaders, - PageBlobResizeHeaders, - PageBlobResizeExceptionHeaders, - PageBlobUpdateSequenceNumberHeaders, - PageBlobUpdateSequenceNumberExceptionHeaders, - PageBlobCopyIncrementalHeaders, - PageBlobCopyIncrementalExceptionHeaders, - AppendBlobCreateHeaders, - AppendBlobCreateExceptionHeaders, - AppendBlobAppendBlockHeaders, - AppendBlobAppendBlockExceptionHeaders, - AppendBlobAppendBlockFromUrlHeaders, - AppendBlobAppendBlockFromUrlExceptionHeaders, - AppendBlobSealHeaders, - AppendBlobSealExceptionHeaders, - BlockBlobUploadHeaders, - BlockBlobUploadExceptionHeaders, - BlockBlobPutBlobFromUrlHeaders, - BlockBlobPutBlobFromUrlExceptionHeaders, - BlockBlobStageBlockHeaders, - BlockBlobStageBlockExceptionHeaders, - BlockBlobStageBlockFromURLHeaders, - BlockBlobStageBlockFromURLExceptionHeaders, - BlockBlobCommitBlockListHeaders, - BlockBlobCommitBlockListExceptionHeaders, - BlockBlobGetBlockListHeaders, - BlockBlobGetBlockListExceptionHeaders - }); - var contentType = { - parameterPath: ["options", "contentType"], + var action3 = { + parameterPath: "action", mapper: { - defaultValue: "application/xml", + defaultValue: "break", isConstant: true, - serializedName: "Content-Type", + serializedName: "x-ms-lease-action", type: { name: "String" } } }; - var blobServiceProperties = { - parameterPath: "blobServiceProperties", - mapper: BlobServiceProperties + var breakPeriod = { + parameterPath: ["options", "breakPeriod"], + mapper: { + serializedName: "x-ms-lease-break-period", + xmlName: "x-ms-lease-break-period", + type: { + name: "Number" + } + } }; - var accept = { - parameterPath: "accept", + var action4 = { + parameterPath: "action", mapper: { - defaultValue: "application/xml", + defaultValue: "change", isConstant: true, - serializedName: "Accept", + serializedName: "x-ms-lease-action", type: { name: "String" } } }; - var url = { - parameterPath: "url", + var proposedLeaseId1 = { + parameterPath: "proposedLeaseId", mapper: { - serializedName: "url", + serializedName: "x-ms-proposed-lease-id", required: true, - xmlName: "url", + xmlName: "x-ms-proposed-lease-id", type: { name: "String" } + } + }; + var include1 = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListBlobsIncludeItem", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: [ + "copy", + "deleted", + "metadata", + "snapshots", + "uncommittedblobs", + "versions", + "tags", + "immutabilitypolicy", + "legalhold", + "deletedwithversions" + ] + } + } + } }, - skipEncoding: true + collectionFormat: coreHttp.QueryCollectionFormat.Csv }; - var restype = { - parameterPath: "restype", + var delimiter = { + parameterPath: "delimiter", mapper: { - defaultValue: "service", - isConstant: true, - serializedName: "restype", + serializedName: "delimiter", + required: true, + xmlName: "delimiter", type: { name: "String" } } }; - var comp = { - parameterPath: "comp", + var snapshot = { + parameterPath: ["options", "snapshot"], mapper: { - defaultValue: "properties", - isConstant: true, - serializedName: "comp", + serializedName: "snapshot", + xmlName: "snapshot", type: { name: "String" } } }; - var timeoutInSeconds = { - parameterPath: ["options", "timeoutInSeconds"], + var versionId 
= { + parameterPath: ["options", "versionId"], mapper: { - constraints: { - InclusiveMinimum: 0 - }, - serializedName: "timeout", - xmlName: "timeout", + serializedName: "versionid", + xmlName: "versionid", type: { - name: "Number" + name: "String" } } }; - var version3 = { - parameterPath: "version", + var range = { + parameterPath: ["options", "range"], mapper: { - defaultValue: "2023-11-03", - isConstant: true, - serializedName: "x-ms-version", + serializedName: "x-ms-range", + xmlName: "x-ms-range", type: { name: "String" } } }; - var requestId = { - parameterPath: ["options", "requestId"], + var rangeGetContentMD5 = { + parameterPath: ["options", "rangeGetContentMD5"], mapper: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + serializedName: "x-ms-range-get-content-md5", + xmlName: "x-ms-range-get-content-md5", + type: { + name: "Boolean" + } + } + }; + var rangeGetContentCRC64 = { + parameterPath: ["options", "rangeGetContentCRC64"], + mapper: { + serializedName: "x-ms-range-get-content-crc64", + xmlName: "x-ms-range-get-content-crc64", + type: { + name: "Boolean" + } + } + }; + var encryptionKey = { + parameterPath: ["options", "cpkInfo", "encryptionKey"], + mapper: { + serializedName: "x-ms-encryption-key", + xmlName: "x-ms-encryption-key", type: { name: "String" } } }; - var accept1 = { - parameterPath: "accept", + var encryptionKeySha256 = { + parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], mapper: { - defaultValue: "application/xml", - isConstant: true, - serializedName: "Accept", + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + } + }; + var encryptionAlgorithm = { + parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], + mapper: { + serializedName: "x-ms-encryption-algorithm", + xmlName: "x-ms-encryption-algorithm", + type: { + name: "String" + } + } + }; + var ifMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], + mapper: { + serializedName: "If-Match", + xmlName: "If-Match", + type: { + name: "String" + } + } + }; + var ifNoneMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], + mapper: { + serializedName: "If-None-Match", + xmlName: "If-None-Match", + type: { + name: "String" + } + } + }; + var ifTags = { + parameterPath: ["options", "modifiedAccessConditions", "ifTags"], + mapper: { + serializedName: "x-ms-if-tags", + xmlName: "x-ms-if-tags", + type: { + name: "String" + } + } + }; + var deleteSnapshots = { + parameterPath: ["options", "deleteSnapshots"], + mapper: { + serializedName: "x-ms-delete-snapshots", + xmlName: "x-ms-delete-snapshots", + type: { + name: "Enum", + allowedValues: ["include", "only"] + } + } + }; + var blobDeleteType = { + parameterPath: ["options", "blobDeleteType"], + mapper: { + serializedName: "deletetype", + xmlName: "deletetype", type: { name: "String" } } }; - var comp1 = { + var comp11 = { parameterPath: "comp", mapper: { - defaultValue: "stats", + defaultValue: "expiry", isConstant: true, serializedName: "comp", type: { @@ -74637,109 +71134,91 @@ var require_dist4 = __commonJS({ } } }; - var comp2 = { - parameterPath: "comp", + var expiryOptions = { + parameterPath: "expiryOptions", mapper: { - defaultValue: "list", - isConstant: true, - serializedName: "comp", + serializedName: "x-ms-expiry-option", + required: true, + xmlName: "x-ms-expiry-option", type: { name: "String" } } }; - var prefix = { - parameterPath: ["options", "prefix"], + var expiresOn = { + 
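// Illustrative usage sketch, not part of the generated bundle: the If-Match /
// If-None-Match / x-ms-if-tags parameters here surface in the public SDK as the
// `conditions` option, and the x-ms-encryption-key / -key-sha256 / -algorithm
// trio as `customerProvidedKey`. Assuming a BlobClient and Node Buffers:
async function downloadIfChanged(blobClient, lastEtag) {
  // The service answers 304 (a RestError) when the blob still matches lastEtag.
  return blobClient.download(0, undefined, {
    conditions: { ifNoneMatch: lastEtag }
  });
}
async function downloadWithCustomerKey(blobClient, keyBytes, keySha256Bytes) {
  return blobClient.download(0, undefined, {
    customerProvidedKey: {
      encryptionKey: keyBytes.toString("base64"),
      encryptionKeySha256: keySha256Bytes.toString("base64"),
      encryptionAlgorithm: "AES256"
    }
  });
}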
parameterPath: ["options", "expiresOn"], mapper: { - serializedName: "prefix", - xmlName: "prefix", + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", type: { name: "String" } } }; - var marker = { - parameterPath: ["options", "marker"], + var blobCacheControl = { + parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], mapper: { - serializedName: "marker", - xmlName: "marker", + serializedName: "x-ms-blob-cache-control", + xmlName: "x-ms-blob-cache-control", type: { name: "String" } } }; - var maxPageSize = { - parameterPath: ["options", "maxPageSize"], + var blobContentType = { + parameterPath: ["options", "blobHttpHeaders", "blobContentType"], mapper: { - constraints: { - InclusiveMinimum: 1 - }, - serializedName: "maxresults", - xmlName: "maxresults", + serializedName: "x-ms-blob-content-type", + xmlName: "x-ms-blob-content-type", type: { - name: "Number" + name: "String" } } }; - var include = { - parameterPath: ["options", "include"], + var blobContentMD5 = { + parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], mapper: { - serializedName: "include", - xmlName: "include", - xmlElementName: "ListContainersIncludeType", + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", type: { - name: "Sequence", - element: { - type: { - name: "Enum", - allowedValues: ["metadata", "deleted", "system"] - } - } + name: "ByteArray" } - }, - collectionFormat: coreHttp.QueryCollectionFormat.Csv - }; - var keyInfo = { - parameterPath: "keyInfo", - mapper: KeyInfo + } }; - var comp3 = { - parameterPath: "comp", + var blobContentEncoding = { + parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], mapper: { - defaultValue: "userdelegationkey", - isConstant: true, - serializedName: "comp", + serializedName: "x-ms-blob-content-encoding", + xmlName: "x-ms-blob-content-encoding", type: { name: "String" } } }; - var restype1 = { - parameterPath: "restype", + var blobContentLanguage = { + parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], mapper: { - defaultValue: "account", - isConstant: true, - serializedName: "restype", + serializedName: "x-ms-blob-content-language", + xmlName: "x-ms-blob-content-language", type: { name: "String" } } }; - var body = { - parameterPath: "body", + var blobContentDisposition = { + parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], mapper: { - serializedName: "body", - required: true, - xmlName: "body", + serializedName: "x-ms-blob-content-disposition", + xmlName: "x-ms-blob-content-disposition", type: { - name: "Stream" + name: "String" } } }; - var comp4 = { + var comp12 = { parameterPath: "comp", mapper: { - defaultValue: "batch", + defaultValue: "immutabilityPolicies", isConstant: true, serializedName: "comp", type: { @@ -74747,32 +71226,31 @@ var require_dist4 = __commonJS({ } } }; - var contentLength = { - parameterPath: "contentLength", + var immutabilityPolicyExpiry = { + parameterPath: ["options", "immutabilityPolicyExpiry"], mapper: { - serializedName: "Content-Length", - required: true, - xmlName: "Content-Length", + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", type: { - name: "Number" + name: "DateTimeRfc1123" } } }; - var multipartContentType = { - parameterPath: "multipartContentType", + var immutabilityPolicyMode = { + parameterPath: ["options", "immutabilityPolicyMode"], mapper: { - serializedName: "Content-Type", - required: true, - xmlName: "Content-Type", + serializedName: 
"x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", type: { - name: "String" + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] } } }; - var comp5 = { + var comp13 = { parameterPath: "comp", mapper: { - defaultValue: "blobs", + defaultValue: "legalhold", isConstant: true, serializedName: "comp", type: { @@ -74780,215 +71258,225 @@ var require_dist4 = __commonJS({ } } }; - var where = { - parameterPath: ["options", "where"], + var legalHold = { + parameterPath: "legalHold", mapper: { - serializedName: "where", - xmlName: "where", + serializedName: "x-ms-legal-hold", + required: true, + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } + }; + var encryptionScope = { + parameterPath: ["options", "encryptionScope"], + mapper: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", type: { name: "String" } } }; - var restype2 = { - parameterPath: "restype", + var comp14 = { + parameterPath: "comp", mapper: { - defaultValue: "container", + defaultValue: "snapshot", isConstant: true, - serializedName: "restype", + serializedName: "comp", type: { name: "String" } } }; - var metadata = { - parameterPath: ["options", "metadata"], + var tier = { + parameterPath: ["options", "tier"], mapper: { - serializedName: "x-ms-meta", - xmlName: "x-ms-meta", + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", type: { - name: "Dictionary", - value: { type: { name: "String" } } - }, - headerCollectionPrefix: "x-ms-meta-" + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold" + ] + } } }; - var access = { - parameterPath: ["options", "access"], + var rehydratePriority = { + parameterPath: ["options", "rehydratePriority"], mapper: { - serializedName: "x-ms-blob-public-access", - xmlName: "x-ms-blob-public-access", + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", type: { name: "Enum", - allowedValues: ["container", "blob"] + allowedValues: ["High", "Standard"] } } }; - var defaultEncryptionScope = { + var sourceIfModifiedSince = { parameterPath: [ "options", - "containerEncryptionScope", - "defaultEncryptionScope" + "sourceModifiedAccessConditions", + "sourceIfModifiedSince" ], mapper: { - serializedName: "x-ms-default-encryption-scope", - xmlName: "x-ms-default-encryption-scope", + serializedName: "x-ms-source-if-modified-since", + xmlName: "x-ms-source-if-modified-since", type: { - name: "String" + name: "DateTimeRfc1123" } } }; - var preventEncryptionScopeOverride = { + var sourceIfUnmodifiedSince = { parameterPath: [ "options", - "containerEncryptionScope", - "preventEncryptionScopeOverride" + "sourceModifiedAccessConditions", + "sourceIfUnmodifiedSince" ], mapper: { - serializedName: "x-ms-deny-encryption-scope-override", - xmlName: "x-ms-deny-encryption-scope-override", + serializedName: "x-ms-source-if-unmodified-since", + xmlName: "x-ms-source-if-unmodified-since", type: { - name: "Boolean" + name: "DateTimeRfc1123" } } }; - var leaseId = { - parameterPath: ["options", "leaseAccessConditions", "leaseId"], + var sourceIfMatch = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], mapper: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", + serializedName: "x-ms-source-if-match", + xmlName: "x-ms-source-if-match", type: { name: "String" } } }; - var ifModifiedSince = { - parameterPath: ["options", 
"modifiedAccessConditions", "ifModifiedSince"], - mapper: { - serializedName: "If-Modified-Since", - xmlName: "If-Modified-Since", - type: { - name: "DateTimeRfc1123" - } - } - }; - var ifUnmodifiedSince = { - parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], + var sourceIfNoneMatch = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfNoneMatch" + ], mapper: { - serializedName: "If-Unmodified-Since", - xmlName: "If-Unmodified-Since", + serializedName: "x-ms-source-if-none-match", + xmlName: "x-ms-source-if-none-match", type: { - name: "DateTimeRfc1123" + name: "String" } } }; - var comp6 = { - parameterPath: "comp", + var sourceIfTags = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], mapper: { - defaultValue: "metadata", - isConstant: true, - serializedName: "comp", + serializedName: "x-ms-source-if-tags", + xmlName: "x-ms-source-if-tags", type: { name: "String" } } }; - var comp7 = { - parameterPath: "comp", + var copySource = { + parameterPath: "copySource", mapper: { - defaultValue: "acl", - isConstant: true, - serializedName: "comp", + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", type: { name: "String" } } }; - var containerAcl = { - parameterPath: ["options", "containerAcl"], + var blobTagsString = { + parameterPath: ["options", "blobTagsString"], mapper: { - serializedName: "containerAcl", - xmlName: "SignedIdentifiers", - xmlIsWrapped: true, - xmlElementName: "SignedIdentifier", + serializedName: "x-ms-tags", + xmlName: "x-ms-tags", type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "SignedIdentifier" - } - } + name: "String" } } }; - var comp8 = { - parameterPath: "comp", + var sealBlob = { + parameterPath: ["options", "sealBlob"], mapper: { - defaultValue: "undelete", - isConstant: true, - serializedName: "comp", + serializedName: "x-ms-seal-blob", + xmlName: "x-ms-seal-blob", type: { - name: "String" + name: "Boolean" } } }; - var deletedContainerName = { - parameterPath: ["options", "deletedContainerName"], + var legalHold1 = { + parameterPath: ["options", "legalHold"], mapper: { - serializedName: "x-ms-deleted-container-name", - xmlName: "x-ms-deleted-container-name", + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", type: { - name: "String" + name: "Boolean" } } }; - var deletedContainerVersion = { - parameterPath: ["options", "deletedContainerVersion"], + var xMsRequiresSync = { + parameterPath: "xMsRequiresSync", mapper: { - serializedName: "x-ms-deleted-container-version", - xmlName: "x-ms-deleted-container-version", + defaultValue: "true", + isConstant: true, + serializedName: "x-ms-requires-sync", type: { name: "String" } } }; - var comp9 = { - parameterPath: "comp", + var sourceContentMD5 = { + parameterPath: ["options", "sourceContentMD5"], mapper: { - defaultValue: "rename", - isConstant: true, - serializedName: "comp", + serializedName: "x-ms-source-content-md5", + xmlName: "x-ms-source-content-md5", type: { - name: "String" + name: "ByteArray" } } }; - var sourceContainerName = { - parameterPath: "sourceContainerName", + var copySourceAuthorization = { + parameterPath: ["options", "copySourceAuthorization"], mapper: { - serializedName: "x-ms-source-container-name", - required: true, - xmlName: "x-ms-source-container-name", + serializedName: "x-ms-copy-source-authorization", + xmlName: "x-ms-copy-source-authorization", type: { name: "String" } } }; - var sourceLeaseId = { - parameterPath: 
["options", "sourceLeaseId"], + var copySourceTags = { + parameterPath: ["options", "copySourceTags"], mapper: { - serializedName: "x-ms-source-lease-id", - xmlName: "x-ms-source-lease-id", + serializedName: "x-ms-copy-source-tag-option", + xmlName: "x-ms-copy-source-tag-option", type: { - name: "String" + name: "Enum", + allowedValues: ["REPLACE", "COPY"] } } }; - var comp10 = { + var comp15 = { parameterPath: "comp", mapper: { - defaultValue: "lease", + defaultValue: "copy", isConstant: true, serializedName: "comp", type: { @@ -74996,288 +71484,304 @@ var require_dist4 = __commonJS({ } } }; - var action = { - parameterPath: "action", + var copyActionAbortConstant = { + parameterPath: "copyActionAbortConstant", mapper: { - defaultValue: "acquire", + defaultValue: "abort", isConstant: true, - serializedName: "x-ms-lease-action", + serializedName: "x-ms-copy-action", type: { name: "String" } } }; - var duration = { - parameterPath: ["options", "duration"], - mapper: { - serializedName: "x-ms-lease-duration", - xmlName: "x-ms-lease-duration", - type: { - name: "Number" - } - } - }; - var proposedLeaseId = { - parameterPath: ["options", "proposedLeaseId"], + var copyId = { + parameterPath: "copyId", mapper: { - serializedName: "x-ms-proposed-lease-id", - xmlName: "x-ms-proposed-lease-id", + serializedName: "copyid", + required: true, + xmlName: "copyid", type: { name: "String" } } }; - var action1 = { - parameterPath: "action", + var comp16 = { + parameterPath: "comp", mapper: { - defaultValue: "release", + defaultValue: "tier", isConstant: true, - serializedName: "x-ms-lease-action", + serializedName: "comp", type: { name: "String" } } }; - var leaseId1 = { - parameterPath: "leaseId", + var tier1 = { + parameterPath: "tier", mapper: { - serializedName: "x-ms-lease-id", + serializedName: "x-ms-access-tier", required: true, - xmlName: "x-ms-lease-id", + xmlName: "x-ms-access-tier", type: { - name: "String" + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold" + ] } } }; - var action2 = { - parameterPath: "action", + var queryRequest = { + parameterPath: ["options", "queryRequest"], + mapper: QueryRequest + }; + var comp17 = { + parameterPath: "comp", mapper: { - defaultValue: "renew", + defaultValue: "query", isConstant: true, - serializedName: "x-ms-lease-action", + serializedName: "comp", type: { name: "String" } } }; - var action3 = { - parameterPath: "action", + var comp18 = { + parameterPath: "comp", mapper: { - defaultValue: "break", + defaultValue: "tags", isConstant: true, - serializedName: "x-ms-lease-action", + serializedName: "comp", type: { name: "String" } } }; - var breakPeriod = { - parameterPath: ["options", "breakPeriod"], + var tags = { + parameterPath: ["options", "tags"], + mapper: BlobTags + }; + var transactionalContentMD5 = { + parameterPath: ["options", "transactionalContentMD5"], mapper: { - serializedName: "x-ms-lease-break-period", - xmlName: "x-ms-lease-break-period", + serializedName: "Content-MD5", + xmlName: "Content-MD5", type: { - name: "Number" + name: "ByteArray" } } }; - var action4 = { - parameterPath: "action", + var transactionalContentCrc64 = { + parameterPath: ["options", "transactionalContentCrc64"], mapper: { - defaultValue: "change", - isConstant: true, - serializedName: "x-ms-lease-action", + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", type: { - name: "String" + name: "ByteArray" } } }; - var 
proposedLeaseId1 = { - parameterPath: "proposedLeaseId", + var blobType = { + parameterPath: "blobType", mapper: { - serializedName: "x-ms-proposed-lease-id", - required: true, - xmlName: "x-ms-proposed-lease-id", + defaultValue: "PageBlob", + isConstant: true, + serializedName: "x-ms-blob-type", type: { name: "String" } } }; - var include1 = { - parameterPath: ["options", "include"], + var blobContentLength = { + parameterPath: "blobContentLength", mapper: { - serializedName: "include", - xmlName: "include", - xmlElementName: "ListBlobsIncludeItem", + serializedName: "x-ms-blob-content-length", + required: true, + xmlName: "x-ms-blob-content-length", type: { - name: "Sequence", - element: { - type: { - name: "Enum", - allowedValues: [ - "copy", - "deleted", - "metadata", - "snapshots", - "uncommittedblobs", - "versions", - "tags", - "immutabilitypolicy", - "legalhold", - "deletedwithversions" - ] - } - } + name: "Number" } - }, - collectionFormat: coreHttp.QueryCollectionFormat.Csv + } }; - var delimiter = { - parameterPath: "delimiter", + var blobSequenceNumber = { + parameterPath: ["options", "blobSequenceNumber"], mapper: { - serializedName: "delimiter", - required: true, - xmlName: "delimiter", + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", type: { - name: "String" + name: "Number" } } }; - var snapshot = { - parameterPath: ["options", "snapshot"], + var contentType1 = { + parameterPath: ["options", "contentType"], mapper: { - serializedName: "snapshot", - xmlName: "snapshot", + defaultValue: "application/octet-stream", + isConstant: true, + serializedName: "Content-Type", type: { name: "String" } } }; - var versionId = { - parameterPath: ["options", "versionId"], + var body1 = { + parameterPath: "body", mapper: { - serializedName: "versionid", - xmlName: "versionid", + serializedName: "body", + required: true, + xmlName: "body", type: { - name: "String" + name: "Stream" } } }; - var range = { - parameterPath: ["options", "range"], + var accept2 = { + parameterPath: "accept", mapper: { - serializedName: "x-ms-range", - xmlName: "x-ms-range", + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", type: { name: "String" } } }; - var rangeGetContentMD5 = { - parameterPath: ["options", "rangeGetContentMD5"], + var comp19 = { + parameterPath: "comp", mapper: { - serializedName: "x-ms-range-get-content-md5", - xmlName: "x-ms-range-get-content-md5", + defaultValue: "page", + isConstant: true, + serializedName: "comp", type: { - name: "Boolean" + name: "String" } } }; - var rangeGetContentCRC64 = { - parameterPath: ["options", "rangeGetContentCRC64"], + var pageWrite = { + parameterPath: "pageWrite", mapper: { - serializedName: "x-ms-range-get-content-crc64", - xmlName: "x-ms-range-get-content-crc64", + defaultValue: "update", + isConstant: true, + serializedName: "x-ms-page-write", type: { - name: "Boolean" + name: "String" } } }; - var encryptionKey = { - parameterPath: ["options", "cpkInfo", "encryptionKey"], + var ifSequenceNumberLessThanOrEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThanOrEqualTo" + ], mapper: { - serializedName: "x-ms-encryption-key", - xmlName: "x-ms-encryption-key", + serializedName: "x-ms-if-sequence-number-le", + xmlName: "x-ms-if-sequence-number-le", type: { - name: "String" + name: "Number" } } }; - var encryptionKeySha256 = { - parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], + var ifSequenceNumberLessThan = { + parameterPath: 
[ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThan" + ], mapper: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + serializedName: "x-ms-if-sequence-number-lt", + xmlName: "x-ms-if-sequence-number-lt", type: { - name: "String" + name: "Number" } } }; - var encryptionAlgorithm = { - parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], + var ifSequenceNumberEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberEqualTo" + ], mapper: { - serializedName: "x-ms-encryption-algorithm", - xmlName: "x-ms-encryption-algorithm", + serializedName: "x-ms-if-sequence-number-eq", + xmlName: "x-ms-if-sequence-number-eq", type: { - name: "String" + name: "Number" } } }; - var ifMatch = { - parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], + var pageWrite1 = { + parameterPath: "pageWrite", mapper: { - serializedName: "If-Match", - xmlName: "If-Match", + defaultValue: "clear", + isConstant: true, + serializedName: "x-ms-page-write", type: { name: "String" } } }; - var ifNoneMatch = { - parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], + var sourceUrl = { + parameterPath: "sourceUrl", mapper: { - serializedName: "If-None-Match", - xmlName: "If-None-Match", + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", type: { name: "String" } } }; - var ifTags = { - parameterPath: ["options", "modifiedAccessConditions", "ifTags"], + var sourceRange = { + parameterPath: "sourceRange", mapper: { - serializedName: "x-ms-if-tags", - xmlName: "x-ms-if-tags", + serializedName: "x-ms-source-range", + required: true, + xmlName: "x-ms-source-range", type: { name: "String" } } }; - var deleteSnapshots = { - parameterPath: ["options", "deleteSnapshots"], + var sourceContentCrc64 = { + parameterPath: ["options", "sourceContentCrc64"], mapper: { - serializedName: "x-ms-delete-snapshots", - xmlName: "x-ms-delete-snapshots", + serializedName: "x-ms-source-content-crc64", + xmlName: "x-ms-source-content-crc64", type: { - name: "Enum", - allowedValues: ["include", "only"] + name: "ByteArray" } } }; - var blobDeleteType = { - parameterPath: ["options", "blobDeleteType"], + var range1 = { + parameterPath: "range", mapper: { - serializedName: "deletetype", - xmlName: "deletetype", + serializedName: "x-ms-range", + required: true, + xmlName: "x-ms-range", type: { name: "String" } } }; - var comp11 = { + var comp20 = { parameterPath: "comp", mapper: { - defaultValue: "expiry", + defaultValue: "pagelist", isConstant: true, serializedName: "comp", type: { @@ -75285,91 +71789,109 @@ var require_dist4 = __commonJS({ } } }; - var expiryOptions = { - parameterPath: "expiryOptions", + var prevsnapshot = { + parameterPath: ["options", "prevsnapshot"], mapper: { - serializedName: "x-ms-expiry-option", - required: true, - xmlName: "x-ms-expiry-option", + serializedName: "prevsnapshot", + xmlName: "prevsnapshot", type: { name: "String" } } }; - var expiresOn = { - parameterPath: ["options", "expiresOn"], + var prevSnapshotUrl = { + parameterPath: ["options", "prevSnapshotUrl"], mapper: { - serializedName: "x-ms-expiry-time", - xmlName: "x-ms-expiry-time", + serializedName: "x-ms-previous-snapshot-url", + xmlName: "x-ms-previous-snapshot-url", type: { name: "String" } } }; - var blobCacheControl = { - parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], + var sequenceNumberAction = { + parameterPath: "sequenceNumberAction", mapper: { - 
serializedName: "x-ms-blob-cache-control", - xmlName: "x-ms-blob-cache-control", + serializedName: "x-ms-sequence-number-action", + required: true, + xmlName: "x-ms-sequence-number-action", type: { - name: "String" + name: "Enum", + allowedValues: ["max", "update", "increment"] } } }; - var blobContentType = { - parameterPath: ["options", "blobHttpHeaders", "blobContentType"], + var comp21 = { + parameterPath: "comp", mapper: { - serializedName: "x-ms-blob-content-type", - xmlName: "x-ms-blob-content-type", + defaultValue: "incrementalcopy", + isConstant: true, + serializedName: "comp", type: { name: "String" } } }; - var blobContentMD5 = { - parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], + var blobType1 = { + parameterPath: "blobType", mapper: { - serializedName: "x-ms-blob-content-md5", - xmlName: "x-ms-blob-content-md5", + defaultValue: "AppendBlob", + isConstant: true, + serializedName: "x-ms-blob-type", type: { - name: "ByteArray" + name: "String" } } }; - var blobContentEncoding = { - parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], + var comp22 = { + parameterPath: "comp", mapper: { - serializedName: "x-ms-blob-content-encoding", - xmlName: "x-ms-blob-content-encoding", + defaultValue: "appendblock", + isConstant: true, + serializedName: "comp", type: { name: "String" } } }; - var blobContentLanguage = { - parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], + var maxSize = { + parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], mapper: { - serializedName: "x-ms-blob-content-language", - xmlName: "x-ms-blob-content-language", + serializedName: "x-ms-blob-condition-maxsize", + xmlName: "x-ms-blob-condition-maxsize", type: { - name: "String" + name: "Number" } } }; - var blobContentDisposition = { - parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], + var appendPosition = { + parameterPath: [ + "options", + "appendPositionAccessConditions", + "appendPosition" + ], mapper: { - serializedName: "x-ms-blob-content-disposition", - xmlName: "x-ms-blob-content-disposition", + serializedName: "x-ms-blob-condition-appendpos", + xmlName: "x-ms-blob-condition-appendpos", + type: { + name: "Number" + } + } + }; + var sourceRange1 = { + parameterPath: ["options", "sourceRange"], + mapper: { + serializedName: "x-ms-source-range", + xmlName: "x-ms-source-range", type: { name: "String" } } }; - var comp12 = { + var comp23 = { parameterPath: "comp", mapper: { - defaultValue: "immutabilityPolicies", + defaultValue: "seal", isConstant: true, serializedName: "comp", type: { @@ -75377,31 +71899,31 @@ var require_dist4 = __commonJS({ } } }; - var immutabilityPolicyExpiry = { - parameterPath: ["options", "immutabilityPolicyExpiry"], + var blobType2 = { + parameterPath: "blobType", mapper: { - serializedName: "x-ms-immutability-policy-until-date", - xmlName: "x-ms-immutability-policy-until-date", + defaultValue: "BlockBlob", + isConstant: true, + serializedName: "x-ms-blob-type", type: { - name: "DateTimeRfc1123" + name: "String" } } }; - var immutabilityPolicyMode = { - parameterPath: ["options", "immutabilityPolicyMode"], + var copySourceBlobProperties = { + parameterPath: ["options", "copySourceBlobProperties"], mapper: { - serializedName: "x-ms-immutability-policy-mode", - xmlName: "x-ms-immutability-policy-mode", + serializedName: "x-ms-copy-source-blob-properties", + xmlName: "x-ms-copy-source-blob-properties", type: { - name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] + name: "Boolean" } 
} }; - var comp13 = { + var comp24 = { parameterPath: "comp", mapper: { - defaultValue: "legalhold", + defaultValue: "block", isConstant: true, serializedName: "comp", type: { @@ -75409,787 +71931,1411 @@ var require_dist4 = __commonJS({ } } }; - var legalHold = { - parameterPath: "legalHold", + var blockId = { + parameterPath: "blockId", mapper: { - serializedName: "x-ms-legal-hold", + serializedName: "blockid", required: true, - xmlName: "x-ms-legal-hold", + xmlName: "blockid", type: { - name: "Boolean" + name: "String" } } }; - var encryptionScope = { - parameterPath: ["options", "encryptionScope"], + var blocks = { + parameterPath: "blocks", + mapper: BlockLookupList + }; + var comp25 = { + parameterPath: "comp", mapper: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + defaultValue: "blocklist", + isConstant: true, + serializedName: "comp", type: { name: "String" } - } + } + }; + var listType = { + parameterPath: "listType", + mapper: { + defaultValue: "committed", + serializedName: "blocklisttype", + required: true, + xmlName: "blocklisttype", + type: { + name: "Enum", + allowedValues: ["committed", "uncommitted", "all"] + } + } + }; + var Service = class { + static { + __name(this, "Service"); + } + /** + * Initialize a new instance of the class Service class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * Sets properties for a storage account's Blob service endpoint, including properties for Storage + * Analytics and CORS (Cross-Origin Resource Sharing) rules + * @param blobServiceProperties The StorageService properties. + * @param options The options parameters. + */ + setProperties(blobServiceProperties2, options) { + const operationArguments = { + blobServiceProperties: blobServiceProperties2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); + } + /** + * gets the properties of a storage account's Blob service, including properties for Storage Analytics + * and CORS (Cross-Origin Resource Sharing) rules. + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); + } + /** + * Retrieves statistics related to replication for the Blob service. It is only available on the + * secondary location endpoint when read-access geo-redundant replication is enabled for the storage + * account. + * @param options The options parameters. + */ + getStatistics(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); + } + /** + * The List Containers Segment operation returns a list of the containers under the specified account + * @param options The options parameters. + */ + listContainersSegment(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); + } + /** + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. 
+ * @param keyInfo Key information + * @param options The options parameters. + */ + getUserDelegationKey(keyInfo2, options) { + const operationArguments = { + keyInfo: keyInfo2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength2, multipartContentType2, body2, options) { + const operationArguments = { + contentLength: contentLength2, + multipartContentType: multipartContentType2, + body: body2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a + * given search expression. Filter blobs searches across all containers within a storage account but + * can be scoped within the expression to a single container. + * @param options The options parameters. 
+ */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec$1); + } + }; + var xmlSerializer$5 = new coreHttp__namespace.Serializer( + Mappers, + /* isXml */ + true + ); + var setPropertiesOperationSpec = { + path: "/", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: ServiceSetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceSetPropertiesExceptionHeaders + } + }, + requestBody: blobServiceProperties, + queryParameters: [ + restype, + comp, + timeoutInSeconds + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version3, + requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5 + }; + var getPropertiesOperationSpec$2 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobServiceProperties, + headersMapper: ServiceGetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + restype, + comp, + timeoutInSeconds + ], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 + }; + var getStatisticsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobServiceStatistics, + headersMapper: ServiceGetStatisticsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetStatisticsExceptionHeaders + } + }, + queryParameters: [ + restype, + timeoutInSeconds, + comp1 + ], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 + }; + var listContainersSegmentOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListContainersSegmentResponse, + headersMapper: ServiceListContainersSegmentHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceListContainersSegmentExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + include + ], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 }; - var comp14 = { - parameterPath: "comp", - mapper: { - defaultValue: "snapshot", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var getUserDelegationKeyOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: UserDelegationKey, + headersMapper: ServiceGetUserDelegationKeyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetUserDelegationKeyExceptionHeaders } - } + }, + requestBody: keyInfo, + queryParameters: [ + restype, + timeoutInSeconds, + comp3 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version3, + requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5 }; - var tier = { - parameterPath: ["options", "tier"], - mapper: { - serializedName: "x-ms-access-tier", - xmlName: "x-ms-access-tier", - type: { - name: "Enum", - allowedValues: [ - "P4", - "P6", - "P10", - "P15", - "P20", - "P30", - "P40", - "P50", - "P60", - "P70", - "P80", - "Hot", - "Cool", - "Archive", - "Cold" - ] + var 
getAccountInfoOperationSpec$2 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ServiceGetAccountInfoHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetAccountInfoExceptionHeaders } - } + }, + queryParameters: [comp, restype1], + urlParameters: [url], + headerParameters: [version3, accept1], + isXML: true, + serializer: xmlSerializer$5 }; - var rehydratePriority = { - parameterPath: ["options", "rehydratePriority"], - mapper: { - serializedName: "x-ms-rehydrate-priority", - xmlName: "x-ms-rehydrate-priority", - type: { - name: "Enum", - allowedValues: ["High", "Standard"] + var submitBatchOperationSpec$1 = { + path: "/", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: ServiceSubmitBatchHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceSubmitBatchExceptionHeaders } - } - }; - var sourceIfModifiedSince = { - parameterPath: [ - "options", - "sourceModifiedAccessConditions", - "sourceIfModifiedSince" + }, + requestBody: body, + queryParameters: [timeoutInSeconds, comp4], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version3, + requestId, + contentLength, + multipartContentType ], - mapper: { - serializedName: "x-ms-source-if-modified-since", - xmlName: "x-ms-source-if-modified-since", - type: { - name: "DateTimeRfc1123" - } - } + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5 }; - var sourceIfUnmodifiedSince = { - parameterPath: [ - "options", - "sourceModifiedAccessConditions", - "sourceIfUnmodifiedSince" - ], - mapper: { - serializedName: "x-ms-source-if-unmodified-since", - xmlName: "x-ms-source-if-unmodified-since", - type: { - name: "DateTimeRfc1123" + var filterBlobsOperationSpec$1 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ServiceFilterBlobsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceFilterBlobsExceptionHeaders } - } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where + ], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 }; - var sourceIfMatch = { - parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], - mapper: { - serializedName: "x-ms-source-if-match", - xmlName: "x-ms-source-if-match", - type: { - name: "String" - } + var Container = class { + static { + __name(this, "Container"); } - }; - var sourceIfNoneMatch = { - parameterPath: [ - "options", - "sourceModifiedAccessConditions", - "sourceIfNoneMatch" - ], - mapper: { - serializedName: "x-ms-source-if-none-match", - xmlName: "x-ms-source-if-none-match", - type: { - name: "String" - } + /** + * Initialize a new instance of the class Container class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; } - }; - var sourceIfTags = { - parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], - mapper: { - serializedName: "x-ms-source-if-tags", - xmlName: "x-ms-source-if-tags", - type: { - name: "String" - } + /** + * creates a new container under the specified account. If the container with the same name already + * exists, the operation fails + * @param options The options parameters. 
+ */ + create(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); } - }; - var copySource = { - parameterPath: "copySource", - mapper: { - serializedName: "x-ms-copy-source", - required: true, - xmlName: "x-ms-copy-source", - type: { - name: "String" - } + /** + * returns all user-defined metadata and system properties for the specified container. The data + * returned does not include the container's list of blobs + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$1); } - }; - var blobTagsString = { - parameterPath: ["options", "blobTagsString"], - mapper: { - serializedName: "x-ms-tags", - xmlName: "x-ms-tags", - type: { - name: "String" - } + /** + * operation marks the specified container for deletion. The container and any blobs contained within + * it are later deleted during garbage collection + * @param options The options parameters. + */ + delete(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); } - }; - var sealBlob = { - parameterPath: ["options", "sealBlob"], - mapper: { - serializedName: "x-ms-seal-blob", - xmlName: "x-ms-seal-blob", - type: { - name: "Boolean" - } + /** + * operation sets one or more user-defined name-value pairs for the specified container. + * @param options The options parameters. + */ + setMetadata(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec$1); } - }; - var legalHold1 = { - parameterPath: ["options", "legalHold"], - mapper: { - serializedName: "x-ms-legal-hold", - xmlName: "x-ms-legal-hold", - type: { - name: "Boolean" - } + /** + * gets the permissions for the specified container. The permissions indicate whether container data + * may be accessed publicly. + * @param options The options parameters. + */ + getAccessPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); } - }; - var xMsRequiresSync = { - parameterPath: "xMsRequiresSync", - mapper: { - defaultValue: "true", - isConstant: true, - serializedName: "x-ms-requires-sync", - type: { - name: "String" - } + /** + * sets the permissions for the specified container. The permissions indicate whether blobs in a + * container may be accessed publicly. + * @param options The options parameters. + */ + setAccessPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); } - }; - var sourceContentMD5 = { - parameterPath: ["options", "sourceContentMD5"], - mapper: { - serializedName: "x-ms-source-content-md5", - xmlName: "x-ms-source-content-md5", - type: { - name: "ByteArray" - } + /** + * Restores a previously-deleted container. 
+ * @param options The options parameters. + */ + restore(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); } - }; - var copySourceAuthorization = { - parameterPath: ["options", "copySourceAuthorization"], - mapper: { - serializedName: "x-ms-copy-source-authorization", - xmlName: "x-ms-copy-source-authorization", - type: { - name: "String" - } + /** + * Renames an existing container. + * @param sourceContainerName Required. Specifies the name of the container to rename. + * @param options The options parameters. + */ + rename(sourceContainerName2, options) { + const operationArguments = { + sourceContainerName: sourceContainerName2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renameOperationSpec); } - }; - var copySourceTags = { - parameterPath: ["options", "copySourceTags"], - mapper: { - serializedName: "x-ms-copy-source-tag-option", - xmlName: "x-ms-copy-source-tag-option", - type: { - name: "Enum", - allowedValues: ["REPLACE", "COPY"] - } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength2, multipartContentType2, body2, options) { + const operationArguments = { + contentLength: contentLength2, + multipartContentType: multipartContentType2, + body: body2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); } - }; - var comp15 = { - parameterPath: "comp", - mapper: { - defaultValue: "copy", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given + * search expression. Filter blobs searches within the given container. + * @param options The options parameters. + */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); } - }; - var copyActionAbortConstant = { - parameterPath: "copyActionAbortConstant", - mapper: { - defaultValue: "abort", - isConstant: true, - serializedName: "x-ms-copy-action", - type: { - name: "String" - } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + acquireLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); } - }; - var copyId = { - parameterPath: "copyId", - mapper: { - serializedName: "copyid", - required: true, - xmlName: "copyid", - type: { - name: "String" - } + /** + * [Update] establishes and manages a lock on a container for delete operations. 
The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId2, options) { + const operationArguments = { + leaseId: leaseId2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); } - }; - var comp16 = { - parameterPath: "comp", - mapper: { - defaultValue: "tier", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId2, options) { + const operationArguments = { + leaseId: leaseId2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + breakLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId2, proposedLeaseId2, options) { + const operationArguments = { + leaseId: leaseId2, + proposedLeaseId: proposedLeaseId2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); } - }; - var tier1 = { - parameterPath: "tier", - mapper: { - serializedName: "x-ms-access-tier", - required: true, - xmlName: "x-ms-access-tier", - type: { - name: "Enum", - allowedValues: [ - "P4", - "P6", - "P10", - "P15", - "P20", - "P30", - "P40", - "P50", - "P60", - "P70", - "P80", - "Hot", - "Cool", - "Archive", - "Cold" - ] - } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param options The options parameters. 
+ */ + listBlobFlatSegment(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); } - }; - var queryRequest = { - parameterPath: ["options", "queryRequest"], - mapper: QueryRequest - }; - var comp17 = { - parameterPath: "comp", - mapper: { - defaultValue: "query", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix + * element in the response body that acts as a placeholder for all blobs whose names begin with the + * same substring up to the appearance of the delimiter character. The delimiter may be a single + * character or a string. + * @param options The options parameters. + */ + listBlobHierarchySegment(delimiter2, options) { + const operationArguments = { + delimiter: delimiter2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); } - }; - var comp18 = { - parameterPath: "comp", - mapper: { - defaultValue: "tags", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$1); } }; - var tags = { - parameterPath: ["options", "tags"], - mapper: BlobTags - }; - var transactionalContentMD5 = { - parameterPath: ["options", "transactionalContentMD5"], - mapper: { - serializedName: "Content-MD5", - xmlName: "Content-MD5", - type: { - name: "ByteArray" + var xmlSerializer$4 = new coreHttp__namespace.Serializer( + Mappers, + /* isXml */ + true + ); + var createOperationSpec$2 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerCreateHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerCreateExceptionHeaders } - } + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + metadata, + access, + defaultEncryptionScope, + preventEncryptionScopeOverride + ], + isXML: true, + serializer: xmlSerializer$4 }; - var transactionalContentCrc64 = { - parameterPath: ["options", "transactionalContentCrc64"], - mapper: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" + var getPropertiesOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ContainerGetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetPropertiesExceptionHeaders } - } + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + leaseId + ], + isXML: true, + serializer: xmlSerializer$4 }; - var blobType = { - parameterPath: "blobType", - mapper: { - defaultValue: "PageBlob", - isConstant: true, - serializedName: "x-ms-blob-type", - type: { - name: "String" + var deleteOperationSpec$1 = { + 
path: "/{containerName}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: ContainerDeleteHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerDeleteExceptionHeaders } - } + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince + ], + isXML: true, + serializer: xmlSerializer$4 }; - var blobContentLength = { - parameterPath: "blobContentLength", - mapper: { - serializedName: "x-ms-blob-content-length", - required: true, - xmlName: "x-ms-blob-content-length", - type: { - name: "Number" + var setMetadataOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerSetMetadataHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSetMetadataExceptionHeaders } - } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp6 + ], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince + ], + isXML: true, + serializer: xmlSerializer$4 }; - var blobSequenceNumber = { - parameterPath: ["options", "blobSequenceNumber"], - mapper: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - name: "Number" + var getAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { + name: "Sequence", + element: { + type: { name: "Composite", className: "SignedIdentifier" } + } + }, + serializedName: "SignedIdentifiers", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier" + }, + headersMapper: ContainerGetAccessPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetAccessPolicyExceptionHeaders } - } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp7 + ], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + leaseId + ], + isXML: true, + serializer: xmlSerializer$4 }; - var contentType1 = { - parameterPath: ["options", "contentType"], - mapper: { - defaultValue: "application/octet-stream", - isConstant: true, - serializedName: "Content-Type", - type: { - name: "String" + var setAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerSetAccessPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSetAccessPolicyExceptionHeaders } - } + }, + requestBody: containerAcl, + queryParameters: [ + timeoutInSeconds, + restype2, + comp7 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version3, + requestId, + access, + leaseId, + ifModifiedSince, + ifUnmodifiedSince + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$4 }; - var body1 = { - parameterPath: "body", - mapper: { - serializedName: "body", - required: true, - xmlName: "body", - type: { - name: "Stream" + var restoreOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerRestoreHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRestoreExceptionHeaders } - } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp8 + ], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + deletedContainerName, + 
deletedContainerVersion + ], + isXML: true, + serializer: xmlSerializer$4 }; - var accept2 = { - parameterPath: "accept", - mapper: { - defaultValue: "application/xml", - isConstant: true, - serializedName: "Accept", - type: { - name: "String" + var renameOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerRenameHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRenameExceptionHeaders } - } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp9 + ], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + sourceContainerName, + sourceLeaseId + ], + isXML: true, + serializer: xmlSerializer$4 }; - var comp19 = { - parameterPath: "comp", - mapper: { - defaultValue: "page", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var submitBatchOperationSpec = { + path: "/{containerName}", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: ContainerSubmitBatchHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSubmitBatchExceptionHeaders } - } + }, + requestBody: body, + queryParameters: [ + timeoutInSeconds, + comp4, + restype2 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version3, + requestId, + contentLength, + multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$4 }; - var pageWrite = { - parameterPath: "pageWrite", - mapper: { - defaultValue: "update", - isConstant: true, - serializedName: "x-ms-page-write", - type: { - name: "String" + var filterBlobsOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ContainerFilterBlobsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerFilterBlobsExceptionHeaders } - } - }; - var ifSequenceNumberLessThanOrEqualTo = { - parameterPath: [ - "options", - "sequenceNumberAccessConditions", - "ifSequenceNumberLessThanOrEqualTo" + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where, + restype2 ], - mapper: { - serializedName: "x-ms-if-sequence-number-le", - xmlName: "x-ms-if-sequence-number-le", - type: { - name: "Number" - } - } - }; - var ifSequenceNumberLessThan = { - parameterPath: [ - "options", - "sequenceNumberAccessConditions", - "ifSequenceNumberLessThan" + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1 ], - mapper: { - serializedName: "x-ms-if-sequence-number-lt", - xmlName: "x-ms-if-sequence-number-lt", - type: { - name: "Number" - } - } + isXML: true, + serializer: xmlSerializer$4 }; - var ifSequenceNumberEqualTo = { - parameterPath: [ - "options", - "sequenceNumberAccessConditions", - "ifSequenceNumberEqualTo" + var acquireLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerAcquireLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerAcquireLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action, + duration, + proposedLeaseId ], - mapper: { - serializedName: "x-ms-if-sequence-number-eq", - xmlName: 
"x-ms-if-sequence-number-eq", - type: { - name: "Number" - } - } + isXML: true, + serializer: xmlSerializer$4 }; - var pageWrite1 = { - parameterPath: "pageWrite", - mapper: { - defaultValue: "clear", - isConstant: true, - serializedName: "x-ms-page-write", - type: { - name: "String" + var releaseLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerReleaseLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerReleaseLeaseExceptionHeaders } - } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action1, + leaseId1 + ], + isXML: true, + serializer: xmlSerializer$4 }; - var sourceUrl = { - parameterPath: "sourceUrl", - mapper: { - serializedName: "x-ms-copy-source", - required: true, - xmlName: "x-ms-copy-source", - type: { - name: "String" + var renewLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerRenewLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRenewLeaseExceptionHeaders } - } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action2 + ], + isXML: true, + serializer: xmlSerializer$4 }; - var sourceRange = { - parameterPath: "sourceRange", - mapper: { - serializedName: "x-ms-source-range", - required: true, - xmlName: "x-ms-source-range", - type: { - name: "String" + var breakLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: ContainerBreakLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerBreakLeaseExceptionHeaders } - } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action3, + breakPeriod + ], + isXML: true, + serializer: xmlSerializer$4 }; - var sourceContentCrc64 = { - parameterPath: ["options", "sourceContentCrc64"], - mapper: { - serializedName: "x-ms-source-content-crc64", - xmlName: "x-ms-source-content-crc64", - type: { - name: "ByteArray" + var changeLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerChangeLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerChangeLeaseExceptionHeaders } - } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action4, + proposedLeaseId1 + ], + isXML: true, + serializer: xmlSerializer$4 }; - var range1 = { - parameterPath: "range", - mapper: { - serializedName: "x-ms-range", - required: true, - xmlName: "x-ms-range", - type: { - name: "String" + var listBlobFlatSegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListBlobsFlatSegmentResponse, + headersMapper: ContainerListBlobFlatSegmentHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerListBlobFlatSegmentExceptionHeaders } - } + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + 
maxPageSize, + restype2, + include1 + ], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$4 }; - var comp20 = { - parameterPath: "comp", - mapper: { - defaultValue: "pagelist", - isConstant: true, - serializedName: "comp", - type: { - name: "String" + var listBlobHierarchySegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListBlobsHierarchySegmentResponse, + headersMapper: ContainerListBlobHierarchySegmentHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders } - } + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + restype2, + include1, + delimiter + ], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$4 }; - var prevsnapshot = { - parameterPath: ["options", "prevsnapshot"], - mapper: { - serializedName: "prevsnapshot", - xmlName: "prevsnapshot", - type: { - name: "String" + var getAccountInfoOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ContainerGetAccountInfoHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetAccountInfoExceptionHeaders } - } + }, + queryParameters: [comp, restype1], + urlParameters: [url], + headerParameters: [version3, accept1], + isXML: true, + serializer: xmlSerializer$4 }; - var prevSnapshotUrl = { - parameterPath: ["options", "prevSnapshotUrl"], - mapper: { - serializedName: "x-ms-previous-snapshot-url", - xmlName: "x-ms-previous-snapshot-url", - type: { - name: "String" - } + var Blob$1 = class { + static { + __name(this, "Blob$1"); } - }; - var sequenceNumberAction = { - parameterPath: "sequenceNumberAction", - mapper: { - serializedName: "x-ms-sequence-number-action", - required: true, - xmlName: "x-ms-sequence-number-action", - type: { - name: "Enum", - allowedValues: ["max", "update", "increment"] - } + /** + * Initialize a new instance of the class Blob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; } - }; - var comp21 = { - parameterPath: "comp", - mapper: { - defaultValue: "incrementalcopy", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * The Download operation reads or downloads a blob from the system, including its metadata and + * properties. You can also call Download to read a snapshot. + * @param options The options parameters. + */ + download(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, downloadOperationSpec); } - }; - var blobType1 = { - parameterPath: "blobType", - mapper: { - defaultValue: "AppendBlob", - isConstant: true, - serializedName: "x-ms-blob-type", - type: { - name: "String" - } + /** + * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system + * properties for the blob. It does not return the content of the blob. + * @param options The options parameters. 
+ */ + getProperties(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); } - }; - var comp22 = { - parameterPath: "comp", - mapper: { - defaultValue: "appendblock", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is + * permanently removed from the storage account. If the storage account's soft delete feature is + * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible + * immediately. However, the blob service retains the blob or snapshot for the number of days specified + * by the DeleteRetentionPolicy section of [Storage service properties] + * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is + * permanently removed from the storage account. Note that you continue to be charged for the + * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the + * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You + * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a + * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 + * (ResourceNotFound). + * @param options The options parameters. + */ + delete(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); } - }; - var maxSize = { - parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], - mapper: { - serializedName: "x-ms-blob-condition-maxsize", - xmlName: "x-ms-blob-condition-maxsize", - type: { - name: "Number" - } + /** + * Undelete a blob that was previously soft deleted + * @param options The options parameters. + */ + undelete(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); } - }; - var appendPosition = { - parameterPath: [ - "options", - "appendPositionAccessConditions", - "appendPosition" - ], - mapper: { - serializedName: "x-ms-blob-condition-appendpos", - xmlName: "x-ms-blob-condition-appendpos", - type: { - name: "Number" - } + /** + * Sets the time a blob will expire and be deleted. + * @param expiryOptions Required. Indicates mode of the expiry time + * @param options The options parameters. + */ + setExpiry(expiryOptions2, options) { + const operationArguments = { + expiryOptions: expiryOptions2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); } - }; - var sourceRange1 = { - parameterPath: ["options", "sourceRange"], - mapper: { - serializedName: "x-ms-source-range", - xmlName: "x-ms-source-range", - type: { - name: "String" - } + /** + * The Set HTTP Headers operation sets system properties on the blob + * @param options The options parameters. 
+ */ + setHttpHeaders(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); } - }; - var comp23 = { - parameterPath: "comp", - mapper: { - defaultValue: "seal", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * The Set Immutability Policy operation sets the immutability policy on the blob + * @param options The options parameters. + */ + setImmutabilityPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setImmutabilityPolicyOperationSpec); } - }; - var blobType2 = { - parameterPath: "blobType", - mapper: { - defaultValue: "BlockBlob", - isConstant: true, - serializedName: "x-ms-blob-type", - type: { - name: "String" - } + /** + * The Delete Immutability Policy operation deletes the immutability policy on the blob + * @param options The options parameters. + */ + deleteImmutabilityPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteImmutabilityPolicyOperationSpec); } - }; - var copySourceBlobProperties = { - parameterPath: ["options", "copySourceBlobProperties"], - mapper: { - serializedName: "x-ms-copy-source-blob-properties", - xmlName: "x-ms-copy-source-blob-properties", - type: { - name: "Boolean" - } + /** + * The Set Legal Hold operation sets a legal hold on the blob. + * @param legalHold Specified if a legal hold should be set on the blob. + * @param options The options parameters. + */ + setLegalHold(legalHold2, options) { + const operationArguments = { + legalHold: legalHold2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setLegalHoldOperationSpec); } - }; - var comp24 = { - parameterPath: "comp", - mapper: { - defaultValue: "block", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more + * name-value pairs + * @param options The options parameters. + */ + setMetadata(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); } - }; - var blockId = { - parameterPath: "blockId", - mapper: { - serializedName: "blockid", - required: true, - xmlName: "blockid", - type: { - name: "String" - } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. 
+ */ + acquireLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); } - }; - var blocks = { - parameterPath: "blocks", - mapper: BlockLookupList - }; - var comp25 = { - parameterPath: "comp", - mapper: { - defaultValue: "blocklist", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId2, options) { + const operationArguments = { + leaseId: leaseId2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); } - }; - var listType = { - parameterPath: "listType", - mapper: { - defaultValue: "committed", - serializedName: "blocklisttype", - required: true, - xmlName: "blocklisttype", - type: { - name: "Enum", - allowedValues: ["committed", "uncommitted", "all"] - } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId2, options) { + const operationArguments = { + leaseId: leaseId2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); } - }; - var Service = class { - static { - __name(this, "Service"); + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId2, proposedLeaseId2, options) { + const operationArguments = { + leaseId: leaseId2, + proposedLeaseId: proposedLeaseId2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); } /** - * Initialize a new instance of the class Service class. - * @param client Reference to the service client + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. */ - constructor(client) { - this.client = client; + breakLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); } /** - * Sets properties for a storage account's Blob service endpoint, including properties for Storage - * Analytics and CORS (Cross-Origin Resource Sharing) rules - * @param blobServiceProperties The StorageService properties. + * The Create Snapshot operation creates a read-only snapshot of a blob * @param options The options parameters. 
*/ - setProperties(blobServiceProperties2, options) { + createSnapshot(options) { const operationArguments = { - blobServiceProperties: blobServiceProperties2, options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); + return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); } /** - * gets the properties of a storage account's Blob service, including properties for Storage Analytics - * and CORS (Cross-Origin Resource Sharing) rules. + * The Start Copy From URL operation copies a blob or an internet resource to a new blob. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. * @param options The options parameters. */ - getProperties(options) { + startCopyFromURL(copySource2, options) { const operationArguments = { + copySource: copySource2, options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); + return this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); } /** - * Retrieves statistics related to replication for the Blob service. It is only available on the - * secondary location endpoint when read-access geo-redundant replication is enabled for the storage - * account. + * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return + * a response until the copy is complete. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. * @param options The options parameters. */ - getStatistics(options) { + copyFromURL(copySource2, options) { const operationArguments = { + copySource: copySource2, options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); + return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); } /** - * The List Containers Segment operation returns a list of the containers under the specified account + * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination + * blob with zero length and full metadata. + * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob + * operation. * @param options The options parameters. */ - listContainersSegment(options) { + abortCopyFromURL(copyId2, options) { const operationArguments = { + copyId: copyId2, options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); + return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); } /** - * Retrieves a user delegation key for the Blob service. This is only a valid operation when using - * bearer token authentication. 
- * @param keyInfo Key information + * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant storage only). A + * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block + * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's + * ETag. + * @param tier Indicates the tier to be set on the blob. * @param options The options parameters. */ - getUserDelegationKey(keyInfo2, options) { + setTier(tier2, options) { const operationArguments = { - keyInfo: keyInfo2, + tier: tier2, options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); + return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); } /** * Returns the sku name and account kind @@ -76199,8241 +73345,9483 @@ var require_dist4 = __commonJS({ const operationArguments = { options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); } /** - * The Batch operation allows multiple API calls to be embedded into a single HTTP request. - * @param contentLength The length of the request. - * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch - * boundary. Example header value: multipart/mixed; boundary=batch_ - * @param body Initial data + * The Query operation enables users to select/project on blob data by providing simple query + * expressions. * @param options The options parameters. */ - submitBatch(contentLength2, multipartContentType2, body2, options) { + query(options) { const operationArguments = { - contentLength: contentLength2, - multipartContentType: multipartContentType2, - body: body2, options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, queryOperationSpec); } /** - * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a - * given search expression. Filter blobs searches across all containers within a storage account but - * can be scoped within the expression to a single container. + * The Get Tags operation enables users to get the tags associated with a blob. * @param options The options parameters. */ - filterBlobs(options) { + getTags(options) { const operationArguments = { options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); + } + /** + * The Set Tags operation enables users to set tags on a blob. + * @param options The options parameters. 
+ */ + setTags(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); } }; - var xmlSerializer$5 = new coreHttp__namespace.Serializer( + var xmlSerializer$3 = new coreHttp__namespace.Serializer( Mappers, /* isXml */ true ); - var setPropertiesOperationSpec = { - path: "/", + var downloadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobDownloadHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobDownloadHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDownloadExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId + ], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + rangeGetContentMD5, + rangeGetContentCRC64, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 + }; + var getPropertiesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "HEAD", + responses: { + 200: { + headersMapper: BlobGetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId + ], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 + }; + var deleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: BlobDeleteHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDeleteExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + blobDeleteType + ], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + deleteSnapshots + ], + isXML: true, + serializer: xmlSerializer$3 + }; + var undeleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobUndeleteHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobUndeleteExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp8], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$3 + }; + var setExpiryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetExpiryHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetExpiryExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp11], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + expiryOptions, + expiresOn + ], + isXML: true, + serializer: xmlSerializer$3 + }; + var setHttpHeadersOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: 
BlobSetHttpHeadersHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetHttpHeadersExceptionHeaders + } + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition + ], + isXML: true, + serializer: xmlSerializer$3 + }; + var setImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetImmutabilityPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetImmutabilityPolicyExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp12], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + ifUnmodifiedSince, + immutabilityPolicyExpiry, + immutabilityPolicyMode + ], + isXML: true, + serializer: xmlSerializer$3 + }; + var deleteImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 200: { + headersMapper: BlobDeleteImmutabilityPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp12], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$3 + }; + var setLegalHoldOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetLegalHoldHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetLegalHoldExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp13], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + legalHold + ], + isXML: true, + serializer: xmlSerializer$3 + }; + var setMetadataOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetMetadataHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetMetadataExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp6], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope + ], + isXML: true, + serializer: xmlSerializer$3 + }; + var acquireLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlobAcquireLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobAcquireLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action, + duration, + proposedLeaseId, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 + }; + var releaseLeaseOperationSpec = { + path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { - 202: { - headersMapper: ServiceSetPropertiesHeaders + 200: { + headersMapper: BlobReleaseLeaseHeaders }, default: { bodyMapper: StorageError, - headersMapper: ServiceSetPropertiesExceptionHeaders + headersMapper: BlobReleaseLeaseExceptionHeaders 
} }, - requestBody: blobServiceProperties, - queryParameters: [ - restype, - comp, - timeoutInSeconds + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action1, + leaseId1, + ifMatch, + ifNoneMatch, + ifTags ], + isXML: true, + serializer: xmlSerializer$3 + }; + var renewLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobRenewLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobRenewLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], headerParameters: [ - contentType, - accept, version3, - requestId + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action2, + ifMatch, + ifNoneMatch, + ifTags ], isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$5 + serializer: xmlSerializer$3 }; - var getPropertiesOperationSpec$2 = { - path: "/", - httpMethod: "GET", + var changeLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", responses: { 200: { - bodyMapper: BlobServiceProperties, - headersMapper: ServiceGetPropertiesHeaders + headersMapper: BlobChangeLeaseHeaders }, default: { bodyMapper: StorageError, - headersMapper: ServiceGetPropertiesExceptionHeaders + headersMapper: BlobChangeLeaseExceptionHeaders } }, - queryParameters: [ - restype, - comp, - timeoutInSeconds + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action4, + proposedLeaseId1, + ifMatch, + ifNoneMatch, + ifTags ], + isXML: true, + serializer: xmlSerializer$3 + }; + var breakLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobBreakLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobBreakLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], headerParameters: [ version3, requestId, - accept1 + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action3, + breakPeriod, + ifMatch, + ifNoneMatch, + ifTags ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer$3 }; - var getStatisticsOperationSpec = { - path: "/", - httpMethod: "GET", + var createSnapshotOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", responses: { - 200: { - bodyMapper: BlobServiceStatistics, - headersMapper: ServiceGetStatisticsHeaders + 201: { + headersMapper: BlobCreateSnapshotHeaders }, default: { bodyMapper: StorageError, - headersMapper: ServiceGetStatisticsExceptionHeaders + headersMapper: BlobCreateSnapshotExceptionHeaders } }, - queryParameters: [ - restype, - timeoutInSeconds, - comp1 + queryParameters: [timeoutInSeconds, comp14], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope ], + isXML: true, + serializer: xmlSerializer$3 + }; + var startCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobStartCopyFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: 
BlobStartCopyFromURLExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ version3, requestId, - accept1 + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + tier, + rehydratePriority, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceIfTags, + copySource, + blobTagsString, + sealBlob, + legalHold1 ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer$3 }; - var listContainersSegmentOperationSpec = { - path: "/", - httpMethod: "GET", + var copyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", responses: { - 200: { - bodyMapper: ListContainersSegmentResponse, - headersMapper: ServiceListContainersSegmentHeaders + 202: { + headersMapper: BlobCopyFromURLHeaders }, default: { bodyMapper: StorageError, - headersMapper: ServiceListContainersSegmentExceptionHeaders + headersMapper: BlobCopyFromURLExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + copySource, + blobTagsString, + legalHold1, + xMsRequiresSync, + sourceContentMD5, + copySourceAuthorization, + copySourceTags + ], + isXML: true, + serializer: xmlSerializer$3 + }; + var abortCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: BlobAbortCopyFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobAbortCopyFromURLExceptionHeaders } }, queryParameters: [ timeoutInSeconds, - comp2, - prefix, - marker, - maxPageSize, - include + comp15, + copyId ], urlParameters: [url], headerParameters: [ version3, requestId, - accept1 + accept1, + leaseId, + copyActionAbortConstant ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer$3 }; - var getUserDelegationKeyOperationSpec = { - path: "/", - httpMethod: "POST", + var setTierOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", responses: { 200: { - bodyMapper: UserDelegationKey, - headersMapper: ServiceGetUserDelegationKeyHeaders + headersMapper: BlobSetTierHeaders + }, + 202: { + headersMapper: BlobSetTierHeaders }, default: { bodyMapper: StorageError, - headersMapper: ServiceGetUserDelegationKeyExceptionHeaders + headersMapper: BlobSetTierExceptionHeaders } }, - requestBody: keyInfo, queryParameters: [ - restype, timeoutInSeconds, - comp3 + snapshot, + versionId, + comp16 ], urlParameters: [url], headerParameters: [ - contentType, - accept, version3, - requestId + requestId, + accept1, + leaseId, + ifTags, + rehydratePriority, + tier1 ], isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$5 + serializer: xmlSerializer$3 }; - var getAccountInfoOperationSpec$2 = { - path: "/", + var getAccountInfoOperationSpec = { + path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { - headersMapper: ServiceGetAccountInfoHeaders + headersMapper: BlobGetAccountInfoHeaders }, default: { bodyMapper: StorageError, - headersMapper: ServiceGetAccountInfoExceptionHeaders + headersMapper: 
BlobGetAccountInfoExceptionHeaders } }, queryParameters: [comp, restype1], urlParameters: [url], headerParameters: [version3, accept1], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer$3 }; - var submitBatchOperationSpec$1 = { - path: "/", + var queryOperationSpec = { + path: "/{containerName}/{blob}", httpMethod: "POST", responses: { - 202: { + 200: { bodyMapper: { type: { name: "Stream" }, serializedName: "parsedResponse" }, - headersMapper: ServiceSubmitBatchHeaders + headersMapper: BlobQueryHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobQueryHeaders }, default: { bodyMapper: StorageError, - headersMapper: ServiceSubmitBatchExceptionHeaders + headersMapper: BlobQueryExceptionHeaders } }, - requestBody: body, - queryParameters: [timeoutInSeconds, comp4], + requestBody: queryRequest, + queryParameters: [ + timeoutInSeconds, + snapshot, + comp17 + ], urlParameters: [url], headerParameters: [ contentType, accept, version3, requestId, - contentLength, - multipartContentType + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$5 + serializer: xmlSerializer$3 }; - var filterBlobsOperationSpec$1 = { - path: "/", + var getTagsOperationSpec = { + path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { - bodyMapper: FilterBlobSegment, - headersMapper: ServiceFilterBlobsHeaders + bodyMapper: BlobTags, + headersMapper: BlobGetTagsHeaders }, default: { bodyMapper: StorageError, - headersMapper: ServiceFilterBlobsExceptionHeaders + headersMapper: BlobGetTagsExceptionHeaders } }, queryParameters: [ timeoutInSeconds, - marker, - maxPageSize, - comp5, - where + snapshot, + versionId, + comp18 ], urlParameters: [url], headerParameters: [ version3, requestId, - accept1 + accept1, + leaseId, + ifTags ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer$3 }; - var Container = class { + var setTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: BlobSetTagsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetTagsExceptionHeaders + } + }, + requestBody: tags, + queryParameters: [ + timeoutInSeconds, + versionId, + comp18 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version3, + requestId, + leaseId, + ifTags, + transactionalContentMD5, + transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$3 + }; + var PageBlob = class { static { - __name(this, "Container"); + __name(this, "PageBlob"); } /** - * Initialize a new instance of the class Container class. + * Initialize a new instance of the class PageBlob class. * @param client Reference to the service client */ constructor(client) { this.client = client; } /** - * creates a new container under the specified account. If the container with the same name already - * exists, the operation fails - * @param options The options parameters. 
- */ - create(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); - } - /** - * returns all user-defined metadata and system properties for the specified container. The data - * returned does not include the container's list of blobs - * @param options The options parameters. - */ - getProperties(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$1); - } - /** - * operation marks the specified container for deletion. The container and any blobs contained within - * it are later deleted during garbage collection - * @param options The options parameters. - */ - delete(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); - } - /** - * operation sets one or more user-defined name-value pairs for the specified container. - * @param options The options parameters. - */ - setMetadata(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec$1); - } - /** - * gets the permissions for the specified container. The permissions indicate whether container data - * may be accessed publicly. - * @param options The options parameters. - */ - getAccessPolicy(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); - } - /** - * sets the permissions for the specified container. The permissions indicate whether blobs in a - * container may be accessed publicly. - * @param options The options parameters. - */ - setAccessPolicy(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); - } - /** - * Restores a previously-deleted container. - * @param options The options parameters. - */ - restore(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); - } - /** - * Renames an existing container. - * @param sourceContainerName Required. Specifies the name of the container to rename. - * @param options The options parameters. - */ - rename(sourceContainerName2, options) { - const operationArguments = { - sourceContainerName: sourceContainerName2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, renameOperationSpec); - } - /** - * The Batch operation allows multiple API calls to be embedded into a single HTTP request. - * @param contentLength The length of the request. - * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch - * boundary. 
Example header value: multipart/mixed; boundary=batch_ - * @param body Initial data - * @param options The options parameters. - */ - submitBatch(contentLength2, multipartContentType2, body2, options) { - const operationArguments = { - contentLength: contentLength2, - multipartContentType: multipartContentType2, - body: body2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); - } - /** - * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given - * search expression. Filter blobs searches within the given container. + * The Create operation creates a new page blob. + * @param contentLength The length of the request. + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. * @param options The options parameters. */ - filterBlobs(options) { + create(contentLength2, blobContentLength2, options) { const operationArguments = { + contentLength: contentLength2, + blobContentLength: blobContentLength2, options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + return this.client.sendOperationRequest(operationArguments, createOperationSpec$1); } /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite + * The Upload Pages operation writes a range of pages to a page blob + * @param contentLength The length of the request. + * @param body Initial data * @param options The options parameters. */ - acquireLease(options) { + uploadPages(contentLength2, body2, options) { const operationArguments = { + contentLength: contentLength2, + body: body2, options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); } /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param leaseId Specifies the current lease ID on the resource. + * The Clear Pages operation clears a set of pages from a page blob + * @param contentLength The length of the request. * @param options The options parameters. */ - releaseLease(leaseId2, options) { + clearPages(contentLength2, options) { const operationArguments = { - leaseId: leaseId2, + contentLength: contentLength2, options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); } /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param leaseId Specifies the current lease ID on the resource. + * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a + * URL + * @param sourceUrl Specify a URL to the copy source. + * @param sourceRange Bytes of source data in the specified range. 
The length of this range should + * match the ContentLength header and x-ms-range/Range destination range header. + * @param contentLength The length of the request. + * @param range The range of bytes to which the source range would be written. The range should be 512 + * aligned and range-end is required. * @param options The options parameters. */ - renewLease(leaseId2, options) { + uploadPagesFromURL(sourceUrl2, sourceRange2, contentLength2, range2, options) { const operationArguments = { - leaseId: leaseId2, + sourceUrl: sourceUrl2, + sourceRange: sourceRange2, + contentLength: contentLength2, + range: range2, options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); } /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite + * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a + * page blob * @param options The options parameters. */ - breakLease(options) { + getPageRanges(options) { const operationArguments = { options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); } /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param leaseId Specifies the current lease ID on the resource. - * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 - * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor - * (String) for a list of valid GUID string formats. + * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were + * changed between target blob and previous snapshot. * @param options The options parameters. */ - changeLease(leaseId2, proposedLeaseId2, options) { + getPageRangesDiff(options) { const operationArguments = { - leaseId: leaseId2, - proposedLeaseId: proposedLeaseId2, options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); } /** - * [Update] The List Blobs operation returns a list of the blobs under the specified container + * Resize the Blob + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. * @param options The options parameters. 
*/ - listBlobFlatSegment(options) { + resize(blobContentLength2, options) { const operationArguments = { + blobContentLength: blobContentLength2, options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); + return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); } /** - * [Update] The List Blobs operation returns a list of the blobs under the specified container - * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix - * element in the response body that acts as a placeholder for all blobs whose names begin with the - * same substring up to the appearance of the delimiter character. The delimiter may be a single - * character or a string. + * Update the sequence number of the blob + * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. + * This property applies to page blobs only. This property indicates how the service should modify the + * blob's sequence number * @param options The options parameters. */ - listBlobHierarchySegment(delimiter2, options) { + updateSequenceNumber(sequenceNumberAction2, options) { const operationArguments = { - delimiter: delimiter2, + sequenceNumberAction: sequenceNumberAction2, options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); + return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); } /** - * Returns the sku name and account kind + * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. + * The snapshot is copied such that only the differential changes between the previously copied + * snapshot are transferred to the destination. The copied snapshots are complete copies of the + * original snapshot and can be read or copied from as usual. This API is supported since REST version + * 2016-05-31. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. * @param options The options parameters. 
*/ - getAccountInfo(options) { + copyIncremental(copySource2, options) { const operationArguments = { + copySource: copySource2, options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); } }; - var xmlSerializer$4 = new coreHttp__namespace.Serializer( + var xmlSerializer$2 = new coreHttp__namespace.Serializer( Mappers, /* isXml */ true ); - var createOperationSpec$2 = { - path: "/{containerName}", + var serializer$2 = new coreHttp__namespace.Serializer( + Mappers, + /* isXml */ + false + ); + var createOperationSpec$1 = { + path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: ContainerCreateHeaders + headersMapper: PageBlobCreateHeaders }, default: { bodyMapper: StorageError, - headersMapper: ContainerCreateExceptionHeaders + headersMapper: PageBlobCreateExceptionHeaders } }, - queryParameters: [timeoutInSeconds, restype2], + queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ version3, requestId, accept1, + contentLength, metadata, - access, - defaultEncryptionScope, - preventEncryptionScopeOverride + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + blobType, + blobContentLength, + blobSequenceNumber ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$2 }; - var getPropertiesOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "GET", + var uploadPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", responses: { - 200: { - headersMapper: ContainerGetPropertiesHeaders + 201: { + headersMapper: PageBlobUploadPagesHeaders }, default: { bodyMapper: StorageError, - headersMapper: ContainerGetPropertiesExceptionHeaders + headersMapper: PageBlobUploadPagesExceptionHeaders } }, - queryParameters: [timeoutInSeconds, restype2], + requestBody: body1, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + pageWrite, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo + ], + mediaType: "binary", + serializer: serializer$2 + }; + var clearPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobClearPagesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobClearPagesExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp19], urlParameters: [url], headerParameters: [ version3, requestId, accept1, - leaseId + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + ifSequenceNumberLessThanOrEqualTo, + 
ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + pageWrite1 ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$2 }; - var deleteOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "DELETE", + var uploadPagesFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", responses: { - 202: { - headersMapper: ContainerDeleteHeaders + 201: { + headersMapper: PageBlobUploadPagesFromURLHeaders }, default: { bodyMapper: StorageError, - headersMapper: ContainerDeleteExceptionHeaders + headersMapper: PageBlobUploadPagesFromURLExceptionHeaders } }, - queryParameters: [timeoutInSeconds, restype2], + queryParameters: [timeoutInSeconds, comp19], urlParameters: [url], headerParameters: [ version3, requestId, accept1, + contentLength, leaseId, ifModifiedSince, - ifUnmodifiedSince + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + pageWrite, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + sourceUrl, + sourceRange, + sourceContentCrc64, + range1 ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$2 }; - var setMetadataOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "PUT", + var getPageRangesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", responses: { 200: { - headersMapper: ContainerSetMetadataHeaders + bodyMapper: PageList, + headersMapper: PageBlobGetPageRangesHeaders }, default: { bodyMapper: StorageError, - headersMapper: ContainerSetMetadataExceptionHeaders + headersMapper: PageBlobGetPageRangesExceptionHeaders } }, queryParameters: [ timeoutInSeconds, - restype2, - comp6 + marker, + maxPageSize, + snapshot, + comp20 ], urlParameters: [url], headerParameters: [ version3, requestId, accept1, - metadata, leaseId, - ifModifiedSince + ifModifiedSince, + ifUnmodifiedSince, + range, + ifMatch, + ifNoneMatch, + ifTags ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$2 }; - var getAccessPolicyOperationSpec = { - path: "/{containerName}", + var getPageRangesDiffOperationSpec = { + path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { - bodyMapper: { - type: { - name: "Sequence", - element: { - type: { name: "Composite", className: "SignedIdentifier" } - } - }, - serializedName: "SignedIdentifiers", - xmlName: "SignedIdentifiers", - xmlIsWrapped: true, - xmlElementName: "SignedIdentifier" - }, - headersMapper: ContainerGetAccessPolicyHeaders + bodyMapper: PageList, + headersMapper: PageBlobGetPageRangesDiffHeaders }, default: { bodyMapper: StorageError, - headersMapper: ContainerGetAccessPolicyExceptionHeaders + headersMapper: PageBlobGetPageRangesDiffExceptionHeaders } }, queryParameters: [ timeoutInSeconds, - restype2, - comp7 + marker, + maxPageSize, + snapshot, + comp20, + prevsnapshot ], urlParameters: [url], headerParameters: [ version3, requestId, accept1, - leaseId + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + ifMatch, + ifNoneMatch, + ifTags, + prevSnapshotUrl ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$2 }; - var setAccessPolicyOperationSpec = { - path: "/{containerName}", + var resizeOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: PageBlobResizeHeaders + }, + 
default: { + bodyMapper: StorageError, + headersMapper: PageBlobResizeExceptionHeaders + } + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + blobContentLength + ], + isXML: true, + serializer: xmlSerializer$2 + }; + var updateSequenceNumberOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: PageBlobUpdateSequenceNumberHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders + } + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + blobSequenceNumber, + sequenceNumberAction + ], + isXML: true, + serializer: xmlSerializer$2 + }; + var copyIncrementalOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: PageBlobCopyIncrementalHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobCopyIncrementalExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp21], + urlParameters: [url], + headerParameters: [ + version3, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + copySource + ], + isXML: true, + serializer: xmlSerializer$2 + }; + var AppendBlob = class { + static { + __name(this, "AppendBlob"); + } + /** + * Initialize a new instance of the class AppendBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create Append Blob operation creates a new append blob. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + create(contentLength2, options) { + const operationArguments = { + contentLength: contentLength2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob. The + * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + appendBlock(contentLength2, body2, options) { + const operationArguments = { + contentLength: contentLength2, + body: body2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob where + * the contents are read from a source url. The Append Block operation is permitted only if the blob + * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version + * 2015-02-21 version or later. + * @param sourceUrl Specify a URL to the copy source. + * @param contentLength The length of the request. + * @param options The options parameters. 
+ */ + appendBlockFromUrl(sourceUrl2, contentLength2, options) { + const operationArguments = { + sourceUrl: sourceUrl2, + contentLength: contentLength2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); + } + /** + * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version + * 2019-12-12 version or later. + * @param options The options parameters. + */ + seal(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, sealOperationSpec); + } + }; + var xmlSerializer$1 = new coreHttp__namespace.Serializer( + Mappers, + /* isXml */ + true + ); + var serializer$1 = new coreHttp__namespace.Serializer( + Mappers, + /* isXml */ + false + ); + var createOperationSpec = { + path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { - 200: { - headersMapper: ContainerSetAccessPolicyHeaders + 201: { + headersMapper: AppendBlobCreateHeaders }, default: { bodyMapper: StorageError, - headersMapper: ContainerSetAccessPolicyExceptionHeaders + headersMapper: AppendBlobCreateExceptionHeaders } }, - requestBody: containerAcl, - queryParameters: [ - timeoutInSeconds, - restype2, - comp7 - ], + queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ - contentType, - accept, version3, requestId, - access, + accept1, + contentLength, + metadata, leaseId, ifModifiedSince, - ifUnmodifiedSince + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + blobTagsString, + legalHold1, + blobType1 ], isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$4 + serializer: xmlSerializer$1 }; - var restoreOperationSpec = { - path: "/{containerName}", + var appendBlockOperationSpec = { + path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: ContainerRestoreHeaders + headersMapper: AppendBlobAppendBlockHeaders }, default: { bodyMapper: StorageError, - headersMapper: ContainerRestoreExceptionHeaders + headersMapper: AppendBlobAppendBlockExceptionHeaders } }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp8 - ], + requestBody: body1, + queryParameters: [timeoutInSeconds, comp22], urlParameters: [url], headerParameters: [ version3, requestId, - accept1, - deletedContainerName, - deletedContainerVersion + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + maxSize, + appendPosition ], - isXML: true, - serializer: xmlSerializer$4 + mediaType: "binary", + serializer: serializer$1 }; - var renameOperationSpec = { - path: "/{containerName}", + var appendBlockFromUrlOperationSpec = { + path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { - 200: { - headersMapper: ContainerRenameHeaders + 201: { + headersMapper: AppendBlobAppendBlockFromUrlHeaders }, default: { bodyMapper: StorageError, - headersMapper: 
ContainerRenameExceptionHeaders + headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders } }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp9 - ], + queryParameters: [timeoutInSeconds, comp22], urlParameters: [url], headerParameters: [ version3, requestId, accept1, - sourceContainerName, - sourceLeaseId - ], - isXML: true, - serializer: xmlSerializer$4 - }; - var submitBatchOperationSpec = { - path: "/{containerName}", - httpMethod: "POST", - responses: { - 202: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: ContainerSubmitBatchHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerSubmitBatchExceptionHeaders - } - }, - requestBody: body, - queryParameters: [ - timeoutInSeconds, - comp4, - restype2 - ], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version3, - requestId, contentLength, - multipartContentType - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$4 - }; - var filterBlobsOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: FilterBlobSegment, - headersMapper: ContainerFilterBlobsHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerFilterBlobsExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - marker, - maxPageSize, - comp5, - where, - restype2 - ], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1 + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + transactionalContentMD5, + sourceUrl, + sourceContentCrc64, + maxSize, + appendPosition, + sourceRange1 ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$1 }; - var acquireLeaseOperationSpec$1 = { - path: "/{containerName}", + var sealOperationSpec = { + path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { - 201: { - headersMapper: ContainerAcquireLeaseHeaders + 200: { + headersMapper: AppendBlobSealHeaders }, default: { bodyMapper: StorageError, - headersMapper: ContainerAcquireLeaseExceptionHeaders + headersMapper: AppendBlobSealExceptionHeaders } }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp10 - ], + queryParameters: [timeoutInSeconds, comp23], urlParameters: [url], headerParameters: [ version3, requestId, accept1, + leaseId, ifModifiedSince, ifUnmodifiedSince, - action, - duration, - proposedLeaseId + ifMatch, + ifNoneMatch, + appendPosition ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$1 }; - var releaseLeaseOperationSpec$1 = { - path: "/{containerName}", + var BlockBlob = class { + static { + __name(this, "BlockBlob"); + } + /** + * Initialize a new instance of the class BlockBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing + * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put + * Blob; the content of the existing blob is overwritten with the content of the new blob. 
To perform a + * partial update of the content of a block blob, use the Put Block List operation. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + upload(contentLength2, body2, options) { + const operationArguments = { + contentLength: contentLength2, + body: body2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadOperationSpec); + } + /** + * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read + * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are + * not supported with Put Blob from URL; the content of an existing blob is overwritten with the + * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, + * use the Put Block from URL API in conjunction with Put Block List. + * @param contentLength The length of the request. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + putBlobFromUrl(contentLength2, copySource2, options) { + const operationArguments = { + contentLength: contentLength2, + copySource: copySource2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + stageBlock(blockId2, contentLength2, body2, options) { + const operationArguments = { + blockId: blockId2, + contentLength: contentLength2, + body: body2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob where the contents + * are read from a URL. + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param sourceUrl Specify a URL to the copy source. + * @param options The options parameters. 
+ */ + stageBlockFromURL(blockId2, contentLength2, sourceUrl2, options) { + const operationArguments = { + blockId: blockId2, + contentLength: contentLength2, + sourceUrl: sourceUrl2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); + } + /** + * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the + * blob. In order to be written as part of a blob, a block must have been successfully written to the + * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading + * only those blocks that have changed, then committing the new and existing blocks together. You can + * do this by specifying whether to commit a block from the committed block list or from the + * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list + * it may belong to. + * @param blocks Blob Blocks. + * @param options The options parameters. + */ + commitBlockList(blocks2, options) { + const operationArguments = { + blocks: blocks2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); + } + /** + * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block + * blob + * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted + * blocks, or both lists together. + * @param options The options parameters. + */ + getBlockList(listType2, options) { + const operationArguments = { + listType: listType2, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); + } + }; + var xmlSerializer = new coreHttp__namespace.Serializer( + Mappers, + /* isXml */ + true + ); + var serializer = new coreHttp__namespace.Serializer( + Mappers, + /* isXml */ + false + ); + var uploadOperationSpec = { + path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { - 200: { - headersMapper: ContainerReleaseLeaseHeaders + 201: { + headersMapper: BlockBlobUploadHeaders }, default: { bodyMapper: StorageError, - headersMapper: ContainerReleaseLeaseExceptionHeaders + headersMapper: BlockBlobUploadExceptionHeaders } }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp10 - ], + requestBody: body1, + queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ version3, requestId, - accept1, + contentLength, + metadata, + leaseId, ifModifiedSince, ifUnmodifiedSince, - action1, - leaseId1 + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + blobType2 ], - isXML: true, - serializer: xmlSerializer$4 + mediaType: "binary", + serializer }; - var renewLeaseOperationSpec$1 = { - path: "/{containerName}", + var putBlobFromUrlOperationSpec = { + path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { - 200: { - headersMapper: ContainerRenewLeaseHeaders + 201: { + headersMapper: 
BlockBlobPutBlobFromUrlHeaders }, default: { bodyMapper: StorageError, - headersMapper: ContainerRenewLeaseExceptionHeaders + headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders } }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp10 - ], + queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ version3, requestId, accept1, + contentLength, + metadata, + leaseId, ifModifiedSince, ifUnmodifiedSince, - leaseId1, - action2 + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + encryptionScope, + tier, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceIfTags, + copySource, + blobTagsString, + sourceContentMD5, + copySourceAuthorization, + copySourceTags, + transactionalContentMD5, + blobType2, + copySourceBlobProperties ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer }; - var breakLeaseOperationSpec$1 = { - path: "/{containerName}", + var stageBlockOperationSpec = { + path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { - 202: { - headersMapper: ContainerBreakLeaseHeaders + 201: { + headersMapper: BlockBlobStageBlockHeaders }, default: { bodyMapper: StorageError, - headersMapper: ContainerBreakLeaseExceptionHeaders + headersMapper: BlockBlobStageBlockExceptionHeaders } }, + requestBody: body1, queryParameters: [ timeoutInSeconds, - restype2, - comp10 + comp24, + blockId ], urlParameters: [url], headerParameters: [ version3, requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action3, - breakPeriod + contentLength, + leaseId, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2 ], - isXML: true, - serializer: xmlSerializer$4 + mediaType: "binary", + serializer }; - var changeLeaseOperationSpec$1 = { - path: "/{containerName}", + var stageBlockFromURLOperationSpec = { + path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { - 200: { - headersMapper: ContainerChangeLeaseHeaders + 201: { + headersMapper: BlockBlobStageBlockFromURLHeaders }, default: { bodyMapper: StorageError, - headersMapper: ContainerChangeLeaseExceptionHeaders + headersMapper: BlockBlobStageBlockFromURLExceptionHeaders } }, queryParameters: [ timeoutInSeconds, - restype2, - comp10 + comp24, + blockId ], urlParameters: [url], headerParameters: [ version3, requestId, accept1, - ifModifiedSince, - ifUnmodifiedSince, - leaseId1, - action4, - proposedLeaseId1 + contentLength, + leaseId, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + sourceUrl, + sourceContentCrc64, + sourceRange1 ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer }; - var listBlobFlatSegmentOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", + var commitBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", responses: { - 200: { - bodyMapper: ListBlobsFlatSegmentResponse, - headersMapper: ContainerListBlobFlatSegmentHeaders + 201: { + headersMapper: BlockBlobCommitBlockListHeaders }, default: { bodyMapper: StorageError, - headersMapper: ContainerListBlobFlatSegmentExceptionHeaders + headersMapper: 
BlockBlobCommitBlockListExceptionHeaders } }, - queryParameters: [ - timeoutInSeconds, - comp2, - prefix, - marker, - maxPageSize, - restype2, - include1 - ], + requestBody: blocks, + queryParameters: [timeoutInSeconds, comp25], urlParameters: [url], headerParameters: [ + contentType, + accept, version3, requestId, - accept1 + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + transactionalContentMD5, + transactionalContentCrc64 ], isXML: true, - serializer: xmlSerializer$4 + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer }; - var listBlobHierarchySegmentOperationSpec = { - path: "/{containerName}", + var getBlockListOperationSpec = { + path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { - bodyMapper: ListBlobsHierarchySegmentResponse, - headersMapper: ContainerListBlobHierarchySegmentHeaders + bodyMapper: BlockList, + headersMapper: BlockBlobGetBlockListHeaders }, default: { bodyMapper: StorageError, - headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders + headersMapper: BlockBlobGetBlockListExceptionHeaders } }, queryParameters: [ timeoutInSeconds, - comp2, - prefix, - marker, - maxPageSize, - restype2, - include1, - delimiter + snapshot, + comp25, + listType ], urlParameters: [url], headerParameters: [ version3, requestId, - accept1 + accept1, + leaseId, + ifTags ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer }; - var getAccountInfoOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - headersMapper: ContainerGetAccountInfoHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerGetAccountInfoExceptionHeaders + var logger = logger$1.createClientLogger("storage-blob"); + var SDK_VERSION = "12.17.0"; + var SERVICE_VERSION = "2023-11-03"; + var BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; + var BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4e3 * 1024 * 1024; + var BLOCK_BLOB_MAX_BLOCKS = 5e4; + var DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; + var DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; + var DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; + var REQUEST_TIMEOUT = 100 * 1e3; + var StorageOAuthScopes = "https://storage.azure.com/.default"; + var URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout" + } + }; + var HTTPURLConnection = { + HTTP_ACCEPTED: 202, + HTTP_CONFLICT: 409, + HTTP_NOT_FOUND: 404, + HTTP_PRECON_FAILED: 412, + HTTP_RANGE_NOT_SATISFIABLE: 416 + }; + var HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: "if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: 
"User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version" + }; + var ETagNone = ""; + var ETagAny = "*"; + var SIZE_1_MB = 1 * 1024 * 1024; + var BATCH_MAX_REQUEST = 256; + var BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; + var HTTP_LINE_ENDING = "\r\n"; + var HTTP_VERSION_1_1 = "HTTP/1.1"; + var EncryptionAlgorithmAES25 = "AES256"; + var DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; + var StorageBlobLoggingAllowedHeaderNames = [ + "Access-Control-Allow-Origin", + "Cache-Control", + "Content-Length", + "Content-Type", + "Date", + "Request-Id", + "traceparent", + "Transfer-Encoding", + "User-Agent", + "x-ms-client-request-id", + "x-ms-date", + "x-ms-error-code", + "x-ms-request-id", + "x-ms-return-client-request-id", + "x-ms-version", + "Accept-Ranges", + "Content-Disposition", + "Content-Encoding", + "Content-Language", + "Content-MD5", + "Content-Range", + "ETag", + "Last-Modified", + "Server", + "Vary", + "x-ms-content-crc64", + "x-ms-copy-action", + "x-ms-copy-completion-time", + "x-ms-copy-id", + "x-ms-copy-progress", + "x-ms-copy-status", + "x-ms-has-immutability-policy", + "x-ms-has-legal-hold", + "x-ms-lease-state", + "x-ms-lease-status", + "x-ms-range", + "x-ms-request-server-encrypted", + "x-ms-server-encrypted", + "x-ms-snapshot", + "x-ms-source-range", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "x-ms-access-tier", + "x-ms-access-tier-change-time", + "x-ms-access-tier-inferred", + "x-ms-account-kind", + "x-ms-archive-status", + "x-ms-blob-append-offset", + "x-ms-blob-cache-control", + "x-ms-blob-committed-block-count", + "x-ms-blob-condition-appendpos", + "x-ms-blob-condition-maxsize", + "x-ms-blob-content-disposition", + "x-ms-blob-content-encoding", + "x-ms-blob-content-language", + "x-ms-blob-content-length", + "x-ms-blob-content-md5", + "x-ms-blob-content-type", + "x-ms-blob-public-access", + "x-ms-blob-sequence-number", + "x-ms-blob-type", + "x-ms-copy-destination-snapshot", + "x-ms-creation-time", + "x-ms-default-encryption-scope", + "x-ms-delete-snapshots", + "x-ms-delete-type-permanent", + "x-ms-deny-encryption-scope-override", + "x-ms-encryption-algorithm", + "x-ms-if-sequence-number-eq", + "x-ms-if-sequence-number-le", + "x-ms-if-sequence-number-lt", + "x-ms-incremental-copy", + "x-ms-lease-action", + "x-ms-lease-break-period", + "x-ms-lease-duration", + "x-ms-lease-id", + "x-ms-lease-time", + "x-ms-page-write", + "x-ms-proposed-lease-id", + "x-ms-range-get-content-md5", + "x-ms-rehydrate-priority", + "x-ms-sequence-number-action", + "x-ms-sku-name", + "x-ms-source-content-md5", + "x-ms-source-if-match", + "x-ms-source-if-modified-since", + "x-ms-source-if-none-match", + "x-ms-source-if-unmodified-since", + "x-ms-tag-count", + "x-ms-encryption-key-sha256", + "x-ms-if-tags", + "x-ms-source-if-tags" + ]; + var StorageBlobLoggingAllowedQueryParameters = [ + "comp", + "maxresults", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "se", + "si", + "sip", + "sp", + "spr", + "sr", + "srt", + "ss", + "st", + "sv", + "include", + "marker", + "prefix", + "copyid", + "restype", + "blockid", + "blocklisttype", + "delimiter", + "prevsnapshot", + "ske", + "skoid", + "sks", + "skt", + "sktid", + "skv", + 
"snapshot" + ]; + var BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; + var BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; + var PathStylePorts = [ + "10000", + "10001", + "10002", + "10003", + "10004", + "10100", + "10101", + "10102", + "10103", + "10104", + "11000", + "11001", + "11002", + "11003", + "11004", + "11100", + "11101", + "11102", + "11103", + "11104" + ]; + function escapeURLPath(url2) { + const urlParsed = coreHttp.URLBuilder.parse(url2); + let path2 = urlParsed.getPath(); + path2 = path2 || "/"; + path2 = escape(path2); + urlParsed.setPath(path2); + return urlParsed.toString(); + } + __name(escapeURLPath, "escapeURLPath"); + function getProxyUriFromDevConnString(connectionString) { + let proxyUri = ""; + if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { + const matchCredentials = connectionString.split(";"); + for (const element of matchCredentials) { + if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { + proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; + } + } + } + return proxyUri; + } + __name(getProxyUriFromDevConnString, "getProxyUriFromDevConnString"); + function getValueInConnString(connectionString, argument) { + const elements = connectionString.split(";"); + for (const element of elements) { + if (element.trim().startsWith(argument)) { + return element.trim().match(argument + "=(.*)")[1]; + } + } + return ""; + } + __name(getValueInConnString, "getValueInConnString"); + function extractConnectionStringParts(connectionString) { + let proxyUri = ""; + if (connectionString.startsWith("UseDevelopmentStorage=true")) { + proxyUri = getProxyUriFromDevConnString(connectionString); + connectionString = DevelopmentConnectionString; + } + let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; + if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && connectionString.search("AccountKey=") !== -1) { + let defaultEndpointsProtocol = ""; + let accountName = ""; + let accountKey = Buffer.from("accountKey", "base64"); + let endpointSuffix = ""; + accountName = getValueInConnString(connectionString, "AccountName"); + accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); + if (!blobEndpoint) { + defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); + const protocol = defaultEndpointsProtocol.toLowerCase(); + if (protocol !== "https" && protocol !== "http") { + throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. 
Expecting 'https' or 'http'"); + } + endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); + if (!endpointSuffix) { + throw new Error("Invalid EndpointSuffix in the provided Connection String"); + } + blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + } + if (!accountName) { + throw new Error("Invalid AccountName in the provided Connection String"); + } else if (accountKey.length === 0) { + throw new Error("Invalid AccountKey in the provided Connection String"); + } + return { + kind: "AccountConnString", + url: blobEndpoint, + accountName, + accountKey, + proxyUri + }; + } else { + const accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + let accountName = getValueInConnString(connectionString, "AccountName"); + if (!accountName) { + accountName = getAccountNameFromUrl(blobEndpoint); + } + if (!blobEndpoint) { + throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + } else if (!accountSas) { + throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + } + return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; + } + } + __name(extractConnectionStringParts, "extractConnectionStringParts"); + function escape(text) { + return encodeURIComponent(text).replace(/%2F/g, "/").replace(/'/g, "%27").replace(/\+/g, "%20").replace(/%25/g, "%"); + } + __name(escape, "escape"); + function appendToURLPath(url2, name) { + const urlParsed = coreHttp.URLBuilder.parse(url2); + let path2 = urlParsed.getPath(); + path2 = path2 ? path2.endsWith("/") ? `${path2}${name}` : `${path2}/${name}` : name; + urlParsed.setPath(path2); + const normalizedUrl = new URL(urlParsed.toString()); + return normalizedUrl.toString(); + } + __name(appendToURLPath, "appendToURLPath"); + function setURLParameter(url2, name, value) { + const urlParsed = coreHttp.URLBuilder.parse(url2); + urlParsed.setQueryParameter(name, value); + return urlParsed.toString(); + } + __name(setURLParameter, "setURLParameter"); + function getURLParameter(url2, name) { + const urlParsed = coreHttp.URLBuilder.parse(url2); + return urlParsed.getQueryParameterValue(name); + } + __name(getURLParameter, "getURLParameter"); + function setURLHost(url2, host) { + const urlParsed = coreHttp.URLBuilder.parse(url2); + urlParsed.setHost(host); + return urlParsed.toString(); + } + __name(setURLHost, "setURLHost"); + function getURLPath(url2) { + const urlParsed = coreHttp.URLBuilder.parse(url2); + return urlParsed.getPath(); + } + __name(getURLPath, "getURLPath"); + function getURLScheme(url2) { + const urlParsed = coreHttp.URLBuilder.parse(url2); + return urlParsed.getScheme(); + } + __name(getURLScheme, "getURLScheme"); + function getURLPathAndQuery(url2) { + const urlParsed = coreHttp.URLBuilder.parse(url2); + const pathString = urlParsed.getPath(); + if (!pathString) { + throw new RangeError("Invalid url without valid path."); + } + let queryString = urlParsed.getQuery() || ""; + queryString = queryString.trim(); + if (queryString !== "") { + queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; + } + return `${pathString}${queryString}`; + } + __name(getURLPathAndQuery, "getURLPathAndQuery"); + function getURLQueries(url2) { + let queryString = coreHttp.URLBuilder.parse(url2).getQuery(); + if (!queryString) { + return {}; + } + queryString = queryString.trim(); + queryString = queryString.startsWith("?") ? 
queryString.substr(1) : queryString; + let querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter((value) => { + const indexOfEqual = value.indexOf("="); + const lastIndexOfEqual = value.lastIndexOf("="); + return indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1; + }); + const queries = {}; + for (const querySubString of querySubStrings) { + const splitResults = querySubString.split("="); + const key = splitResults[0]; + const value = splitResults[1]; + queries[key] = value; + } + return queries; + } + __name(getURLQueries, "getURLQueries"); + function appendToURLQuery(url2, queryParts) { + const urlParsed = coreHttp.URLBuilder.parse(url2); + let query = urlParsed.getQuery(); + if (query) { + query += "&" + queryParts; + } else { + query = queryParts; + } + urlParsed.setQuery(query); + return urlParsed.toString(); + } + __name(appendToURLQuery, "appendToURLQuery"); + function truncatedISO8061Date(date, withMilliseconds = true) { + const dateString = date.toISOString(); + return withMilliseconds ? dateString.substring(0, dateString.length - 1) + "0000Z" : dateString.substring(0, dateString.length - 5) + "Z"; + } + __name(truncatedISO8061Date, "truncatedISO8061Date"); + function base64encode(content) { + return !coreHttp.isNode ? btoa(content) : Buffer.from(content).toString("base64"); + } + __name(base64encode, "base64encode"); + function generateBlockID(blockIDPrefix, blockIndex) { + const maxSourceStringLength = 48; + const maxBlockIndexLength = 6; + const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { + blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); + } + const res = blockIDPrefix + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); + return base64encode(res); + } + __name(generateBlockID, "generateBlockID"); + async function delay(timeInMs, aborter, abortError) { + return new Promise((resolve, reject) => { + let timeout; + const abortHandler = /* @__PURE__ */ __name(() => { + if (timeout !== void 0) { + clearTimeout(timeout); + } + reject(abortError); + }, "abortHandler"); + const resolveHandler = /* @__PURE__ */ __name(() => { + if (aborter !== void 0) { + aborter.removeEventListener("abort", abortHandler); + } + resolve(); + }, "resolveHandler"); + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== void 0) { + aborter.addEventListener("abort", abortHandler); } - }, - queryParameters: [comp, restype1], - urlParameters: [url], - headerParameters: [version3, accept1], - isXML: true, - serializer: xmlSerializer$4 - }; - var Blob$1 = class { - static { - __name(this, "Blob$1"); + }); + } + __name(delay, "delay"); + function padStart(currentString, targetLength, padString = " ") { + if (String.prototype.padStart) { + return currentString.padStart(targetLength, padString); } - /** - * Initialize a new instance of the class Blob class. 
- * @param client Reference to the service client - */ - constructor(client) { - this.client = client; + padString = padString || " "; + if (currentString.length > targetLength) { + return currentString; + } else { + targetLength = targetLength - currentString.length; + if (targetLength > padString.length) { + padString += padString.repeat(targetLength / padString.length); + } + return padString.slice(0, targetLength) + currentString; } - /** - * The Download operation reads or downloads a blob from the system, including its metadata and - * properties. You can also call Download to read a snapshot. - * @param options The options parameters. - */ - download(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, downloadOperationSpec); + } + __name(padStart, "padStart"); + function iEqual(str1, str2) { + return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); + } + __name(iEqual, "iEqual"); + function getAccountNameFromUrl(url2) { + const parsedUrl = coreHttp.URLBuilder.parse(url2); + let accountName; + try { + if (parsedUrl.getHost().split(".")[1] === "blob") { + accountName = parsedUrl.getHost().split(".")[0]; + } else if (isIpEndpointStyle(parsedUrl)) { + accountName = parsedUrl.getPath().split("/")[1]; + } else { + accountName = ""; + } + return accountName; + } catch (error) { + throw new Error("Unable to extract accountName with provided information."); } - /** - * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system - * properties for the blob. It does not return the content of the blob. - * @param options The options parameters. - */ - getProperties(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + } + __name(getAccountNameFromUrl, "getAccountNameFromUrl"); + function isIpEndpointStyle(parsedUrl) { + if (parsedUrl.getHost() === void 0) { + return false; } - /** - * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is - * permanently removed from the storage account. If the storage account's soft delete feature is - * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible - * immediately. However, the blob service retains the blob or snapshot for the number of days specified - * by the DeleteRetentionPolicy section of [Storage service properties] - * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is - * permanently removed from the storage account. Note that you continue to be charged for the - * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the - * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You - * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a - * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 - * (ResourceNotFound). - * @param options The options parameters. 
- */ - delete(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + const host = parsedUrl.getHost() + (parsedUrl.getPort() === void 0 ? "" : ":" + parsedUrl.getPort()); + return /^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || parsedUrl.getPort() !== void 0 && PathStylePorts.includes(parsedUrl.getPort()); + } + __name(isIpEndpointStyle, "isIpEndpointStyle"); + function toBlobTagsString(tags2) { + if (tags2 === void 0) { + return void 0; } - /** - * Undelete a blob that was previously soft deleted - * @param options The options parameters. - */ - undelete(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); + const tagPairs = []; + for (const key in tags2) { + if (Object.prototype.hasOwnProperty.call(tags2, key)) { + const value = tags2[key]; + tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); + } } - /** - * Sets the time a blob will expire and be deleted. - * @param expiryOptions Required. Indicates mode of the expiry time - * @param options The options parameters. - */ - setExpiry(expiryOptions2, options) { - const operationArguments = { - expiryOptions: expiryOptions2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); + return tagPairs.join("&"); + } + __name(toBlobTagsString, "toBlobTagsString"); + function toBlobTags(tags2) { + if (tags2 === void 0) { + return void 0; } - /** - * The Set HTTP Headers operation sets system properties on the blob - * @param options The options parameters. - */ - setHttpHeaders(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); + const res = { + blobTagSet: [] + }; + for (const key in tags2) { + if (Object.prototype.hasOwnProperty.call(tags2, key)) { + const value = tags2[key]; + res.blobTagSet.push({ + key, + value + }); + } } - /** - * The Set Immutability Policy operation sets the immutability policy on the blob - * @param options The options parameters. - */ - setImmutabilityPolicy(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setImmutabilityPolicyOperationSpec); + return res; + } + __name(toBlobTags, "toBlobTags"); + function toTags(tags2) { + if (tags2 === void 0) { + return void 0; } - /** - * The Delete Immutability Policy operation deletes the immutability policy on the blob - * @param options The options parameters. - */ - deleteImmutabilityPolicy(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, deleteImmutabilityPolicyOperationSpec); + const res = {}; + for (const blobTag of tags2.blobTagSet) { + res[blobTag.key] = blobTag.value; } - /** - * The Set Legal Hold operation sets a legal hold on the blob. 
- * @param legalHold Specified if a legal hold should be set on the blob. - * @param options The options parameters. - */ - setLegalHold(legalHold2, options) { - const operationArguments = { - legalHold: legalHold2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setLegalHoldOperationSpec); + return res; + } + __name(toTags, "toTags"); + function toQuerySerialization(textConfiguration) { + if (textConfiguration === void 0) { + return void 0; } - /** - * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more - * name-value pairs - * @param options The options parameters. - */ - setMetadata(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + switch (textConfiguration.kind) { + case "csv": + return { + format: { + type: "delimited", + delimitedTextConfiguration: { + columnSeparator: textConfiguration.columnSeparator || ",", + fieldQuote: textConfiguration.fieldQuote || "", + recordSeparator: textConfiguration.recordSeparator, + escapeChar: textConfiguration.escapeCharacter || "", + headersPresent: textConfiguration.hasHeaders || false + } + } + }; + case "json": + return { + format: { + type: "json", + jsonTextConfiguration: { + recordSeparator: textConfiguration.recordSeparator + } + } + }; + case "arrow": + return { + format: { + type: "arrow", + arrowConfiguration: { + schema: textConfiguration.schema + } + } + }; + case "parquet": + return { + format: { + type: "parquet" + } + }; + default: + throw Error("Invalid BlobQueryTextConfiguration."); } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param options The options parameters. - */ - acquireLease(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + } + __name(toQuerySerialization, "toQuerySerialization"); + function parseObjectReplicationRecord(objectReplicationRecord) { + if (!objectReplicationRecord) { + return void 0; } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param leaseId Specifies the current lease ID on the resource. - * @param options The options parameters. - */ - releaseLease(leaseId2, options) { - const operationArguments = { - leaseId: leaseId2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + if ("policy-id" in objectReplicationRecord) { + return void 0; } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param leaseId Specifies the current lease ID on the resource. - * @param options The options parameters. 
- */ - renewLease(leaseId2, options) { - const operationArguments = { - leaseId: leaseId2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + const orProperties = []; + for (const key in objectReplicationRecord) { + const ids = key.split("_"); + const policyPrefix = "or-"; + if (ids[0].startsWith(policyPrefix)) { + ids[0] = ids[0].substring(policyPrefix.length); + } + const rule = { + ruleId: ids[1], + replicationStatus: objectReplicationRecord[key] }; - return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); + if (policyIndex > -1) { + orProperties[policyIndex].rules.push(rule); + } else { + orProperties.push({ + policyId: ids[0], + rules: [rule] + }); + } } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param leaseId Specifies the current lease ID on the resource. - * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 - * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor - * (String) for a list of valid GUID string formats. - * @param options The options parameters. - */ - changeLease(leaseId2, proposedLeaseId2, options) { - const operationArguments = { - leaseId: leaseId2, - proposedLeaseId: proposedLeaseId2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + return orProperties; + } + __name(parseObjectReplicationRecord, "parseObjectReplicationRecord"); + function attachCredential(thing, credential) { + thing.credential = credential; + return thing; + } + __name(attachCredential, "attachCredential"); + function httpAuthorizationToString(httpAuthorization) { + return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : void 0; + } + __name(httpAuthorizationToString, "httpAuthorizationToString"); + function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); + } else { + return name.content; } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param options The options parameters. - */ - breakLease(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + } + __name(BlobNameToString, "BlobNameToString"); + function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }) + } }); + } + __name(ConvertInternalResponseOfListBlobFlat, "ConvertInternalResponseOfListBlobFlat"); + function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + var _a; + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }) + } }); + } + __name(ConvertInternalResponseOfListBlobHierarchy, "ConvertInternalResponseOfListBlobHierarchy"); + function* ExtractPageRangeInfoItems(getPageRangesSegment) { + let pageRange = []; + let clearRange = []; + if (getPageRangesSegment.pageRange) + pageRange = getPageRangesSegment.pageRange; + if (getPageRangesSegment.clearRange) + clearRange = getPageRangesSegment.clearRange; + let pageRangeIndex = 0; + let clearRangeIndex = 0; + while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { + if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false + }; + ++pageRangeIndex; + } else { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true + }; + ++clearRangeIndex; + } } - /** - * The Create Snapshot operation creates a read-only snapshot of a blob - * @param options The options parameters. - */ - createSnapshot(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false }; - return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); } - /** - * The Start Copy From URL operation copies a blob or an internet resource to a new blob. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. - */ - startCopyFromURL(copySource2, options) { - const operationArguments = { - copySource: copySource2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true }; - return this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); } - /** - * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return - * a response until the copy is complete. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. 
- */ - copyFromURL(copySource2, options) { - const operationArguments = { - copySource: copySource2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); + } + __name(ExtractPageRangeInfoItems, "ExtractPageRangeInfoItems"); + function EscapePath(blobName) { + const split = blobName.split("/"); + for (let i = 0; i < split.length; i++) { + split[i] = encodeURIComponent(split[i]); + } + return split.join("/"); + } + __name(EscapePath, "EscapePath"); + var StorageBrowserPolicy = class extends coreHttp.BaseRequestPolicy { + static { + __name(this, "StorageBrowserPolicy"); } /** - * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination - * blob with zero length and full metadata. - * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob - * operation. - * @param options The options parameters. + * Creates an instance of StorageBrowserPolicy. + * @param nextPolicy - + * @param options - */ - abortCopyFromURL(copyId2, options) { - const operationArguments = { - copyId: copyId2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); } /** - * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium - * storage account and on a block blob in a blob storage account (locally redundant storage only). A - * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block - * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's - * ETag. - * @param tier Indicates the tier to be set on the blob. - * @param options The options parameters. + * Sends out request. + * + * @param request - */ - setTier(tier2, options) { - const operationArguments = { - tier: tier2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); + async sendRequest(request) { + if (coreHttp.isNode) { + return this._nextPolicy.sendRequest(request); + } + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); + } + request.headers.remove(HeaderConstants.COOKIE); + request.headers.remove(HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); + } + }; + var StorageBrowserPolicyFactory = class { + static { + __name(this, "StorageBrowserPolicyFactory"); } /** - * Returns the sku name and account kind - * @param options The options parameters. + * Creates a StorageBrowserPolicyFactory object. 
+ * + * @param nextPolicy - + * @param options - */ - getAccountInfo(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + create(nextPolicy, options) { + return new StorageBrowserPolicy(nextPolicy, options); + } + }; + exports2.StorageRetryPolicyType = void 0; + (function(StorageRetryPolicyType) { + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; + })(exports2.StorageRetryPolicyType || (exports2.StorageRetryPolicyType = {})); + var DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1e3, + maxTries: 4, + retryDelayInMs: 4 * 1e3, + retryPolicyType: exports2.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: void 0 + // Use server side default timeout strategy + }; + var RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); + var StorageRetryPolicy = class extends coreHttp.BaseRequestPolicy { + static { + __name(this, "StorageRetryPolicy"); } /** - * The Query operation enables users to select/project on blob data by providing simple query - * expressions. - * @param options The options parameters. + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - */ - query(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { + super(nextPolicy, options); + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType ? retryOptions.retryPolicyType : DEFAULT_RETRY_OPTIONS.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 ? Math.floor(retryOptions.maxTries) : DEFAULT_RETRY_OPTIONS.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 ? retryOptions.tryTimeoutInMs : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) : DEFAULT_RETRY_OPTIONS.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost ? retryOptions.secondaryHost : DEFAULT_RETRY_OPTIONS.secondaryHost }; - return this.client.sendOperationRequest(operationArguments, queryOperationSpec); } /** - * The Get Tags operation enables users to get the tags associated with a blob. - * @param options The options parameters. + * Sends request. + * + * @param request - */ - getTags(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); + async sendRequest(request) { + return this.attemptSendRequest(request, false, 1); } /** - * The Set Tags operation enables users to set tags on a blob. - * @param options The options parameters. + * Decide and perform next retry. Won't mutate request parameter. 
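+     * With the defaults above (EXPONENTIAL, retryDelayInMs = 4000,
+     * maxRetryDelayInMs = 120000), the primary-side wait computed in delay()
+     * below works out to min((2^(attempt - 1) - 1) * 4000, 120000) ms:
+     * 0 ms after attempt 1, 4 s after attempt 2, 12 s after attempt 3,
+     * capped at 120 s. (A worked example of the formula below, not
+     * normative documentation.)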
+ * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. */ - setTags(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); - } - }; - var xmlSerializer$3 = new coreHttp__namespace.Serializer( - Mappers, - /* isXml */ - true - ); - var downloadOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: BlobDownloadHeaders - }, - 206: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: BlobDownloadHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobDownloadExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId - ], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - rangeGetContentMD5, - rangeGetContentCRC64, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var getPropertiesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "HEAD", - responses: { - 200: { - headersMapper: BlobGetPropertiesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobGetPropertiesExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId - ], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var deleteOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "DELETE", - responses: { - 202: { - headersMapper: BlobDeleteHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobDeleteExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId, - blobDeleteType - ], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - deleteSnapshots - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var undeleteOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobUndeleteHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobUndeleteExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp8], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setExpiryOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetExpiryHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetExpiryExceptionHeaders + async 
attemptSendRequest(request, secondaryHas404, attempt) { + const newRequest = request.clone(); + const isPrimaryRetry = secondaryHas404 || !this.retryOptions.secondaryHost || !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); } - }, - queryParameters: [timeoutInSeconds, comp11], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - expiryOptions, - expiresOn - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setHttpHeadersOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetHttpHeadersHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetHttpHeadersExceptionHeaders + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1e3).toString()); } - }, - queryParameters: [comp, timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setImmutabilityPolicyOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetImmutabilityPolicyHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetImmutabilityPolicyExceptionHeaders + let response; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; + } + secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; + } catch (err) { + logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; + } } - }, - queryParameters: [timeoutInSeconds, comp12], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - ifUnmodifiedSince, - immutabilityPolicyExpiry, - immutabilityPolicyMode - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var deleteImmutabilityPolicyOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "DELETE", - responses: { - 200: { - headersMapper: BlobDeleteImmutabilityPolicyHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders + await this.delay(isPrimaryRetry, attempt, request.abortSignal); + return this.attemptSendRequest(request, secondaryHas404, ++attempt); + } + /** + * Decide whether to retry according to last HTTP response and retry counters. 
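+     * Roughly, paraphrasing the checks below for orientation only: retry on
+     * the listed network errors (ETIMEDOUT, ECONNRESET, ...), on HTTP 500/503,
+     * on a 404 seen by the secondary host, and on the "Unclosed root tag"
+     * PARSE_ERROR; give up once attempt reaches maxTries.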
+ * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions.maxTries}, no further try.`); + return false; } - }, - queryParameters: [timeoutInSeconds, comp12], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setLegalHoldOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetLegalHoldHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetLegalHoldExceptionHeaders + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR" + // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || err.message.toUpperCase().includes(retriableError) || err.code && err.code.toString().toUpperCase() === retriableError) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } } - }, - queryParameters: [timeoutInSeconds, comp13], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - legalHold - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setMetadataOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetMetadataHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetMetadataExceptionHeaders + if (response || err) { + const statusCode = response ? response.status : err ? err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } } - }, - queryParameters: [timeoutInSeconds, comp6], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var acquireLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlobAcquireLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobAcquireLeaseExceptionHeaders + if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? 
void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; } - }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action, - duration, - proposedLeaseId, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var releaseLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobReleaseLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobReleaseLeaseExceptionHeaders + return false; + } + /** + * Delay a calculated time between retries. + * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case exports2.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case exports2.StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; + } + } else { + delayTimeInMs = Math.random() * 1e3; } - }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action1, - leaseId1, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 + logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); + } }; - var renewLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobRenewLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobRenewLeaseExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - leaseId1, - action2, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 + var StorageRetryPolicyFactory = class { + static { + __name(this, "StorageRetryPolicyFactory"); + } + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions) { + this.retryOptions = retryOptions; + } + /** + * Creates a StorageRetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); + } }; - var changeLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobChangeLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobChangeLeaseExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - leaseId1, - action4, - proposedLeaseId1, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 + var CredentialPolicy = class extends coreHttp.BaseRequestPolicy { + static { + __name(this, "CredentialPolicy"); + } + /** + * Sends out request. 
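+     * A minimal subclass sketch (assumed name, editor's illustration only):
+     *
+     *   class StubCredentialPolicy extends CredentialPolicy {
+     *     signRequest(request) {
+     *       // Attach an auth header; "<token>" is a placeholder, not a real value.
+     *       request.headers.set(HeaderConstants.AUTHORIZATION, "Bearer <token>");
+     *       return request;
+     *     }
+     *   }
+     *
+     * Only signRequest needs overriding; this sendRequest then forwards the
+     * signed request down the pipeline.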
+ * + * @param request - + */ + sendRequest(request) { + return this._nextPolicy.sendRequest(this.signRequest(request)); + } + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - + */ + signRequest(request) { + return request; + } }; - var breakLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: BlobBreakLeaseHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobBreakLeaseExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action3, - breakPeriod, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 + var AnonymousCredentialPolicy = class extends CredentialPolicy { + static { + __name(this, "AnonymousCredentialPolicy"); + } + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } }; - var createSnapshotOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlobCreateSnapshotHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobCreateSnapshotExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds, comp14], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope - ], - isXML: true, - serializer: xmlSerializer$3 + var Credential = class { + static { + __name(this, "Credential"); + } + /** + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); + } }; - var startCopyFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: BlobStartCopyFromURLHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobStartCopyFromURLExceptionHeaders - } - }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - tier, - rehydratePriority, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceIfTags, - copySource, - blobTagsString, - sealBlob, - legalHold1 - ], - isXML: true, - serializer: xmlSerializer$3 + var AnonymousCredential = class extends Credential { + static { + __name(this, "AnonymousCredential"); + } + /** + * Creates an {@link AnonymousCredentialPolicy} object. 
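+ * Because AnonymousCredentialPolicy inherits the pass-through signRequest
+ * from CredentialPolicy, requests are sent unsigned; this credential is
+ * typically used with public resources or URLs that already carry a SAS token.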
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new AnonymousCredentialPolicy(nextPolicy, options); + } }; - var copyFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: BlobCopyFromURLHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobCopyFromURLExceptionHeaders + var TelemetryPolicy = class extends coreHttp.BaseRequestPolicy { + static { + __name(this, "TelemetryPolicy"); + } + /** + * Creates an instance of TelemetryPolicy. + * @param nextPolicy - + * @param options - + * @param telemetry - + */ + constructor(nextPolicy, options, telemetry) { + super(nextPolicy, options); + this.telemetry = telemetry; + } + /** + * Sends out request. + * + * @param request - + */ + async sendRequest(request) { + if (coreHttp.isNode) { + if (!request.headers) { + request.headers = new coreHttp.HttpHeaders(); + } + if (!request.headers.get(HeaderConstants.USER_AGENT)) { + request.headers.set(HeaderConstants.USER_AGENT, this.telemetry); + } } - }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - tier, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - copySource, - blobTagsString, - legalHold1, - xMsRequiresSync, - sourceContentMD5, - copySourceAuthorization, - copySourceTags - ], - isXML: true, - serializer: xmlSerializer$3 + return this._nextPolicy.sendRequest(request); + } }; - var abortCopyFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 204: { - headersMapper: BlobAbortCopyFromURLHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobAbortCopyFromURLExceptionHeaders + var TelemetryPolicyFactory = class { + static { + __name(this, "TelemetryPolicyFactory"); + } + /** + * Creates an instance of TelemetryPolicyFactory. + * @param telemetry - + */ + constructor(telemetry) { + const userAgentInfo = []; + if (coreHttp.isNode) { + if (telemetry) { + const telemetryString = telemetry.userAgentPrefix || ""; + if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { + userAgentInfo.push(telemetryString); + } + } + const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`; + if (userAgentInfo.indexOf(libInfo) === -1) { + userAgentInfo.push(libInfo); + } + let runtimeInfo = `(NODE-VERSION ${process.version})`; + if (os__namespace) { + runtimeInfo = `(NODE-VERSION ${process.version}; ${os__namespace.type()} ${os__namespace.release()})`; + } + if (userAgentInfo.indexOf(runtimeInfo) === -1) { + userAgentInfo.push(runtimeInfo); + } } - }, - queryParameters: [ - timeoutInSeconds, - comp15, - copyId - ], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - leaseId, - copyActionAbortConstant - ], - isXML: true, - serializer: xmlSerializer$3 + this.telemetryString = userAgentInfo.join(" "); + } + /** + * Creates a TelemetryPolicy object. 
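+ * The telemetryString assembled by the constructor has the shape
+ * "<userAgentPrefix> azsdk-js-storageblob/<SDK_VERSION> (NODE-VERSION <node>; <os>)",
+ * e.g. "azsdk-js-storageblob/12.17.0 (NODE-VERSION v18.19.0; Linux 5.15.0)"
+ * (versions illustrative); TelemetryPolicy sets it as the User-Agent header
+ * on Node when the request does not already carry one.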
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new TelemetryPolicy(nextPolicy, options, this.telemetryString); + } }; - var setTierOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetTierHeaders - }, - 202: { - headersMapper: BlobSetTierHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetTierExceptionHeaders - } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId, - comp16 - ], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - leaseId, - ifTags, - rehydratePriority, - tier1 - ], - isXML: true, - serializer: xmlSerializer$3 + var _defaultHttpClient = new coreHttp.DefaultHttpClient(); + function getCachedDefaultHttpClient() { + return _defaultHttpClient; + } + __name(getCachedDefaultHttpClient, "getCachedDefaultHttpClient"); + var Constants = { + DefaultScope: "/.default", + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization" + } }; - var getAccountInfoOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - headersMapper: BlobGetAccountInfoHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobGetAccountInfoExceptionHeaders - } - }, - queryParameters: [comp, restype1], - urlParameters: [url], - headerParameters: [version3, accept1], - isXML: true, - serializer: xmlSerializer$3 + var DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1e3, + retryIntervalInMs: 3e3, + refreshWindowInMs: 1e3 * 60 * 2 + // Start refreshing 2m before expiry }; - var queryOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "POST", - responses: { - 200: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: BlobQueryHeaders + async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } catch (_a) { + return null; + } + } else { + const finalToken = await getAccessToken(); + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + __name(tryGetAccessToken, "tryGetAccessToken"); + let token = await tryGetAccessToken(); + while (token === null) { + await coreHttp.delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; + } + __name(beginRefresh, "beginRefresh"); + function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; }, - 206: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: BlobQueryHeaders + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return !cycler.isRefreshing && ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? 
_a : 0) - options.refreshWindowInMs < Date.now(); }, - default: { - bodyMapper: StorageError, - headersMapper: BlobQueryExceptionHeaders + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now(); } - }, - requestBody: queryRequest, - queryParameters: [ - timeoutInSeconds, - snapshot, - comp17 - ], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version3, - requestId, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$3 - }; - var getTagsOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: BlobTags, - headersMapper: BlobGetTagsHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobGetTagsExceptionHeaders + }; + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + const tryGetAccessToken = /* @__PURE__ */ __name(() => credential.getToken(scopes, getTokenOptions), "tryGetAccessToken"); + refreshWorker = beginRefresh( + tryGetAccessToken, + options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now() + ).then((_token) => { + refreshWorker = null; + token = _token; + return token; + }).catch((reason) => { + refreshWorker = null; + token = null; + throw reason; + }); } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId, - comp18 - ], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - leaseId, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 - }; - var setTagsOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 204: { - headersMapper: BlobSetTagsHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetTagsExceptionHeaders + return refreshWorker; + } + __name(refresh, "refresh"); + return async (tokenOptions) => { + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); } - }, - requestBody: tags, - queryParameters: [ - timeoutInSeconds, - versionId, - comp18 - ], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version3, - requestId, - leaseId, - ifTags, - transactionalContentMD5, - transactionalContentCrc64 - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$3 - }; - var PageBlob = class { + return token; + }; + } + __name(createTokenCycler, "createTokenCycler"); + function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; + } + __name(getChallenge, "getChallenge"); + function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + return keyValuePairs.reduce((a, b) => Object.assign(Object.assign({}, a), b), {}); + } + 
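+ // For example, a 401 response carrying
+ //   WWW-Authenticate: Bearer authorization_uri=https://login.microsoftonline.com/<tenant>/oauth2/authorize resource_id=https://storage.azure.com
+ // parses to { authorization_uri: "...", resource_id: "https://storage.azure.com" };
+ // the policy below appends Constants.DefaultScope ("/.default") to
+ // resource_id to form the challenge scopes and reads the tenant id from the
+ // first path segment of authorization_uri.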
__name(parseChallenge, "parseChallenge"); + function storageBearerTokenChallengeAuthenticationPolicy(credential, scopes) { + let getToken = createTokenCycler(credential, scopes); + class StorageBearerTokenChallengeAuthenticationPolicy extends coreHttp.BaseRequestPolicy { + static { + __name(this, "StorageBearerTokenChallengeAuthenticationPolicy"); + } + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const getTokenInternal = getToken; + const token = (await getTokenInternal({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext + } + })).token; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + const response = await this._nextPolicy.sendRequest(webResource); + if ((response === null || response === void 0 ? void 0 : response.status) === 401) { + const challenge = getChallenge(response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope; + const parsedAuthUri = coreHttp.URLBuilder.parse(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.getPath().split("/"); + const tenantId = pathSegments[1]; + const getTokenForChallenge = createTokenCycler(credential, challengeScopes); + const tokenForChallenge = (await getTokenForChallenge({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext + }, + tenantId + })).token; + getToken = getTokenForChallenge; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${tokenForChallenge}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return response; + } + } + return { + create: /* @__PURE__ */ __name((nextPolicy, options) => { + return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options); + }, "create") + }; + } + __name(storageBearerTokenChallengeAuthenticationPolicy, "storageBearerTokenChallengeAuthenticationPolicy"); + function isPipelineLike(pipeline) { + if (!pipeline || typeof pipeline !== "object") { + return false; + } + const castPipeline = pipeline; + return Array.isArray(castPipeline.factories) && typeof castPipeline.options === "object" && typeof castPipeline.toServiceClientOptions === "function"; + } + __name(isPipelineLike, "isPipelineLike"); + var Pipeline = class { static { - __name(this, "PageBlob"); + __name(this, "Pipeline"); } /** - * Initialize a new instance of the class PageBlob class. - * @param client Reference to the service client + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. + * + * @param factories - + * @param options - */ - constructor(client) { - this.client = client; + constructor(factories, options = {}) { + this.factories = factories; + this.options = Object.assign(Object.assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() }); } /** - * The Create operation creates a new page blob. - * @param contentLength The length of the request. - * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The - * page blob size must be aligned to a 512-byte boundary. - * @param options The options parameters. 
+ * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. */ - create(contentLength2, blobContentLength2, options) { - const operationArguments = { - contentLength: contentLength2, - blobContentLength: blobContentLength2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + toServiceClientOptions() { + return { + httpClient: this.options.httpClient, + requestPolicyFactories: this.factories }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec$1); + } + }; + function newPipeline(credential, pipelineOptions = {}) { + var _a; + if (credential === void 0) { + credential = new AnonymousCredential(); + } + const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions); + const factories = [ + coreHttp.tracingPolicy({ userAgent: telemetryPolicy.telemetryString }), + coreHttp.keepAlivePolicy(pipelineOptions.keepAliveOptions), + telemetryPolicy, + coreHttp.generateClientRequestIdPolicy(), + new StorageBrowserPolicyFactory(), + new StorageRetryPolicyFactory(pipelineOptions.retryOptions), + // Default deserializationPolicy is provided by protocol layer + // Use customized XML char key of "#" so we could deserialize metadata + // with "_" key + coreHttp.deserializationPolicy(void 0, { xmlCharKey: "#" }), + coreHttp.logPolicy({ + logger: logger.info, + allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, + allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters + }) + ]; + if (coreHttp.isNode) { + factories.push(coreHttp.proxyPolicy(pipelineOptions.proxyOptions)); + factories.push(coreHttp.disableResponseDecompressionPolicy()); + } + factories.push(coreHttp.isTokenCredential(credential) ? attachCredential(storageBearerTokenChallengeAuthenticationPolicy(credential, (_a = pipelineOptions.audience) !== null && _a !== void 0 ? _a : StorageOAuthScopes), credential) : credential); + return new Pipeline(factories, pipelineOptions); + } + __name(newPipeline, "newPipeline"); + var StorageSharedKeyCredentialPolicy = class extends CredentialPolicy { + static { + __name(this, "StorageSharedKeyCredentialPolicy"); } /** - * The Upload Pages operation writes a range of pages to a page blob - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - */ - uploadPages(contentLength2, body2, options) { - const operationArguments = { - contentLength: contentLength2, - body: body2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); + constructor(nextPolicy, options, factory) { + super(nextPolicy, options); + this.factory = factory; } /** - * The Clear Pages operation clears a set of pages from a page blob - * @param contentLength The length of the request. - * @param options The options parameters. + * Signs request. 
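+ * Builds the Shared Key string-to-sign (HTTP verb, the standard headers
+ * listed below, the canonicalized x-ms-* headers and the canonicalized
+ * resource), computes an HMAC-SHA256 over it via the credential factory, and
+ * sets "Authorization: SharedKey <accountName>:<signature>" on the request.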
+ * + * @param request - */ - clearPages(contentLength2, options) { - const operationArguments = { - contentLength: contentLength2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); + signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); + if (request.body && (typeof request.body === "string" || request.body !== void 0) && request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + const stringToSign = [ + request.method.toUpperCase(), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request, HeaderConstants.DATE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.RANGE) + ].join("\n") + "\n" + this.getCanonicalizedHeadersString(request) + this.getCanonicalizedResourceString(request); + const signature = this.factory.computeHMACSHA256(stringToSign); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + return request; } /** - * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a - * URL - * @param sourceUrl Specify a URL to the copy source. - * @param sourceRange Bytes of source data in the specified range. The length of this range should - * match the ContentLength header and x-ms-range/Range destination range header. - * @param contentLength The length of the request. - * @param range The range of bytes to which the source range would be written. The range should be 512 - * aligned and range-end is required. - * @param options The options parameters. + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - */ - uploadPagesFromURL(sourceUrl2, sourceRange2, contentLength2, range2, options) { - const operationArguments = { - sourceUrl: sourceUrl2, - sourceRange: sourceRange2, - contentLength: contentLength2, - range: range2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); + getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; + } + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; + } + return value; } /** - * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a - * page blob - * @param options The options parameters. + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. 
+ * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. + * + * @param request - */ - getPageRanges(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); + getCanonicalizedHeadersString(request) { + let headersArray = request.headers.headersArray().filter((value) => { + return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); + }); + headersArray.sort((a, b) => { + return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); + }); + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name.toLowerCase().trimRight()}:${header.value.trimLeft()} +`; + }); + return canonicalizedHeadersStringToSign; } /** - * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were - * changed between target blob and previous snapshot. - * @param options The options parameters. + * Retrieves the webResource canonicalized resource string. + * + * @param request - */ - getPageRangesDiff(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); + getCanonicalizedResourceString(request) { + const path2 = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${this.factory.accountName}${path2}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += ` +${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } + } + return canonicalizedResourceString; + } + }; + var StorageSharedKeyCredential = class extends Credential { + static { + __name(this, "StorageSharedKeyCredential"); + } + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName, accountKey) { + super(); + this.accountName = accountName; + this.accountKey = Buffer.from(accountKey, "base64"); + } + /** + * Creates a StorageSharedKeyCredentialPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. 
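+ * Equivalent to
+ *   crypto.createHmac("sha256", Buffer.from(accountKey, "base64")).update(stringToSign, "utf8").digest("base64"),
+ * since the constructor stores the base64-decoded account key.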
+ * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + return crypto4.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + } + }; + var packageName = "azure-storage-blob"; + var packageVersion = "12.17.0"; + var StorageClientContext = class extends coreHttp__namespace.ServiceClient { + static { + __name(this, "StorageClientContext"); + } + /** + * Initializes a new instance of the StorageClientContext class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. + * @param options The parameter options + */ + constructor(url2, options) { + if (url2 === void 0) { + throw new Error("'url' cannot be null"); + } + if (!options) { + options = {}; + } + if (!options.userAgent) { + const defaultUserAgent = coreHttp__namespace.getDefaultUserAgentValue(); + options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; + } + super(void 0, options); + this.requestContentType = "application/json; charset=utf-8"; + this.baseUri = options.endpoint || "{url}"; + this.url = url2; + this.version = options.version || "2023-11-03"; + } + }; + var StorageClient = class { + static { + __name(this, "StorageClient"); } /** - * Resize the Blob - * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The - * page blob size must be aligned to a 512-byte boundary. - * @param options The options parameters. + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. */ - resize(blobContentLength2, options) { - const operationArguments = { - blobContentLength: blobContentLength2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); + constructor(url2, pipeline) { + this.url = escapeURLPath(url2); + this.accountName = getAccountNameFromUrl(url2); + this.pipeline = pipeline; + this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions()); + this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); + this.credential = new AnonymousCredential(); + for (const factory of this.pipeline.factories) { + if (coreHttp.isNode && factory instanceof StorageSharedKeyCredential || factory instanceof AnonymousCredential) { + this.credential = factory; + } else if (coreHttp.isTokenCredential(factory.credential)) { + this.credential = factory.credential; + } + } + const storageClientContext = this.storageClientContext; + storageClientContext.requestContentType = void 0; + } + }; + var createSpan = coreTracing.createSpanFunction({ + packagePrefix: "Azure.Storage.Blob", + namespace: "Microsoft.Storage" + }); + function convertTracingToRequestOptionsBase(options) { + var _a, _b; + return { + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. + spanOptions: (_a = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _a === void 0 ? void 0 : _a.spanOptions, + tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? 
void 0 : _b.tracingContext + }; + } + __name(convertTracingToRequestOptionsBase, "convertTracingToRequestOptionsBase"); + var BlobSASPermissions = class _BlobSASPermissions { + static { + __name(this, "BlobSASPermissions"); + } + constructor() { + this.read = false; + this.add = false; + this.create = false; + this.write = false; + this.delete = false; + this.deleteVersion = false; + this.tag = false; + this.move = false; + this.execute = false; + this.setImmutabilityPolicy = false; + this.permanentDelete = false; } /** - * Update the sequence number of the blob - * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. - * This property applies to page blobs only. This property indicates how the service should modify the - * blob's sequence number - * @param options The options parameters. + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - */ - updateSequenceNumber(sequenceNumberAction2, options) { - const operationArguments = { - sequenceNumberAction: sequenceNumberAction2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); + static parse(permissions) { + const blobSASPermissions = new _BlobSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + blobSASPermissions.read = true; + break; + case "a": + blobSASPermissions.add = true; + break; + case "c": + blobSASPermissions.create = true; + break; + case "w": + blobSASPermissions.write = true; + break; + case "d": + blobSASPermissions.delete = true; + break; + case "x": + blobSASPermissions.deleteVersion = true; + break; + case "t": + blobSASPermissions.tag = true; + break; + case "m": + blobSASPermissions.move = true; + break; + case "e": + blobSASPermissions.execute = true; + break; + case "i": + blobSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + blobSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission: ${char}`); + } + } + return blobSASPermissions; } /** - * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. - * The snapshot is copied such that only the differential changes between the previously copied - * snapshot are transferred to the destination. The copied snapshots are complete copies of the - * original snapshot and can be read or copied from as usual. This API is supported since REST version - * 2016-05-31. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. 
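+ * For example, BlobSASPermissions.from({ read: true, write: true }).toString()
+ * yields "rw"; keys that are absent or falsy on the raw object are ignored.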
+ * + * @param permissionLike - */ - copyIncremental(copySource2, options) { - const operationArguments = { - copySource: copySource2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); - } - }; - var xmlSerializer$2 = new coreHttp__namespace.Serializer( - Mappers, - /* isXml */ - true - ); - var serializer$2 = new coreHttp__namespace.Serializer( - Mappers, - /* isXml */ - false - ); - var createOperationSpec$1 = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobCreateHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobCreateExceptionHeaders + static from(permissionLike) { + const blobSASPermissions = new _BlobSASPermissions(); + if (permissionLike.read) { + blobSASPermissions.read = true; } - }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - contentLength, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - tier, - blobTagsString, - legalHold1, - blobType, - blobContentLength, - blobSequenceNumber - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var uploadPagesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobUploadPagesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobUploadPagesExceptionHeaders + if (permissionLike.add) { + blobSASPermissions.add = true; } - }, - requestBody: body1, - queryParameters: [timeoutInSeconds, comp19], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - transactionalContentMD5, - transactionalContentCrc64, - contentType1, - accept2, - pageWrite, - ifSequenceNumberLessThanOrEqualTo, - ifSequenceNumberLessThan, - ifSequenceNumberEqualTo - ], - mediaType: "binary", - serializer: serializer$2 - }; - var clearPagesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobClearPagesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobClearPagesExceptionHeaders + if (permissionLike.create) { + blobSASPermissions.create = true; } - }, - queryParameters: [timeoutInSeconds, comp19], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - ifSequenceNumberLessThanOrEqualTo, - ifSequenceNumberLessThan, - ifSequenceNumberEqualTo, - pageWrite1 - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var uploadPagesFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobUploadPagesFromURLHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: 
PageBlobUploadPagesFromURLExceptionHeaders + if (permissionLike.write) { + blobSASPermissions.write = true; } - }, - queryParameters: [timeoutInSeconds, comp19], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceContentMD5, - copySourceAuthorization, - pageWrite, - ifSequenceNumberLessThanOrEqualTo, - ifSequenceNumberLessThan, - ifSequenceNumberEqualTo, - sourceUrl, - sourceRange, - sourceContentCrc64, - range1 - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var getPageRangesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: PageList, - headersMapper: PageBlobGetPageRangesHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobGetPageRangesExceptionHeaders + if (permissionLike.delete) { + blobSASPermissions.delete = true; } - }, - queryParameters: [ - timeoutInSeconds, - marker, - maxPageSize, - snapshot, - comp20 - ], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var getPageRangesDiffOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: PageList, - headersMapper: PageBlobGetPageRangesDiffHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobGetPageRangesDiffExceptionHeaders + if (permissionLike.deleteVersion) { + blobSASPermissions.deleteVersion = true; } - }, - queryParameters: [ - timeoutInSeconds, - marker, - maxPageSize, - snapshot, - comp20, - prevsnapshot - ], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - ifMatch, - ifNoneMatch, - ifTags, - prevSnapshotUrl - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var resizeOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: PageBlobResizeHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobResizeExceptionHeaders + if (permissionLike.tag) { + blobSASPermissions.tag = true; } - }, - queryParameters: [comp, timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - blobContentLength - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var updateSequenceNumberOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: PageBlobUpdateSequenceNumberHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders + if (permissionLike.move) { + blobSASPermissions.move = true; } - }, - queryParameters: [comp, timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - blobSequenceNumber, - sequenceNumberAction - ], - isXML: true, - serializer: 
xmlSerializer$2 - }; - var copyIncrementalOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: PageBlobCopyIncrementalHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobCopyIncrementalExceptionHeaders + if (permissionLike.execute) { + blobSASPermissions.execute = true; } - }, - queryParameters: [timeoutInSeconds, comp21], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - copySource - ], - isXML: true, - serializer: xmlSerializer$2 - }; - var AppendBlob = class { - static { - __name(this, "AppendBlob"); + if (permissionLike.setImmutabilityPolicy) { + blobSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; + } + return blobSASPermissions; } /** - * Initialize a new instance of the class AppendBlob class. - * @param client Reference to the service client + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * @returns A string which represents the BlobSASPermissions */ - constructor(client) { - this.client = client; + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); } - /** - * The Create Append Blob operation creates a new append blob. - * @param contentLength The length of the request. - * @param options The options parameters. - */ - create(contentLength2, options) { - const operationArguments = { - contentLength: contentLength2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec); + }; + var ContainerSASPermissions = class _ContainerSASPermissions { + static { + __name(this, "ContainerSASPermissions"); + } + constructor() { + this.read = false; + this.add = false; + this.create = false; + this.write = false; + this.delete = false; + this.deleteVersion = false; + this.list = false; + this.tag = false; + this.move = false; + this.execute = false; + this.setImmutabilityPolicy = false; + this.permanentDelete = false; + this.filterByTags = false; } /** - * The Append Block operation commits a new block of data to the end of an existing append blob. The - * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to - * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. 
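+ * For example, parse("racwdl") enables read, add, create, write, delete and
+ * list, while parse("rq") throws a RangeError for the unknown character "q".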
+ * + * @param permissions - */ - appendBlock(contentLength2, body2, options) { - const operationArguments = { - contentLength: contentLength2, - body: body2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); + static parse(permissions) { + const containerSASPermissions = new _ContainerSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + containerSASPermissions.read = true; + break; + case "a": + containerSASPermissions.add = true; + break; + case "c": + containerSASPermissions.create = true; + break; + case "w": + containerSASPermissions.write = true; + break; + case "d": + containerSASPermissions.delete = true; + break; + case "l": + containerSASPermissions.list = true; + break; + case "t": + containerSASPermissions.tag = true; + break; + case "x": + containerSASPermissions.deleteVersion = true; + break; + case "m": + containerSASPermissions.move = true; + break; + case "e": + containerSASPermissions.execute = true; + break; + case "i": + containerSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; + break; + default: + throw new RangeError(`Invalid permission ${char}`); + } + } + return containerSASPermissions; } /** - * The Append Block operation commits a new block of data to the end of an existing append blob where - * the contents are read from a source url. The Append Block operation is permitted only if the blob - * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version - * 2015-02-21 version or later. - * @param sourceUrl Specify a URL to the copy source. - * @param contentLength The length of the request. - * @param options The options parameters. + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. 
+ * + * @param permissionLike - */ - appendBlockFromUrl(sourceUrl2, contentLength2, options) { - const operationArguments = { - sourceUrl: sourceUrl2, - contentLength: contentLength2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); + static from(permissionLike) { + const containerSASPermissions = new _ContainerSASPermissions(); + if (permissionLike.read) { + containerSASPermissions.read = true; + } + if (permissionLike.add) { + containerSASPermissions.add = true; + } + if (permissionLike.create) { + containerSASPermissions.create = true; + } + if (permissionLike.write) { + containerSASPermissions.write = true; + } + if (permissionLike.delete) { + containerSASPermissions.delete = true; + } + if (permissionLike.list) { + containerSASPermissions.list = true; + } + if (permissionLike.deleteVersion) { + containerSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + containerSASPermissions.tag = true; + } + if (permissionLike.move) { + containerSASPermissions.move = true; + } + if (permissionLike.execute) { + containerSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + containerSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; + } + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; + } + return containerSASPermissions; } /** - * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version - * 2019-12-12 version or later. - * @param options The options parameters. + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. 
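+ * The emitted order is fixed ("racwdxltmeiyf"), so e.g. the permission set
+ * {list, read, write} always serializes as "rwl" however it was constructed.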
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * */ - seal(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, sealOperationSpec); - } - }; - var xmlSerializer$1 = new coreHttp__namespace.Serializer( - Mappers, - /* isXml */ - true - ); - var serializer$1 = new coreHttp__namespace.Serializer( - Mappers, - /* isXml */ - false - ); - var createOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: AppendBlobCreateHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: AppendBlobCreateExceptionHeaders + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); } - }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - contentLength, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - blobTagsString, - legalHold1, - blobType1 - ], - isXML: true, - serializer: xmlSerializer$1 - }; - var appendBlockOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: AppendBlobAppendBlockHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: AppendBlobAppendBlockExceptionHeaders + if (this.add) { + permissions.push("a"); } - }, - requestBody: body1, - queryParameters: [timeoutInSeconds, comp22], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - transactionalContentMD5, - transactionalContentCrc64, - contentType1, - accept2, - maxSize, - appendPosition - ], - mediaType: "binary", - serializer: serializer$1 - }; - var appendBlockFromUrlOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: AppendBlobAppendBlockFromUrlHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders + if (this.create) { + permissions.push("c"); } - }, - queryParameters: [timeoutInSeconds, comp22], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceContentMD5, - copySourceAuthorization, - transactionalContentMD5, - sourceUrl, - sourceContentCrc64, - maxSize, - appendPosition, - sourceRange1 - ], - isXML: true, - serializer: xmlSerializer$1 - }; - var sealOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: AppendBlobSealHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: AppendBlobSealExceptionHeaders + if (this.write) { + permissions.push("w"); } - }, - queryParameters: [timeoutInSeconds, 
comp23], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - appendPosition - ], - isXML: true, - serializer: xmlSerializer$1 + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.list) { + permissions.push("l"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + if (this.filterByTags) { + permissions.push("f"); + } + return permissions.join(""); + } }; - var BlockBlob = class { + var UserDelegationKeyCredential = class { static { - __name(this, "BlockBlob"); + __name(this, "UserDelegationKeyCredential"); } /** - * Initialize a new instance of the class BlockBlob class. - * @param client Reference to the service client + * Creates an instance of UserDelegationKeyCredential. + * @param accountName - + * @param userDelegationKey - */ - constructor(client) { - this.client = client; + constructor(accountName, userDelegationKey) { + this.accountName = accountName; + this.userDelegationKey = userDelegationKey; + this.key = Buffer.from(userDelegationKey.value, "base64"); } /** - * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing - * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put - * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a - * partial update of the content of a block blob, use the Put Block List operation. - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - */ - upload(contentLength2, body2, options) { - const operationArguments = { - contentLength: contentLength2, - body: body2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, uploadOperationSpec); + computeHMACSHA256(stringToSign) { + return crypto4.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); + } + }; + function ipRangeToString(ipRange) { + return ipRange.end ? 
`${ipRange.start}-${ipRange.end}` : ipRange.start; + } + __name(ipRangeToString, "ipRangeToString"); + exports2.SASProtocol = void 0; + (function(SASProtocol) { + SASProtocol["Https"] = "https"; + SASProtocol["HttpsAndHttp"] = "https,http"; + })(exports2.SASProtocol || (exports2.SASProtocol = {})); + var SASQueryParameters = class { + static { + __name(this, "SASQueryParameters"); + } + constructor(version4, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn2, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType2, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope2) { + this.version = version4; + this.signature = signature; + if (permissionsOrOptions !== void 0 && typeof permissionsOrOptions !== "string") { + this.permissions = permissionsOrOptions.permissions; + this.services = permissionsOrOptions.services; + this.resourceTypes = permissionsOrOptions.resourceTypes; + this.protocol = permissionsOrOptions.protocol; + this.startsOn = permissionsOrOptions.startsOn; + this.expiresOn = permissionsOrOptions.expiresOn; + this.ipRangeInner = permissionsOrOptions.ipRange; + this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; + this.resource = permissionsOrOptions.resource; + this.cacheControl = permissionsOrOptions.cacheControl; + this.contentDisposition = permissionsOrOptions.contentDisposition; + this.contentEncoding = permissionsOrOptions.contentEncoding; + this.contentLanguage = permissionsOrOptions.contentLanguage; + this.contentType = permissionsOrOptions.contentType; + if (permissionsOrOptions.userDelegationKey) { + this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; + this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; + this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; + this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; + this.signedService = permissionsOrOptions.userDelegationKey.signedService; + this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; + this.correlationId = permissionsOrOptions.correlationId; + } + } else { + this.services = services; + this.resourceTypes = resourceTypes; + this.expiresOn = expiresOn2; + this.permissions = permissionsOrOptions; + this.protocol = protocol; + this.startsOn = startsOn; + this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope2; + this.identifier = identifier; + this.resource = resource; + this.cacheControl = cacheControl; + this.contentDisposition = contentDisposition; + this.contentEncoding = contentEncoding; + this.contentLanguage = contentLanguage; + this.contentType = contentType2; + if (userDelegationKey) { + this.signedOid = userDelegationKey.signedObjectId; + this.signedTenantId = userDelegationKey.signedTenantId; + this.signedStartsOn = userDelegationKey.signedStartsOn; + this.signedExpiresOn = userDelegationKey.signedExpiresOn; + this.signedService = userDelegationKey.signedService; + this.signedVersion = userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; + this.correlationId = correlationId; + } + } } /** - * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read - * from a given URL. This API is supported beginning with the 2020-04-08 version. 
Partial updates are - * not supported with Put Blob from URL; the content of an existing blob is overwritten with the - * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, - * use the Put Block from URL API in conjunction with Put Block List. - * @param contentLength The length of the request. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. + * Optional. IP range allowed for this SAS. + * + * @readonly */ - putBlobFromUrl(contentLength2, copySource2, options) { - const operationArguments = { - contentLength: contentLength2, - copySource: copySource2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); + get ipRange() { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start + }; + } + return void 0; } /** - * The Stage Block operation creates a new block to be committed as part of a blob - * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string - * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified - * for the blockid parameter must be the same size for each block. - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. + * Encodes all SAS query parameters into a string that can be appended to a URL. + * */ - stageBlock(blockId2, contentLength2, body2, options) { - const operationArguments = { - blockId: blockId2, - contentLength: contentLength2, - body: body2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); + toString() { + const params = [ + "sv", + "ss", + "srt", + "spr", + "st", + "se", + "sip", + "si", + "ses", + "skoid", + "sktid", + "skt", + "ske", + "sks", + "skv", + "sr", + "sp", + "sig", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "saoid", + "scid" + ]; + const queries = []; + for (const param of params) { + switch (param) { + case "sv": + this.tryAppendQueryParameter(queries, param, this.version); + break; + case "ss": + this.tryAppendQueryParameter(queries, param, this.services); + break; + case "srt": + this.tryAppendQueryParameter(queries, param, this.resourceTypes); + break; + case "spr": + this.tryAppendQueryParameter(queries, param, this.protocol); + break; + case "st": + this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : void 0); + break; + case "se": + this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : void 0); + break; + case "sip": + this.tryAppendQueryParameter(queries, param, this.ipRange ? 
ipRangeToString(this.ipRange) : void 0); + break; + case "si": + this.tryAppendQueryParameter(queries, param, this.identifier); + break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; + case "skoid": + this.tryAppendQueryParameter(queries, param, this.signedOid); + break; + case "sktid": + this.tryAppendQueryParameter(queries, param, this.signedTenantId); + break; + case "skt": + this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : void 0); + break; + case "ske": + this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : void 0); + break; + case "sks": + this.tryAppendQueryParameter(queries, param, this.signedService); + break; + case "skv": + this.tryAppendQueryParameter(queries, param, this.signedVersion); + break; + case "sr": + this.tryAppendQueryParameter(queries, param, this.resource); + break; + case "sp": + this.tryAppendQueryParameter(queries, param, this.permissions); + break; + case "sig": + this.tryAppendQueryParameter(queries, param, this.signature); + break; + case "rscc": + this.tryAppendQueryParameter(queries, param, this.cacheControl); + break; + case "rscd": + this.tryAppendQueryParameter(queries, param, this.contentDisposition); + break; + case "rsce": + this.tryAppendQueryParameter(queries, param, this.contentEncoding); + break; + case "rscl": + this.tryAppendQueryParameter(queries, param, this.contentLanguage); + break; + case "rsct": + this.tryAppendQueryParameter(queries, param, this.contentType); + break; + case "saoid": + this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); + break; + case "scid": + this.tryAppendQueryParameter(queries, param, this.correlationId); + break; + } + } + return queries.join("&"); } /** - * The Stage Block operation creates a new block to be committed as part of a blob where the contents - * are read from a URL. - * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string - * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified - * for the blockid parameter must be the same size for each block. - * @param contentLength The length of the request. - * @param sourceUrl Specify a URL to the copy source. - * @param options The options parameters. + * A private helper method used to filter and append query key/value pairs into an array. + * + * @param queries - + * @param key - + * @param value - */ - stageBlockFromURL(blockId2, contentLength2, sourceUrl2, options) { - const operationArguments = { - blockId: blockId2, - contentLength: contentLength2, - sourceUrl: sourceUrl2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); + tryAppendQueryParameter(queries, key, value) { + if (!value) { + return; + } + key = encodeURIComponent(key); + value = encodeURIComponent(value); + if (key.length > 0 && value.length > 0) { + queries.push(`${key}=${value}`); + } } - /** - * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the - * blob. In order to be written as part of a blob, a block must have been successfully written to the - * server in a prior Put Block operation. 
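// toString()/tryAppendQueryParameter above serialize only the populated SAS
// fields, in the fixed order of the `params` array, URI-encoding both key and
// value; empty or undefined fields are skipped entirely rather than emitted as
// `key=`. So a minimal read-only blob SAS renders roughly as (illustrative,
// not a real signature):
//
//   sv=2023-11-03&se=2024-12-11T00%3A00%3A00Z&sr=b&sp=r&sig=<base64-hmac>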
You can call Put Block List to update a blob by uploading - * only those blocks that have changed, then committing the new and existing blocks together. You can - * do this by specifying whether to commit a block from the committed block list or from the - * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list - * it may belong to. - * @param blocks Blob Blocks. - * @param options The options parameters. - */ - commitBlockList(blocks2, options) { - const operationArguments = { - blocks: blocks2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); + }; + function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + const version4 = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; + const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential ? sharedKeyCredentialOrUserDelegationKey : void 0; + let userDelegationKeyCredential; + if (sharedKeyCredential === void 0 && accountName !== void 0) { + userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); } - /** - * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block - * blob - * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted - * blocks, or both lists together. - * @param options The options parameters. - */ - getBlockList(listType2, options) { - const operationArguments = { - listType: listType2, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); + if (sharedKeyCredential === void 0 && userDelegationKeyCredential === void 0) { + throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); } - }; - var xmlSerializer = new coreHttp__namespace.Serializer( - Mappers, - /* isXml */ - true - ); - var serializer = new coreHttp__namespace.Serializer( - Mappers, - /* isXml */ - false - ); - var uploadOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobUploadHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobUploadExceptionHeaders + if (version4 >= "2020-12-06") { + if (sharedKeyCredential !== void 0) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + } else { + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); } - }, - requestBody: body1, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - contentLength, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - tier, - blobTagsString, - legalHold1, - transactionalContentMD5, - transactionalContentCrc64, - contentType1, - accept2, - blobType2 - ], - mediaType: "binary", - serializer - }; - var putBlobFromUrlOperationSpec = { - path: 
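// generateBlobSASQueryParameters above dispatches on the service version
// string (plain lexicographic comparison works because the versions are
// zero-padded dates). A hedged usage sketch with a shared key credential;
// the account, key, and blob names below are placeholders:
//
//   const cred = new StorageSharedKeyCredential("myaccount", "<base64-key>");
//   const sas = generateBlobSASQueryParameters({
//     containerName: "logs",
//     blobName: "scan/result.sarif",
//     permissions: BlobSASPermissions.parse("r"),
//     expiresOn: new Date(Date.now() + 3600 * 1000),
//   }, cred).toString();
//   const url =
//     `https://myaccount.blob.core.windows.net/logs/scan/result.sarif?${sas}`;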
"/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobPutBlobFromUrlHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders + } + if (version4 >= "2018-11-09") { + if (sharedKeyCredential !== void 0) { + return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); + } else { + if (version4 >= "2020-02-10") { + return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); + } else { + return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); + } } - }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - contentLength, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - encryptionScope, - tier, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceIfTags, - copySource, - blobTagsString, - sourceContentMD5, - copySourceAuthorization, - copySourceTags, - transactionalContentMD5, - blobType2, - copySourceBlobProperties - ], - isXML: true, - serializer: xmlSerializer - }; - var stageBlockOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobStageBlockHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobStageBlockExceptionHeaders + } + if (version4 >= "2015-04-05") { + if (sharedKeyCredential !== void 0) { + return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); + } else { + throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); + } + } + throw new RangeError("'version' must be >= '2015-04-05'."); + } + __name(generateBlobSASQueryParameters, "generateBlobSASQueryParameters"); + function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + if (blobSASSignatureValues.blobName) { + resource = "b"; + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? 
ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); + } + __name(generateBlobSASQueryParameters20150405, "generateBlobSASQueryParameters20150405"); + function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : "" + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); + } + __name(generateBlobSASQueryParameters20181109, "generateBlobSASQueryParameters20181109"); + function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? 
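// Each generateBlobSASQueryParameters* variant above signs a newline-joined
// field list; the 2020-12-06 shared-key layout shown here is:
//
//   permissions \n startsOn \n expiresOn \n
//   /blob/<account>/<container>[/<blob>] \n identifier \n ipRange \n
//   protocol \n version \n resource \n timestamp \n encryptionScope \n
//   cacheControl \n contentDisposition \n contentEncoding \n
//   contentLanguage \n contentType
//
// Unset fields are signed as empty strings, which is why every ternary
// defaults to "": dropping a line instead of leaving it blank would change
// the HMAC and invalidate the signature.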
blobSASSignatureValues.contentType : "" + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, void 0, void 0, void 0, blobSASSignatureValues.encryptionScope); + } + __name(generateBlobSASQueryParameters20201206, "generateBlobSASQueryParameters20201206"); + function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; } - }, - requestBody: body1, - queryParameters: [ - timeoutInSeconds, - comp24, - blockId - ], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - contentLength, - leaseId, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - encryptionScope, - transactionalContentMD5, - transactionalContentCrc64, - contentType1, - accept2 - ], - mediaType: "binary", - serializer - }; - var stageBlockFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobStageBlockFromURLHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobStageBlockFromURLExceptionHeaders + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - }, - queryParameters: [ - timeoutInSeconds, - comp24, - blockId - ], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - contentLength, - leaseId, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - encryptionScope, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceContentMD5, - copySourceAuthorization, - sourceUrl, - sourceContentCrc64, - sourceRange1 - ], - isXML: true, - serializer: xmlSerializer - }; - var commitBlockListOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobCommitBlockListHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobCommitBlockListExceptionHeaders + } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey); + } + __name(generateBlobSASQueryParametersUDK20181109, "generateBlobSASQueryParametersUDK20181109"); + function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; } - }, - requestBody: blocks, - queryParameters: [timeoutInSeconds, comp25], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version3, - requestId, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - tier, - blobTagsString, - legalHold1, - transactionalContentMD5, - transactionalContentCrc64 - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer - }; - var getBlockListOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: 
BlockList, - headersMapper: BlockBlobGetBlockListHeaders - }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobGetBlockListExceptionHeaders + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } - }, - queryParameters: [ - timeoutInSeconds, - snapshot, - comp25, - listType - ], - urlParameters: [url], - headerParameters: [ - version3, - requestId, - accept1, - leaseId, - ifTags - ], - isXML: true, - serializer: xmlSerializer - }; - var logger = logger$1.createClientLogger("storage-blob"); - var SDK_VERSION = "12.17.0"; - var SERVICE_VERSION = "2023-11-03"; - var BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; - var BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4e3 * 1024 * 1024; - var BLOCK_BLOB_MAX_BLOCKS = 5e4; - var DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; - var DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; - var DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; - var REQUEST_TIMEOUT = 100 * 1e3; - var StorageOAuthScopes = "https://storage.azure.com/.default"; - var URLConstants = { - Parameters: { - FORCE_BROWSER_NO_CACHE: "_", - SIGNATURE: "sig", - SNAPSHOT: "snapshot", - VERSIONID: "versionid", - TIMEOUT: "timeout" } - }; - var HTTPURLConnection = { - HTTP_ACCEPTED: 202, - HTTP_CONFLICT: 409, - HTTP_NOT_FOUND: 404, - HTTP_PRECON_FAILED: 412, - HTTP_RANGE_NOT_SATISFIABLE: 416 - }; - var HeaderConstants = { - AUTHORIZATION: "Authorization", - AUTHORIZATION_SCHEME: "Bearer", - CONTENT_ENCODING: "Content-Encoding", - CONTENT_ID: "Content-ID", - CONTENT_LANGUAGE: "Content-Language", - CONTENT_LENGTH: "Content-Length", - CONTENT_MD5: "Content-Md5", - CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", - CONTENT_TYPE: "Content-Type", - COOKIE: "Cookie", - DATE: "date", - IF_MATCH: "if-match", - IF_MODIFIED_SINCE: "if-modified-since", - IF_NONE_MATCH: "if-none-match", - IF_UNMODIFIED_SINCE: "if-unmodified-since", - PREFIX_FOR_STORAGE: "x-ms-", - RANGE: "Range", - USER_AGENT: "User-Agent", - X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", - X_MS_COPY_SOURCE: "x-ms-copy-source", - X_MS_DATE: "x-ms-date", - X_MS_ERROR_CODE: "x-ms-error-code", - X_MS_VERSION: "x-ms-version" - }; - var ETagNone = ""; - var ETagAny = "*"; - var SIZE_1_MB = 1 * 1024 * 1024; - var BATCH_MAX_REQUEST = 256; - var BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; - var HTTP_LINE_ENDING = "\r\n"; - var HTTP_VERSION_1_1 = "HTTP/1.1"; - var EncryptionAlgorithmAES25 = "AES256"; - var DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; - var StorageBlobLoggingAllowedHeaderNames = [ - "Access-Control-Allow-Origin", - "Cache-Control", - "Content-Length", - "Content-Type", - "Date", - "Request-Id", - "traceparent", - "Transfer-Encoding", - "User-Agent", - "x-ms-client-request-id", - "x-ms-date", - "x-ms-error-code", - "x-ms-request-id", - "x-ms-return-client-request-id", - "x-ms-version", - "Accept-Ranges", - "Content-Disposition", - "Content-Encoding", - "Content-Language", - "Content-MD5", - "Content-Range", - "ETag", - "Last-Modified", - "Server", - "Vary", - "x-ms-content-crc64", - "x-ms-copy-action", - 
"x-ms-copy-completion-time", - "x-ms-copy-id", - "x-ms-copy-progress", - "x-ms-copy-status", - "x-ms-has-immutability-policy", - "x-ms-has-legal-hold", - "x-ms-lease-state", - "x-ms-lease-status", - "x-ms-range", - "x-ms-request-server-encrypted", - "x-ms-server-encrypted", - "x-ms-snapshot", - "x-ms-source-range", - "If-Match", - "If-Modified-Since", - "If-None-Match", - "If-Unmodified-Since", - "x-ms-access-tier", - "x-ms-access-tier-change-time", - "x-ms-access-tier-inferred", - "x-ms-account-kind", - "x-ms-archive-status", - "x-ms-blob-append-offset", - "x-ms-blob-cache-control", - "x-ms-blob-committed-block-count", - "x-ms-blob-condition-appendpos", - "x-ms-blob-condition-maxsize", - "x-ms-blob-content-disposition", - "x-ms-blob-content-encoding", - "x-ms-blob-content-language", - "x-ms-blob-content-length", - "x-ms-blob-content-md5", - "x-ms-blob-content-type", - "x-ms-blob-public-access", - "x-ms-blob-sequence-number", - "x-ms-blob-type", - "x-ms-copy-destination-snapshot", - "x-ms-creation-time", - "x-ms-default-encryption-scope", - "x-ms-delete-snapshots", - "x-ms-delete-type-permanent", - "x-ms-deny-encryption-scope-override", - "x-ms-encryption-algorithm", - "x-ms-if-sequence-number-eq", - "x-ms-if-sequence-number-le", - "x-ms-if-sequence-number-lt", - "x-ms-incremental-copy", - "x-ms-lease-action", - "x-ms-lease-break-period", - "x-ms-lease-duration", - "x-ms-lease-id", - "x-ms-lease-time", - "x-ms-page-write", - "x-ms-proposed-lease-id", - "x-ms-range-get-content-md5", - "x-ms-rehydrate-priority", - "x-ms-sequence-number-action", - "x-ms-sku-name", - "x-ms-source-content-md5", - "x-ms-source-if-match", - "x-ms-source-if-modified-since", - "x-ms-source-if-none-match", - "x-ms-source-if-unmodified-since", - "x-ms-tag-count", - "x-ms-encryption-key-sha256", - "x-ms-if-tags", - "x-ms-source-if-tags" - ]; - var StorageBlobLoggingAllowedQueryParameters = [ - "comp", - "maxresults", - "rscc", - "rscd", - "rsce", - "rscl", - "rsct", - "se", - "si", - "sip", - "sp", - "spr", - "sr", - "srt", - "ss", - "st", - "sv", - "include", - "marker", - "prefix", - "copyid", - "restype", - "blockid", - "blocklisttype", - "delimiter", - "prevsnapshot", - "ske", - "skoid", - "sks", - "skt", - "sktid", - "skv", - "snapshot" - ]; - var BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; - var BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; - var PathStylePorts = [ - "10000", - "10001", - "10002", - "10003", - "10004", - "10100", - "10101", - "10102", - "10103", - "10104", - "11000", - "11001", - "11002", - "11003", - "11004", - "11100", - "11101", - "11102", - "11103", - "11104" - ]; - function escapeURLPath(url2) { - const urlParsed = coreHttp.URLBuilder.parse(url2); - let path2 = urlParsed.getPath(); - path2 = path2 || "/"; - path2 = escape(path2); - urlParsed.setPath(path2); - return urlParsed.toString(); + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn ? 
truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + void 0, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId); + } + __name(generateBlobSASQueryParametersUDK20200210, "generateBlobSASQueryParametersUDK20200210"); + function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", + blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn ? 
truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + void 0, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope); } - __name(escapeURLPath, "escapeURLPath"); - function getProxyUriFromDevConnString(connectionString) { - let proxyUri = ""; - if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { - const matchCredentials = connectionString.split(";"); - for (const element of matchCredentials) { - if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { - proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; - } - } + __name(generateBlobSASQueryParametersUDK20201206, "generateBlobSASQueryParametersUDK20201206"); + function getCanonicalName(accountName, containerName, blobName) { + const elements = [`/blob/${accountName}/${containerName}`]; + if (blobName) { + elements.push(`/${blobName}`); } - return proxyUri; + return elements.join(""); } - __name(getProxyUriFromDevConnString, "getProxyUriFromDevConnString"); - function getValueInConnString(connectionString, argument) { - const elements = connectionString.split(";"); - for (const element of elements) { - if (element.trim().startsWith(argument)) { - return element.trim().match(argument + "=(.*)")[1]; - } + __name(getCanonicalName, "getCanonicalName"); + function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { + const version4 = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION; + if (blobSASSignatureValues.snapshotTime && version4 < "2018-11-09") { + throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); } - return ""; + if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.snapshotTime) { + throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.versionId && version4 < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); + } + if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.versionId) { + throw RangeError("Must provide 'blobName' when providing 'versionId'."); + } + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.setImmutabilityPolicy && version4 < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.deleteVersion && version4 < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); + } + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.permanentDelete && version4 < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + } + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.tag && version4 < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); + } + if (version4 < "2020-02-10" && blobSASSignatureValues.permissions && (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { + throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); + } + if (version4 < "2021-04-10" && blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + } + if (version4 < "2020-02-10" && (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { + throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); + } + if (blobSASSignatureValues.encryptionScope && version4 < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + blobSASSignatureValues.version = version4; + return blobSASSignatureValues; } - __name(getValueInConnString, "getValueInConnString"); - function extractConnectionStringParts(connectionString) { - let proxyUri = ""; - if (connectionString.startsWith("UseDevelopmentStorage=true")) { - proxyUri = getProxyUriFromDevConnString(connectionString); - connectionString = DevelopmentConnectionString; + __name(SASSignatureValuesSanityCheckAndAutofill, "SASSignatureValuesSanityCheckAndAutofill"); + var BlobLeaseClient = class { + static { + __name(this, "BlobLeaseClient"); } - let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); - blobEndpoint = blobEndpoint.endsWith("/") ? 
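// SASSignatureValuesSanityCheckAndAutofill above enforces the minimum service
// version for each newer SAS feature before anything is signed. For example,
// this hypothetical combination throws, because tag filtering ('f') requires
// version 2021-04-10 (`cred` is a placeholder credential):
//
//   generateBlobSASQueryParameters({
//     containerName: "logs",
//     permissions: ContainerSASPermissions.parse("rf"),
//     expiresOn: new Date(Date.now() + 3600 * 1000),
//     version: "2020-02-10",          // too old for the 'f' permission
//   }, cred);                         // -> RangeError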
blobEndpoint.slice(0, -1) : blobEndpoint; - if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && connectionString.search("AccountKey=") !== -1) { - let defaultEndpointsProtocol = ""; - let accountName = ""; - let accountKey = Buffer.from("accountKey", "base64"); - let endpointSuffix = ""; - accountName = getValueInConnString(connectionString, "AccountName"); - accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); - if (!blobEndpoint) { - defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); - const protocol = defaultEndpointsProtocol.toLowerCase(); - if (protocol !== "https" && protocol !== "http") { - throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); - } - endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); - if (!endpointSuffix) { - throw new Error("Invalid EndpointSuffix in the provided Connection String"); - } - blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. + */ + constructor(client, leaseId2) { + const clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions()); + this._url = client.url; + if (client.name === void 0) { + this._isContainer = true; + this._containerOrBlobOperation = new Container(clientContext); + } else { + this._isContainer = false; + this._containerOrBlobOperation = new Blob$1(clientContext); } - if (!accountName) { - throw new Error("Invalid AccountName in the provided Connection String"); - } else if (accountKey.length === 0) { - throw new Error("Invalid AccountKey in the provided Connection String"); + if (!leaseId2) { + leaseId2 = coreHttp.generateUuid(); } - return { - kind: "AccountConnString", - url: blobEndpoint, - accountName, - accountKey, - proxyUri - }; - } else { - const accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); - let accountName = getValueInConnString(connectionString, "AccountName"); - if (!accountName) { - accountName = getAccountNameFromUrl(blobEndpoint); + this._leaseId = leaseId2; + } + /** + * Gets the lease Id. + * + * @readonly + */ + get leaseId() { + return this._leaseId; + } + /** + * Gets the url. + * + * @readonly + */ + get url() { + return this._url; + } + /** + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. + */ + async acquireLease(duration2, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-acquireLease", options); + if (this._isContainer && (((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? 
void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } - if (!blobEndpoint) { - throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); - } else if (!accountSas) { - throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + try { + return await this._containerOrBlobOperation.acquireLease(Object.assign({ abortSignal: options.abortSignal, duration: duration2, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } - return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; } - } - __name(extractConnectionStringParts, "extractConnectionStringParts"); - function escape(text) { - return encodeURIComponent(text).replace(/%2F/g, "/").replace(/'/g, "%27").replace(/\+/g, "%20").replace(/%25/g, "%"); - } - __name(escape, "escape"); - function appendToURLPath(url2, name) { - const urlParsed = coreHttp.URLBuilder.parse(url2); - let path2 = urlParsed.getPath(); - path2 = path2 ? path2.endsWith("/") ? `${path2}${name}` : `${path2}/${name}` : name; - urlParsed.setPath(path2); - const normalizedUrl = new URL(urlParsed.toString()); - return normalizedUrl.toString(); - } - __name(appendToURLPath, "appendToURLPath"); - function setURLParameter(url2, name, value) { - const urlParsed = coreHttp.URLBuilder.parse(url2); - urlParsed.setQueryParameter(name, value); - return urlParsed.toString(); - } - __name(setURLParameter, "setURLParameter"); - function getURLParameter(url2, name) { - const urlParsed = coreHttp.URLBuilder.parse(url2); - return urlParsed.getQueryParameterValue(name); - } - __name(getURLParameter, "getURLParameter"); - function setURLHost(url2, host) { - const urlParsed = coreHttp.URLBuilder.parse(url2); - urlParsed.setHost(host); - return urlParsed.toString(); - } - __name(setURLHost, "setURLHost"); - function getURLPath(url2) { - const urlParsed = coreHttp.URLBuilder.parse(url2); - return urlParsed.getPath(); - } - __name(getURLPath, "getURLPath"); - function getURLScheme(url2) { - const urlParsed = coreHttp.URLBuilder.parse(url2); - return urlParsed.getScheme(); - } - __name(getURLScheme, "getURLScheme"); - function getURLPathAndQuery(url2) { - const urlParsed = coreHttp.URLBuilder.parse(url2); - const pathString = urlParsed.getPath(); - if (!pathString) { - throw new RangeError("Invalid url without valid path."); + /** + * To change the ID of the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. 
+ */ + async changeLease(proposedLeaseId2, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-changeLease", options); + if (this._isContainer && (((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + const response = await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId2, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + this._leaseId = proposedLeaseId2; + return response; + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } + } + /** + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. + */ + async releaseLease(options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-releaseLease", options); + if (this._isContainer && (((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.releaseLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } + } + /** + * To renew the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. 
+ */ + async renewLease(options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-renewLease", options); + if (this._isContainer && (((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.renewLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } + } + /** + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. + */ + async breakLease(breakPeriod2, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-breakLease", options); + if (this._isContainer && (((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + const operationOptions = Object.assign({ abortSignal: options.abortSignal, breakPeriod: breakPeriod2, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)); + return await this._containerOrBlobOperation.breakLease(operationOptions); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } + } + }; + var RetriableReadableStream = class extends stream.Readable { + static { + __name(this, "RetriableReadableStream"); + } + /** + * Creates an instance of RetriableReadableStream. 
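// BlobLeaseClient above wraps the container/blob lease verbs (acquire,
// change, release, renew, break) around a stored lease id. A hedged usage
// sketch; `blobClient` is a placeholder for an existing BlobClient:
//
//   const leaseClient = blobClient.getBlobLeaseClient(); // id auto-generated
//   await leaseClient.acquireLease(30);    // 15-60 seconds, or -1 = infinite
//   try {
//     // ...exclusive writes against the blob, passing the lease id via
//     // conditions: { leaseId: leaseClient.leaseId }
//   } finally {
//     await leaseClient.releaseLease();    // let others acquire immediately
//   }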
+ * + * @param source - The current ReadableStream returned from getter + * @param getter - A method calling downloading request returning + * a new ReadableStream from specified offset + * @param offset - Offset position in original data source to read + * @param count - How much data in original data source to read + * @param options - + */ + constructor(source, getter, offset, count, options = {}) { + super({ highWaterMark: options.highWaterMark }); + this.retries = 0; + this.sourceDataHandler = (data) => { + if (this.options.doInjectErrorOnce) { + this.options.doInjectErrorOnce = void 0; + this.source.pause(); + this.source.removeAllListeners("data"); + this.source.emit("end"); + return; + } + this.offset += data.length; + if (this.onProgress) { + this.onProgress({ loadedBytes: this.offset - this.start }); + } + if (!this.push(data)) { + this.source.pause(); + } + }; + this.sourceErrorOrEndHandler = (err) => { + if (err && err.name === "AbortError") { + this.destroy(err); + return; + } + this.removeSourceEventHandlers(); + if (this.offset - 1 === this.end) { + this.push(null); + } else if (this.offset <= this.end) { + if (this.retries < this.maxRetryRequests) { + this.retries += 1; + this.getter(this.offset).then((newSource) => { + this.source = newSource; + this.setSourceEventHandlers(); + return; + }).catch((error) => { + this.destroy(error); + }); + } else { + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + } + } else { + this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); + } + }; + this.getter = getter; + this.source = source; + this.start = offset; + this.offset = offset; + this.end = offset + count - 1; + this.maxRetryRequests = options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; + this.onProgress = options.onProgress; + this.options = options; + this.setSourceEventHandlers(); + } + _read() { + this.source.resume(); + } + setSourceEventHandlers() { + this.source.on("data", this.sourceDataHandler); + this.source.on("end", this.sourceErrorOrEndHandler); + this.source.on("error", this.sourceErrorOrEndHandler); + } + removeSourceEventHandlers() { + this.source.removeListener("data", this.sourceDataHandler); + this.source.removeListener("end", this.sourceErrorOrEndHandler); + this.source.removeListener("error", this.sourceErrorOrEndHandler); + } + _destroy(error, callback) { + this.removeSourceEventHandlers(); + this.source.destroy(); + callback(error === null ? void 0 : error); + } + }; + var BlobDownloadResponse = class { + static { + __name(this, "BlobDownloadResponse"); + } + /** + * Creates an instance of BlobDownloadResponse. + * + * @param originalResponse - + * @param getter - + * @param offset - + * @param count - + * @param options - + */ + constructor(originalResponse, getter, offset, count, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); + } + /** + * Indicates that the service supports + * requests for partial file content. 
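// --- Editorial sketch (assumptions flagged, not part of the bundle): the
// resume-on-truncation idea behind RetriableReadableStream above, reduced to a
// loop. `getter` is a hypothetical function that re-issues a ranged download
// starting at `offset` and resolves to an async-iterable stream of Buffers.
async function readWithResume(getter, offset, count, maxRetryRequests = 3) {
  const end = offset + count - 1; // last byte expected, as computed above
  const chunks = [];
  let retries = 0;
  while (offset <= end) {
    const source = await getter(offset); // fresh stream from the current offset
    for await (const chunk of source) {
      chunks.push(chunk);
      offset += chunk.length;
    }
    // The source ended early: retry from the new offset or give up.
    if (offset <= end && ++retries > maxRetryRequests) {
      throw new Error("received less data than required");
    }
  }
  return Buffer.concat(chunks);
}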
+ * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; } - let queryString = urlParsed.getQuery() || ""; - queryString = queryString.trim(); - if (queryString !== "") { - queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; } - return `${pathString}${queryString}`; - } - __name(getURLPathAndQuery, "getURLPathAndQuery"); - function getURLQueries(url2) { - let queryString = coreHttp.URLBuilder.parse(url2).getQuery(); - if (!queryString) { - return {}; + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; } - queryString = queryString.trim(); - queryString = queryString.startsWith("?") ? queryString.substr(1) : queryString; - let querySubStrings = queryString.split("&"); - querySubStrings = querySubStrings.filter((value) => { - const indexOfEqual = value.indexOf("="); - const lastIndexOfEqual = value.lastIndexOf("="); - return indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1; - }); - const queries = {}; - for (const querySubString of querySubStrings) { - const splitResults = querySubString.split("="); - const key = splitResults[0]; - const value = splitResults[1]; - queries[key] = value; + /** + * The content type specified for the file. 
+ * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; } - return queries; - } - __name(getURLQueries, "getURLQueries"); - function appendToURLQuery(url2, queryParts) { - const urlParsed = coreHttp.URLBuilder.parse(url2); - let query = urlParsed.getQuery(); - if (query) { - query += "&" + queryParts; - } else { - query = queryParts; + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return this.originalResponse.copyCompletedOn; } - urlParsed.setQuery(query); - return urlParsed.toString(); - } - __name(appendToURLQuery, "appendToURLQuery"); - function truncatedISO8061Date(date, withMilliseconds = true) { - const dateString = date.toISOString(); - return withMilliseconds ? dateString.substring(0, dateString.length - 1) + "0000Z" : dateString.substring(0, dateString.length - 5) + "Z"; - } - __name(truncatedISO8061Date, "truncatedISO8061Date"); - function base64encode(content) { - return !coreHttp.isNode ? btoa(content) : Buffer.from(content).toString("base64"); - } - __name(base64encode, "base64encode"); - function generateBlockID(blockIDPrefix, blockIndex) { - const maxSourceStringLength = 48; - const maxBlockIndexLength = 6; - const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; - if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { - blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; } - const res = blockIDPrefix + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); - return base64encode(res); - } - __name(generateBlockID, "generateBlockID"); - async function delay(timeInMs, aborter, abortError) { - return new Promise((resolve, reject) => { - let timeout; - const abortHandler = /* @__PURE__ */ __name(() => { - if (timeout !== void 0) { - clearTimeout(timeout); - } - reject(abortError); - }, "abortHandler"); - const resolveHandler = /* @__PURE__ */ __name(() => { - if (aborter !== void 0) { - aborter.removeEventListener("abort", abortHandler); - } - resolve(); - }, "resolveHandler"); - timeout = setTimeout(resolveHandler, timeInMs); - if (aborter !== void 0) { - aborter.addEventListener("abort", abortHandler); - } - }); - } - __name(delay, "delay"); - function padStart(currentString, targetLength, padString = " ") { - if (String.prototype.padStart) { - return currentString.padStart(targetLength, padString); + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. 
+ * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; } - padString = padString || " "; - if (currentString.length > targetLength) { - return currentString; - } else { - targetLength = targetLength - currentString.length; - if (targetLength > padString.length) { - padString += padString.repeat(targetLength / padString.length); - } - return padString.slice(0, targetLength) + currentString; + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; } - } - __name(padStart, "padStart"); - function iEqual(str1, str2) { - return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); - } - __name(iEqual, "iEqual"); - function getAccountNameFromUrl(url2) { - const parsedUrl = coreHttp.URLBuilder.parse(url2); - let accountName; - try { - if (parsedUrl.getHost().split(".")[1] === "blob") { - accountName = parsedUrl.getHost().split(".")[0]; - } else if (isIpEndpointStyle(parsedUrl)) { - accountName = parsedUrl.getPath().split("/")[1]; - } else { - accountName = ""; - } - return accountName; - } catch (error) { - throw new Error("Unable to extract accountName with provided information."); + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; } - } - __name(getAccountNameFromUrl, "getAccountNameFromUrl"); - function isIpEndpointStyle(parsedUrl) { - if (parsedUrl.getHost() === void 0) { - return false; + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; } - const host = parsedUrl.getHost() + (parsedUrl.getPort() === void 0 ? "" : ":" + parsedUrl.getPort()); - return /^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || parsedUrl.getPort() !== void 0 && PathStylePorts.includes(parsedUrl.getPort()); - } - __name(isIpEndpointStyle, "isIpEndpointStyle"); - function toBlobTagsString(tags2) { - if (tags2 === void 0) { - return void 0; + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; } - const tagPairs = []; - for (const key in tags2) { - if (Object.prototype.hasOwnProperty.call(tags2, key)) { - const value = tags2[key]; - tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); - } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; } - return tagPairs.join("&"); - } - __name(toBlobTagsString, "toBlobTagsString"); - function toBlobTags(tags2) { - if (tags2 === void 0) { - return void 0; + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. 
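// --- Editorial sketch (not part of the bundle): the getters in this class all
// proxy headers from the original download response, so a caller reads blob
// metadata and the retriable body from a single object. `blobClient` is a
// placeholder @azure/storage-blob BlobClient; "out.bin" is a placeholder path.
const fs = require("fs");

async function downloadDemo(blobClient) {
  const download = await blobClient.download();
  console.log(download.contentType, download.etag, download.lastModified);
  download.readableStreamBody.pipe(fs.createWriteStream("out.bin")); // Node.js only
}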
+ * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; } - const res = { - blobTagSet: [] - }; - for (const key in tags2) { - if (Object.prototype.hasOwnProperty.call(tags2, key)) { - const value = tags2[key]; - res.blobTagSet.push({ - key, - value - }); - } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; } - return res; - } - __name(toBlobTags, "toBlobTags"); - function toTags(tags2) { - if (tags2 === void 0) { - return void 0; + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; } - const res = {}; - for (const blobTag of tags2.blobTagSet) { - res[blobTag.key] = blobTag.value; + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; } - return res; - } - __name(toTags, "toTags"); - function toQuerySerialization(textConfiguration) { - if (textConfiguration === void 0) { - return void 0; + /** + * The number of tags associated with the blob + * + * @readonly + */ + get tagCount() { + return this.originalResponse.tagCount; } - switch (textConfiguration.kind) { - case "csv": - return { - format: { - type: "delimited", - delimitedTextConfiguration: { - columnSeparator: textConfiguration.columnSeparator || ",", - fieldQuote: textConfiguration.fieldQuote || "", - recordSeparator: textConfiguration.recordSeparator, - escapeChar: textConfiguration.escapeCharacter || "", - headersPresent: textConfiguration.hasHeaders || false - } - } - }; - case "json": - return { - format: { - type: "json", - jsonTextConfiguration: { - recordSeparator: textConfiguration.recordSeparator - } - } - }; - case "arrow": - return { - format: { - type: "arrow", - arrowConfiguration: { - schema: textConfiguration.schema - } - } - }; - case "parquet": - return { - format: { - type: "parquet" - } - }; - default: - throw Error("Invalid BlobQueryTextConfiguration."); + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; } - } - __name(toQuerySerialization, "toQuerySerialization"); - function parseObjectReplicationRecord(objectReplicationRecord) { - if (!objectReplicationRecord) { - return void 0; + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; } - if ("policy-id" in objectReplicationRecord) { - return void 0; + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. 
+ * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; } - const orProperties = []; - for (const key in objectReplicationRecord) { - const ids = key.split("_"); - const policyPrefix = "or-"; - if (ids[0].startsWith(policyPrefix)) { - ids[0] = ids[0].substring(policyPrefix.length); - } - const rule = { - ruleId: ids[1], - replicationStatus: objectReplicationRecord[key] - }; - const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); - if (policyIndex > -1) { - orProperties[policyIndex].rules.push(rule); - } else { - orProperties.push({ - policyId: ids[0], - rules: [rule] - }); - } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; } - return orProperties; - } - __name(parseObjectReplicationRecord, "parseObjectReplicationRecord"); - function attachCredential(thing, credential) { - thing.credential = credential; - return thing; - } - __name(attachCredential, "attachCredential"); - function httpAuthorizationToString(httpAuthorization) { - return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : void 0; - } - __name(httpAuthorizationToString, "httpAuthorizationToString"); - function BlobNameToString(name) { - if (name.encoded) { - return decodeURIComponent(name.content); - } else { - return name.content; + /** + * Returns the UTC date and time generated by the service that indicates the time at which the blob was + * last read or written to. + * + * @readonly + */ + get lastAccessed() { + return this.originalResponse.lastAccessed; } - } - __name(BlobNameToString, "BlobNameToString"); - function ConvertInternalResponseOfListBlobFlat(internalResponse) { - return Object.assign(Object.assign({}, internalResponse), { segment: { - blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); - return blobItem; - }) - } }); - } - __name(ConvertInternalResponseOfListBlobFlat, "ConvertInternalResponseOfListBlobFlat"); - function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { - var _a; - return Object.assign(Object.assign({}, internalResponse), { segment: { - blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { - const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); - return blobPrefix; - }), - blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); - return blobItem; - }) - } }); - } - __name(ConvertInternalResponseOfListBlobHierarchy, "ConvertInternalResponseOfListBlobHierarchy"); - function* ExtractPageRangeInfoItems(getPageRangesSegment) { - let pageRange = []; - let clearRange = []; - if (getPageRangesSegment.pageRange) - pageRange = getPageRangesSegment.pageRange; - if (getPageRangesSegment.clearRange) - clearRange = getPageRangesSegment.clearRange; - let pageRangeIndex = 0; - let clearRangeIndex = 0; - while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { - if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { - yield { - start: pageRange[pageRangeIndex].start, - end: pageRange[pageRangeIndex].end, - isClear: false - }; - ++pageRangeIndex; - } else { - yield { - start: clearRange[clearRangeIndex].start, - end: clearRange[clearRangeIndex].end, - isClear: true - }; - ++clearRangeIndex; - } + /** + * Returns the date and time the blob was created. + * + * @readonly + */ + get createdOn() { + return this.originalResponse.createdOn; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; } - for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { - yield { - start: pageRange[pageRangeIndex].start, - end: pageRange[pageRangeIndex].end, - isClear: false - }; + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; } - for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { - yield { - start: clearRange[clearRangeIndex].start, - end: clearRange[clearRangeIndex].end, - isClear: true - }; + /** + * Indicates the version of the Blob service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; } - } - __name(ExtractPageRangeInfoItems, "ExtractPageRangeInfoItems"); - function EscapePath(blobName) { - const split = blobName.split("/"); - for (let i = 0; i < split.length; i++) { - split[i] = encodeURIComponent(split[i]); + /** + * Indicates the versionId of the downloaded blob version. + * + * @readonly + */ + get versionId() { + return this.originalResponse.versionId; } - return split.join("/"); - } - __name(EscapePath, "EscapePath"); - var StorageBrowserPolicy = class extends coreHttp.BaseRequestPolicy { - static { - __name(this, "StorageBrowserPolicy"); + /** + * Indicates whether version of this blob is a current version. + * + * @readonly + */ + get isCurrentVersion() { + return this.originalResponse.isCurrentVersion; } /** - * Creates an instance of StorageBrowserPolicy. - * @param nextPolicy - - * @param options - + * The SHA-256 hash of the encryption key used to encrypt the blob. 
This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly */ - // The base class has a protected constructor. Adding a public one to enable constructing of this class. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { - super(nextPolicy, options); + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; } /** - * Sends out request. + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * Object Replication Policy Id of the destination blob. * - * @param request - + * @readonly */ - async sendRequest(request) { - if (coreHttp.isNode) { - return this._nextPolicy.sendRequest(request); - } - if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { - request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, (/* @__PURE__ */ new Date()).getTime().toString()); - } - request.headers.remove(HeaderConstants.COOKIE); - request.headers.remove(HeaderConstants.CONTENT_LENGTH); - return this._nextPolicy.sendRequest(request); + get objectReplicationDestinationPolicyId() { + return this.originalResponse.objectReplicationDestinationPolicyId; } - }; - var StorageBrowserPolicyFactory = class { - static { - __name(this, "StorageBrowserPolicyFactory"); + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + * + * @readonly + */ + get objectReplicationSourceProperties() { + return this.originalResponse.objectReplicationSourceProperties; } /** - * Creates a StorageBrowserPolicyFactory object. + * If this blob has been sealed. * - * @param nextPolicy - - * @param options - + * @readonly */ - create(nextPolicy, options) { - return new StorageBrowserPolicy(nextPolicy, options); + get isSealed() { + return this.originalResponse.isSealed; } - }; - exports2.StorageRetryPolicyType = void 0; - (function(StorageRetryPolicyType) { - StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; - StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; - })(exports2.StorageRetryPolicyType || (exports2.StorageRetryPolicyType = {})); - var DEFAULT_RETRY_OPTIONS = { - maxRetryDelayInMs: 120 * 1e3, - maxTries: 4, - retryDelayInMs: 4 * 1e3, - retryPolicyType: exports2.StorageRetryPolicyType.EXPONENTIAL, - secondaryHost: "", - tryTimeoutInMs: void 0 - // Use server side default timeout strategy - }; - var RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); - var StorageRetryPolicy = class extends coreHttp.BaseRequestPolicy { - static { - __name(this, "StorageRetryPolicy"); + /** + * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. + * + * @readonly + */ + get immutabilityPolicyExpiresOn() { + return this.originalResponse.immutabilityPolicyExpiresOn; } /** - * Creates an instance of RetryPolicy. + * Indicates immutability policy mode. 
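// --- Editorial sketch (assumption: CpkInfo field names as in the public SDK):
// the encryptionKeySha256 getter above echoes the customer-provided key (CPK)
// sent with the request. The key material below is generated for illustration.
const crypto = require("crypto");

function makeCpk() {
  const key = crypto.randomBytes(32); // placeholder 256-bit AES key
  return {
    encryptionKey: key.toString("base64"),
    encryptionKeySha256: crypto.createHash("sha256").update(key).digest("base64"),
    encryptionAlgorithm: "AES256",
  };
}
// Hypothetical usage: blobClient.download(0, undefined, { customerProvidedKey: makeCpk() })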
* - * @param nextPolicy - - * @param options - - * @param retryOptions - + * @readonly */ - constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { - super(nextPolicy, options); - this.retryOptions = { - retryPolicyType: retryOptions.retryPolicyType ? retryOptions.retryPolicyType : DEFAULT_RETRY_OPTIONS.retryPolicyType, - maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 ? Math.floor(retryOptions.maxTries) : DEFAULT_RETRY_OPTIONS.maxTries, - tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 ? retryOptions.tryTimeoutInMs : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, - retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) : DEFAULT_RETRY_OPTIONS.retryDelayInMs, - maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, - secondaryHost: retryOptions.secondaryHost ? retryOptions.secondaryHost : DEFAULT_RETRY_OPTIONS.secondaryHost - }; + get immutabilityPolicyMode() { + return this.originalResponse.immutabilityPolicyMode; } /** - * Sends request. + * Indicates if a legal hold is present on the blob. * - * @param request - + * @readonly */ - async sendRequest(request) { - return this.attemptSendRequest(request, false, 1); + get legalHold() { + return this.originalResponse.legalHold; } /** - * Decide and perform next retry. Won't mutate request parameter. + * The response body as a browser Blob. + * Always undefined in node.js. * - * @param request - - * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then - * the resource was not found. This may be due to replication delay. So, in this - * case, we'll never try the secondary again for this operation. - * @param attempt - How many retries has been attempted to performed, starting from 1, which includes - * the attempt will be performed by this method call. + * @readonly */ - async attemptSendRequest(request, secondaryHas404, attempt) { - const newRequest = request.clone(); - const isPrimaryRetry = secondaryHas404 || !this.retryOptions.secondaryHost || !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || attempt % 2 === 1; - if (!isPrimaryRetry) { - newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); - } - if (this.retryOptions.tryTimeoutInMs) { - newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1e3).toString()); - } - let response; - try { - logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); - response = await this._nextPolicy.sendRequest(newRequest); - if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { - return response; - } - secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; - } catch (err) { - logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); - if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { - throw err; - } + get contentAsBlob() { + return this.originalResponse.blobBody; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will automatically retry when internal read stream unexpected ends. 
+ * + * @readonly + */ + get readableStreamBody() { + return coreHttp.isNode ? this.blobDownloadStream : void 0; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } + }; + var AVRO_SYNC_MARKER_SIZE = 16; + var AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); + var AVRO_CODEC_KEY = "avro.codec"; + var AVRO_SCHEMA_KEY = "avro.schema"; + var AvroParser = class _AvroParser { + static { + __name(this, "AvroParser"); + } + /** + * Reads a fixed number of bytes from the stream. + * + * @param stream - + * @param length - + * @param options - + */ + static async readFixedBytes(stream2, length, options = {}) { + const bytes = await stream2.read(length, { abortSignal: options.abortSignal }); + if (bytes.length !== length) { + throw new Error("Hit stream end."); } - await this.delay(isPrimaryRetry, attempt, request.abortSignal); - return this.attemptSendRequest(request, secondaryHas404, ++attempt); + return bytes; } /** - * Decide whether to retry according to last HTTP response and retry counters. + * Reads a single byte from the stream. * - * @param isPrimaryRetry - - * @param attempt - - * @param response - - * @param err - + * @param stream - + * @param options - */ - shouldRetry(isPrimaryRetry, attempt, response, err) { - if (attempt >= this.retryOptions.maxTries) { - logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions.maxTries}, no further try.`); + static async readByte(stream2, options = {}) { + const buf = await _AvroParser.readFixedBytes(stream2, 1, options); + return buf[0]; + } + // int and long are stored in variable-length zig-zag coding. + // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt + // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types + static async readZigZagLong(stream2, options = {}) { + let zigZagEncoded = 0; + let significanceInBit = 0; + let byte, haveMoreByte, significanceInFloat; + do { + byte = await _AvroParser.readByte(stream2, options); + haveMoreByte = byte & 128; + zigZagEncoded |= (byte & 127) << significanceInBit; + significanceInBit += 7; + } while (haveMoreByte && significanceInBit < 28); + if (haveMoreByte) { + zigZagEncoded = zigZagEncoded; + significanceInFloat = 268435456; + do { + byte = await _AvroParser.readByte(stream2, options); + zigZagEncoded += (byte & 127) * significanceInFloat; + significanceInFloat *= 128; + } while (byte & 128); + const res = (zigZagEncoded % 2 ? 
-(zigZagEncoded + 1) : zigZagEncoded) / 2; + if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { + throw new Error("Integer overflow."); + } + return res; + } + return zigZagEncoded >> 1 ^ -(zigZagEncoded & 1); + } + static async readLong(stream2, options = {}) { + return _AvroParser.readZigZagLong(stream2, options); + } + static async readInt(stream2, options = {}) { + return _AvroParser.readZigZagLong(stream2, options); + } + static async readNull() { + return null; + } + static async readBoolean(stream2, options = {}) { + const b = await _AvroParser.readByte(stream2, options); + if (b === 1) { + return true; + } else if (b === 0) { return false; + } else { + throw new Error("Byte was not a boolean."); } - const retriableErrors = [ - "ETIMEDOUT", - "ESOCKETTIMEDOUT", - "ECONNREFUSED", - "ECONNRESET", - "ENOENT", - "ENOTFOUND", - "TIMEOUT", - "EPIPE", - "REQUEST_SEND_ERROR" - // For default xhr based http client provided in ms-rest-js - ]; - if (err) { - for (const retriableError of retriableErrors) { - if (err.name.toUpperCase().includes(retriableError) || err.message.toUpperCase().includes(retriableError) || err.code && err.code.toString().toUpperCase() === retriableError) { - logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); - return true; - } - } + } + static async readFloat(stream2, options = {}) { + const u8arr = await _AvroParser.readFixedBytes(stream2, 4, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat32(0, true); + } + static async readDouble(stream2, options = {}) { + const u8arr = await _AvroParser.readFixedBytes(stream2, 8, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat64(0, true); + } + static async readBytes(stream2, options = {}) { + const size = await _AvroParser.readLong(stream2, options); + if (size < 0) { + throw new Error("Bytes size was negative."); } - if (response || err) { - const statusCode = response ? response.status : err ? 
err.statusCode : 0; - if (!isPrimaryRetry && statusCode === 404) { - logger.info(`RetryPolicy: Secondary access with 404, will retry.`); - return true; + return stream2.read(size, { abortSignal: options.abortSignal }); + } + static async readString(stream2, options = {}) { + const u8arr = await _AvroParser.readBytes(stream2, options); + const utf8decoder = new TextDecoder(); + return utf8decoder.decode(u8arr); + } + static async readMapPair(stream2, readItemMethod, options = {}) { + const key = await _AvroParser.readString(stream2, options); + const value = await readItemMethod(stream2, options); + return { key, value }; + } + static async readMap(stream2, readItemMethod, options = {}) { + const readPairMethod = /* @__PURE__ */ __name((s, opts = {}) => { + return _AvroParser.readMapPair(s, readItemMethod, opts); + }, "readPairMethod"); + const pairs = await _AvroParser.readArray(stream2, readPairMethod, options); + const dict = {}; + for (const pair of pairs) { + dict[pair.key] = pair.value; + } + return dict; + } + static async readArray(stream2, readItemMethod, options = {}) { + const items = []; + for (let count = await _AvroParser.readLong(stream2, options); count !== 0; count = await _AvroParser.readLong(stream2, options)) { + if (count < 0) { + await _AvroParser.readLong(stream2, options); + count = -count; } - if (statusCode === 503 || statusCode === 500) { - logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); - return true; + while (count--) { + const item = await readItemMethod(stream2, options); + items.push(item); } } - if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { - logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); - return true; - } - return false; + return items; + } + }; + var AvroComplex; + (function(AvroComplex2) { + AvroComplex2["RECORD"] = "record"; + AvroComplex2["ENUM"] = "enum"; + AvroComplex2["ARRAY"] = "array"; + AvroComplex2["MAP"] = "map"; + AvroComplex2["UNION"] = "union"; + AvroComplex2["FIXED"] = "fixed"; + })(AvroComplex || (AvroComplex = {})); + var AvroPrimitive; + (function(AvroPrimitive2) { + AvroPrimitive2["NULL"] = "null"; + AvroPrimitive2["BOOLEAN"] = "boolean"; + AvroPrimitive2["INT"] = "int"; + AvroPrimitive2["LONG"] = "long"; + AvroPrimitive2["FLOAT"] = "float"; + AvroPrimitive2["DOUBLE"] = "double"; + AvroPrimitive2["BYTES"] = "bytes"; + AvroPrimitive2["STRING"] = "string"; + })(AvroPrimitive || (AvroPrimitive = {})); + var AvroType = class _AvroType { + static { + __name(this, "AvroType"); } /** - * Delay a calculated time between retries. - * - * @param isPrimaryRetry - - * @param attempt - - * @param abortSignal - + * Determines the AvroType from the Avro Schema. 
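// --- Editorial sketch: the zig-zag coding decoded by readZigZagLong() above.
// Zig-zag interleaves signed values into unsigned ones (0, -1, 1, -2, 2 map to
// 0, 1, 2, 3, 4) so small magnitudes stay short in the varint encoding.
function zigZagEncode(n) { return (n << 1) ^ (n >> 31); } // 32-bit illustration
function zigZagDecode(u) { return (u >>> 1) ^ -(u & 1); } // mirrors the return above
console.log(zigZagEncode(-3)); // 5
console.log(zigZagDecode(5)); // -3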
*/ - async delay(isPrimaryRetry, attempt, abortSignal) { - let delayTimeInMs = 0; - if (isPrimaryRetry) { - switch (this.retryOptions.retryPolicyType) { - case exports2.StorageRetryPolicyType.EXPONENTIAL: - delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); - break; - case exports2.StorageRetryPolicyType.FIXED: - delayTimeInMs = this.retryOptions.retryDelayInMs; - break; - } + static fromSchema(schema) { + if (typeof schema === "string") { + return _AvroType.fromStringSchema(schema); + } else if (Array.isArray(schema)) { + return _AvroType.fromArraySchema(schema); } else { - delayTimeInMs = Math.random() * 1e3; + return _AvroType.fromObjectSchema(schema); + } + } + static fromStringSchema(schema) { + switch (schema) { + case AvroPrimitive.NULL: + case AvroPrimitive.BOOLEAN: + case AvroPrimitive.INT: + case AvroPrimitive.LONG: + case AvroPrimitive.FLOAT: + case AvroPrimitive.DOUBLE: + case AvroPrimitive.BYTES: + case AvroPrimitive.STRING: + return new AvroPrimitiveType(schema); + default: + throw new Error(`Unexpected Avro type ${schema}`); + } + } + static fromArraySchema(schema) { + return new AvroUnionType(schema.map(_AvroType.fromSchema)); + } + static fromObjectSchema(schema) { + const type = schema.type; + try { + return _AvroType.fromStringSchema(type); + } catch (err) { + } + switch (type) { + case AvroComplex.RECORD: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.name) { + throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); + } + const fields = {}; + if (!schema.fields) { + throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); + } + for (const field of schema.fields) { + fields[field.name] = _AvroType.fromSchema(field.type); + } + return new AvroRecordType(fields, schema.name); + case AvroComplex.ENUM: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.symbols) { + throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); + } + return new AvroEnumType(schema.symbols); + case AvroComplex.MAP: + if (!schema.values) { + throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); + } + return new AvroMapType(_AvroType.fromSchema(schema.values)); + case AvroComplex.ARRAY: + // Unused today + case AvroComplex.FIXED: + // Unused today + default: + throw new Error(`Unexpected Avro type ${type} in ${schema}`); + } + } + }; + var AvroPrimitiveType = class extends AvroType { + static { + __name(this, "AvroPrimitiveType"); + } + constructor(primitive) { + super(); + this._primitive = primitive; + } + read(stream2, options = {}) { + switch (this._primitive) { + case AvroPrimitive.NULL: + return AvroParser.readNull(); + case AvroPrimitive.BOOLEAN: + return AvroParser.readBoolean(stream2, options); + case AvroPrimitive.INT: + return AvroParser.readInt(stream2, options); + case AvroPrimitive.LONG: + return AvroParser.readLong(stream2, options); + case AvroPrimitive.FLOAT: + return AvroParser.readFloat(stream2, options); + case AvroPrimitive.DOUBLE: + return AvroParser.readDouble(stream2, options); + case AvroPrimitive.BYTES: + return AvroParser.readBytes(stream2, options); + case AvroPrimitive.STRING: + return AvroParser.readString(stream2, options); + default: + throw new Error("Unknown Avro Primitive"); } - logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); - return 
delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); } }; - var StorageRetryPolicyFactory = class { + var AvroEnumType = class extends AvroType { static { - __name(this, "StorageRetryPolicyFactory"); + __name(this, "AvroEnumType"); } - /** - * Creates an instance of StorageRetryPolicyFactory. - * @param retryOptions - - */ - constructor(retryOptions) { - this.retryOptions = retryOptions; + constructor(symbols) { + super(); + this._symbols = symbols; } - /** - * Creates a StorageRetryPolicy object. - * - * @param nextPolicy - - * @param options - - */ - create(nextPolicy, options) { - return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); + async read(stream2, options = {}) { + const value = await AvroParser.readInt(stream2, options); + return this._symbols[value]; } }; - var CredentialPolicy = class extends coreHttp.BaseRequestPolicy { + var AvroUnionType = class extends AvroType { static { - __name(this, "CredentialPolicy"); + __name(this, "AvroUnionType"); } - /** - * Sends out request. - * - * @param request - - */ - sendRequest(request) { - return this._nextPolicy.sendRequest(this.signRequest(request)); + constructor(types) { + super(); + this._types = types; } - /** - * Child classes must implement this method with request signing. This method - * will be executed in {@link sendRequest}. - * - * @param request - - */ - signRequest(request) { - return request; + async read(stream2, options = {}) { + const typeIndex = await AvroParser.readInt(stream2, options); + return this._types[typeIndex].read(stream2, options); } }; - var AnonymousCredentialPolicy = class extends CredentialPolicy { + var AvroMapType = class extends AvroType { static { - __name(this, "AnonymousCredentialPolicy"); + __name(this, "AvroMapType"); } - /** - * Creates an instance of AnonymousCredentialPolicy. - * @param nextPolicy - - * @param options - - */ - // The base class has a protected constructor. Adding a public one to enable constructing of this class. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { - super(nextPolicy, options); + constructor(itemType) { + super(); + this._itemType = itemType; + } + read(stream2, options = {}) { + const readItemMethod = /* @__PURE__ */ __name((s, opts) => { + return this._itemType.read(s, opts); + }, "readItemMethod"); + return AvroParser.readMap(stream2, readItemMethod, options); } }; - var Credential = class { + var AvroRecordType = class extends AvroType { static { - __name(this, "Credential"); + __name(this, "AvroRecordType"); } - /** - * Creates a RequestPolicy object. 
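// --- Editorial sketch (not part of the bundle): the EXPONENTIAL backoff
// schedule computed by StorageRetryPolicy.delay() above, with the defaults
// from DEFAULT_RETRY_OPTIONS (retryDelayInMs = 4000, maxRetryDelayInMs = 120000).
function backoffMs(attempt, retryDelayInMs = 4000, maxRetryDelayInMs = 120000) {
  return Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs);
}
console.log([1, 2, 3, 4, 5].map((a) => backoffMs(a))); // [0, 4000, 12000, 28000, 60000]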
- * - * @param _nextPolicy - - * @param _options - - */ - create(_nextPolicy, _options) { - throw new Error("Method should be implemented in children classes."); + constructor(fields, name) { + super(); + this._fields = fields; + this._name = name; + } + async read(stream2, options = {}) { + const record = {}; + record["$schema"] = this._name; + for (const key in this._fields) { + if (Object.prototype.hasOwnProperty.call(this._fields, key)) { + record[key] = await this._fields[key].read(stream2, options); + } + } + return record; } }; - var AnonymousCredential = class extends Credential { + function arraysEqual(a, b) { + if (a === b) + return true; + if (a == null || b == null) + return false; + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; ++i) { + if (a[i] !== b[i]) + return false; + } + return true; + } + __name(arraysEqual, "arraysEqual"); + var AvroReader = class { static { - __name(this, "AnonymousCredential"); + __name(this, "AvroReader"); } - /** - * Creates an {@link AnonymousCredentialPolicy} object. - * - * @param nextPolicy - - * @param options - - */ - create(nextPolicy, options) { - return new AnonymousCredentialPolicy(nextPolicy, options); + constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { + this._dataStream = dataStream; + this._headerStream = headerStream || dataStream; + this._initialized = false; + this._blockOffset = currentBlockOffset || 0; + this._objectIndex = indexWithinCurrentBlock || 0; + this._initialBlockOffset = currentBlockOffset || 0; } - }; - var TelemetryPolicy = class extends coreHttp.BaseRequestPolicy { - static { - __name(this, "TelemetryPolicy"); + get blockOffset() { + return this._blockOffset; } - /** - * Creates an instance of TelemetryPolicy. - * @param nextPolicy - - * @param options - - * @param telemetry - - */ - constructor(nextPolicy, options, telemetry) { - super(nextPolicy, options); - this.telemetry = telemetry; + get objectIndex() { + return this._objectIndex; } - /** - * Sends out request. 
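// --- Editorial sketch: the shape of Avro schema JSON that AvroType.fromSchema()
// above turns into reader types. The record below is hypothetical: each field
// becomes an entry in AvroRecordType._fields, the union becomes an AvroUnionType,
// and the record name is stamped onto parsed objects as $schema.
const exampleSchema = {
  type: "record",
  name: "com.example.Row", // hypothetical name
  fields: [
    { name: "id", type: "long" }, // AvroPrimitiveType, zig-zag varint on the wire
    { name: "label", type: "string" }, // length-prefixed UTF-8
    { name: "note", type: ["null", "string"] }, // AvroUnionType: branch index, then value
  ],
};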
- * - * @param request - - */ - async sendRequest(request) { - if (coreHttp.isNode) { - if (!request.headers) { - request.headers = new coreHttp.HttpHeaders(); - } - if (!request.headers.get(HeaderConstants.USER_AGENT)) { - request.headers.set(HeaderConstants.USER_AGENT, this.telemetry); + async initialize(options = {}) { + const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { + abortSignal: options.abortSignal + }); + if (!arraysEqual(header, AVRO_INIT_BYTES)) { + throw new Error("Stream is not an Avro file."); + } + this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { + abortSignal: options.abortSignal + }); + const codec = this._metadata[AVRO_CODEC_KEY]; + if (!(codec === void 0 || codec === null || codec === "null")) { + throw new Error("Codecs are not supported"); + } + this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal + }); + const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); + this._itemType = AvroType.fromSchema(schema); + if (this._blockOffset === 0) { + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + } + this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal + }); + await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); + this._initialized = true; + if (this._objectIndex && this._objectIndex > 0) { + for (let i = 0; i < this._objectIndex; i++) { + await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); + this._itemsRemainingInBlock--; } } - return this._nextPolicy.sendRequest(request); } - }; - var TelemetryPolicyFactory = class { - static { - __name(this, "TelemetryPolicyFactory"); + hasNext() { + return !this._initialized || this._itemsRemainingInBlock > 0; } - /** - * Creates an instance of TelemetryPolicyFactory. 
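// --- Editorial sketch: the object-container header that initialize() above
// validates. Avro files open with the 4-byte magic "Obj\x01" (79, 98, 106, 1),
// then a metadata map carrying avro.schema and avro.codec, then a 16-byte sync
// marker that also terminates every data block.
const AVRO_MAGIC = Buffer.from([0x4f, 0x62, 0x6a, 0x01]); // "Obj" + 1
function looksLikeAvro(buf) {
  return buf.length >= AVRO_MAGIC.length && buf.subarray(0, 4).equals(AVRO_MAGIC);
}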
- * @param telemetry - - */ - constructor(telemetry) { - const userAgentInfo = []; - if (coreHttp.isNode) { - if (telemetry) { - const telemetryString = telemetry.userAgentPrefix || ""; - if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { - userAgentInfo.push(telemetryString); - } - } - const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`; - if (userAgentInfo.indexOf(libInfo) === -1) { - userAgentInfo.push(libInfo); - } - let runtimeInfo = `(NODE-VERSION ${process.version})`; - if (os__namespace) { - runtimeInfo = `(NODE-VERSION ${process.version}; ${os__namespace.type()} ${os__namespace.release()})`; + parseObjects(options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* parseObjects_1() { + if (!this._initialized) { + yield tslib.__await(this.initialize(options)); } - if (userAgentInfo.indexOf(runtimeInfo) === -1) { - userAgentInfo.push(runtimeInfo); + while (this.hasNext()) { + const result = yield tslib.__await(this._itemType.read(this._dataStream, { + abortSignal: options.abortSignal + })); + this._itemsRemainingInBlock--; + this._objectIndex++; + if (this._itemsRemainingInBlock === 0) { + const marker2 = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal + })); + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + this._objectIndex = 0; + if (!arraysEqual(this._syncMarker, marker2)) { + throw new Error("Stream is not a valid Avro file."); + } + try { + this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal + })); + } catch (err) { + this._itemsRemainingInBlock = 0; + } + if (this._itemsRemainingInBlock > 0) { + yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); + } + } + yield yield tslib.__await(result); } - } - this.telemetryString = userAgentInfo.join(" "); - } - /** - * Creates a TelemetryPolicy object. - * - * @param nextPolicy - - * @param options - - */ - create(nextPolicy, options) { - return new TelemetryPolicy(nextPolicy, options, this.telemetryString); + }, "parseObjects_1")); } }; - var _defaultHttpClient = new coreHttp.DefaultHttpClient(); - function getCachedDefaultHttpClient() { - return _defaultHttpClient; - } - __name(getCachedDefaultHttpClient, "getCachedDefaultHttpClient"); - var Constants = { - DefaultScope: "/.default", - /** - * Defines constants for use with HTTP headers. - */ - HeaderConstants: { - /** - * The Authorization header. 
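// --- Editorial sketch: consuming the parseObjects() async generator above.
// AvroReader is internal to this bundle, so `avroReader` is a hypothetical
// instance; the loop shape is what BlobQuickQueryStream uses further down.
async function dumpRecords(avroReader) {
  for await (const record of avroReader.parseObjects()) {
    console.log(record.$schema, record); // $schema carries the record type name
  }
}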
- */ - AUTHORIZATION: "authorization" + var AvroReadable = class { + static { + __name(this, "AvroReadable"); } }; - var DEFAULT_CYCLER_OPTIONS = { - forcedRefreshWindowInMs: 1e3, - retryIntervalInMs: 3e3, - refreshWindowInMs: 1e3 * 60 * 2 - // Start refreshing 2m before expiry - }; - async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { - async function tryGetAccessToken() { - if (Date.now() < timeoutInMs) { - try { - return await getAccessToken(); - } catch (_a) { - return null; - } - } else { - const finalToken = await getAccessToken(); - if (finalToken === null) { - throw new Error("Failed to refresh access token."); - } - return finalToken; - } + var ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); + var AvroReadableFromStream = class extends AvroReadable { + static { + __name(this, "AvroReadableFromStream"); } - __name(tryGetAccessToken, "tryGetAccessToken"); - let token = await tryGetAccessToken(); - while (token === null) { - await coreHttp.delay(retryIntervalInMs); - token = await tryGetAccessToken(); + constructor(readable) { + super(); + this._readable = readable; + this._position = 0; } - return token; - } - __name(beginRefresh, "beginRefresh"); - function createTokenCycler(credential, scopes, tokenCyclerOptions) { - let refreshWorker = null; - let token = null; - const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); - const cycler = { - /** - * Produces true if a refresh job is currently in progress. - */ - get isRefreshing() { - return refreshWorker !== null; - }, - /** - * Produces true if the cycler SHOULD refresh (we are within the refresh - * window and not already refreshing) - */ - get shouldRefresh() { - var _a; - return !cycler.isRefreshing && ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now(); - }, - /** - * Produces true if the cycler MUST refresh (null or nearly-expired - * token). - */ - get mustRefresh() { - return token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now(); + toUint8Array(data) { + if (typeof data === "string") { + return Buffer.from(data); } - }; - function refresh(getTokenOptions) { + return data; + } + get position() { + return this._position; + } + async read(size, options = {}) { var _a; - if (!cycler.isRefreshing) { - const tryGetAccessToken = /* @__PURE__ */ __name(() => credential.getToken(scopes, getTokenOptions), "tryGetAccessToken"); - refreshWorker = beginRefresh( - tryGetAccessToken, - options.retryIntervalInMs, - // If we don't have a token, then we should timeout immediately - (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now() - ).then((_token) => { - refreshWorker = null; - token = _token; - return token; - }).catch((reason) => { - refreshWorker = null; - token = null; - throw reason; - }); + if ((_a = options.abortSignal) === null || _a === void 0 ? 
void 0 : _a.aborted) { + throw ABORT_ERROR; } - return refreshWorker; - } - __name(refresh, "refresh"); - return async (tokenOptions) => { - if (cycler.mustRefresh) - return refresh(tokenOptions); - if (cycler.shouldRefresh) { - refresh(tokenOptions); + if (size < 0) { + throw new Error(`size parameter should be positive: ${size}`); } - return token; - }; - } - __name(createTokenCycler, "createTokenCycler"); - function getChallenge(response) { - const challenge = response.headers.get("WWW-Authenticate"); - if (response.status === 401 && challenge) { - return challenge; - } - return; - } - __name(getChallenge, "getChallenge"); - function parseChallenge(challenge) { - const bearerChallenge = challenge.slice("Bearer ".length); - const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); - const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); - return keyValuePairs.reduce((a, b) => Object.assign(Object.assign({}, a), b), {}); - } - __name(parseChallenge, "parseChallenge"); - function storageBearerTokenChallengeAuthenticationPolicy(credential, scopes) { - let getToken = createTokenCycler(credential, scopes); - class StorageBearerTokenChallengeAuthenticationPolicy extends coreHttp.BaseRequestPolicy { - static { - __name(this, "StorageBearerTokenChallengeAuthenticationPolicy"); + if (size === 0) { + return new Uint8Array(); } - constructor(nextPolicy, options) { - super(nextPolicy, options); + if (!this._readable.readable) { + throw new Error("Stream no longer readable."); } - async sendRequest(webResource) { - if (!webResource.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); - } - const getTokenInternal = getToken; - const token = (await getTokenInternal({ - abortSignal: webResource.abortSignal, - tracingOptions: { - tracingContext: webResource.tracingContext - } - })).token; - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); - const response = await this._nextPolicy.sendRequest(webResource); - if ((response === null || response === void 0 ? 
void 0 : response.status) === 401) { - const challenge = getChallenge(response); - if (challenge) { - const challengeInfo = parseChallenge(challenge); - const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope; - const parsedAuthUri = coreHttp.URLBuilder.parse(challengeInfo.authorization_uri); - const pathSegments = parsedAuthUri.getPath().split("/"); - const tenantId = pathSegments[1]; - const getTokenForChallenge = createTokenCycler(credential, challengeScopes); - const tokenForChallenge = (await getTokenForChallenge({ - abortSignal: webResource.abortSignal, - tracingOptions: { - tracingContext: webResource.tracingContext - }, - tenantId - })).token; - getToken = getTokenForChallenge; - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${tokenForChallenge}`); - return this._nextPolicy.sendRequest(webResource); + const chunk = this._readable.read(size); + if (chunk) { + this._position += chunk.length; + return this.toUint8Array(chunk); + } else { + return new Promise((resolve, reject) => { + const cleanUp = /* @__PURE__ */ __name(() => { + this._readable.removeListener("readable", readableCallback); + this._readable.removeListener("error", rejectCallback); + this._readable.removeListener("end", rejectCallback); + this._readable.removeListener("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + }, "cleanUp"); + const readableCallback = /* @__PURE__ */ __name(() => { + const callbackChunk = this._readable.read(size); + if (callbackChunk) { + this._position += callbackChunk.length; + cleanUp(); + resolve(this.toUint8Array(callbackChunk)); + } + }, "readableCallback"); + const rejectCallback = /* @__PURE__ */ __name(() => { + cleanUp(); + reject(); + }, "rejectCallback"); + const abortHandler = /* @__PURE__ */ __name(() => { + cleanUp(); + reject(ABORT_ERROR); + }, "abortHandler"); + this._readable.on("readable", readableCallback); + this._readable.once("error", rejectCallback); + this._readable.once("end", rejectCallback); + this._readable.once("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); } - } - return response; - } - } - return { - create: /* @__PURE__ */ __name((nextPolicy, options) => { - return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options); - }, "create") - }; - } - __name(storageBearerTokenChallengeAuthenticationPolicy, "storageBearerTokenChallengeAuthenticationPolicy"); - function isPipelineLike(pipeline) { - if (!pipeline || typeof pipeline !== "object") { - return false; - } - const castPipeline = pipeline; - return Array.isArray(castPipeline.factories) && typeof castPipeline.options === "object" && typeof castPipeline.toServiceClientOptions === "function"; - } - __name(isPipelineLike, "isPipelineLike"); - var Pipeline = class { + }); + } + } + }; + var BlobQuickQueryStream = class extends stream.Readable { static { - __name(this, "Pipeline"); + __name(this, "BlobQuickQueryStream"); } /** - * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. + * Creates an instance of BlobQuickQueryStream. 
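// --- Editorial sketch (not part of the bundle): the quick-query stream begun
// above backs the public BlockBlobClient.query() API (Node.js only), which
// evaluates a SQL-like expression server-side and streams back only matching
// data. `blockBlobClient` and `streamToString` are placeholders.
async function queryDemo(blockBlobClient) {
  const response = await blockBlobClient.query("SELECT * from BlobStorage");
  return streamToString(response.readableStreamBody); // hypothetical stream collector
}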
* - * @param factories - + * @param source - The current ReadableStream returned from getter * @param options - */ - constructor(factories, options = {}) { - this.factories = factories; - this.options = Object.assign(Object.assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() }); - } - /** - * Transfer Pipeline object to ServiceClientOptions object which is required by - * ServiceClient constructor. - * - * @returns The ServiceClientOptions object from this Pipeline. - */ - toServiceClientOptions() { - return { - httpClient: this.options.httpClient, - requestPolicyFactories: this.factories - }; + constructor(source, options = {}) { + super(); + this.avroPaused = true; + this.source = source; + this.onProgress = options.onProgress; + this.onError = options.onError; + this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); + this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); } - }; - function newPipeline(credential, pipelineOptions = {}) { - var _a; - if (credential === void 0) { - credential = new AnonymousCredential(); + _read() { + if (this.avroPaused) { + this.readInternal().catch((err) => { + this.emit("error", err); + }); + } } - const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions); - const factories = [ - coreHttp.tracingPolicy({ userAgent: telemetryPolicy.telemetryString }), - coreHttp.keepAlivePolicy(pipelineOptions.keepAliveOptions), - telemetryPolicy, - coreHttp.generateClientRequestIdPolicy(), - new StorageBrowserPolicyFactory(), - new StorageRetryPolicyFactory(pipelineOptions.retryOptions), - // Default deserializationPolicy is provided by protocol layer - // Use customized XML char key of "#" so we could deserialize metadata - // with "_" key - coreHttp.deserializationPolicy(void 0, { xmlCharKey: "#" }), - coreHttp.logPolicy({ - logger: logger.info, - allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, - allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters - }) - ]; - if (coreHttp.isNode) { - factories.push(coreHttp.proxyPolicy(pipelineOptions.proxyOptions)); - factories.push(coreHttp.disableResponseDecompressionPolicy()); + async readInternal() { + this.avroPaused = false; + let avroNext; + do { + avroNext = await this.avroIter.next(); + if (avroNext.done) { + break; + } + const obj = avroNext.value; + const schema = obj.$schema; + if (typeof schema !== "string") { + throw Error("Missing schema in avro record."); + } + switch (schema) { + case "com.microsoft.azure.storage.queryBlobContents.resultData": + { + const data = obj.data; + if (data instanceof Uint8Array === false) { + throw Error("Invalid data in avro result record."); + } + if (!this.push(Buffer.from(data))) { + this.avroPaused = true; + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.progress": + { + const bytesScanned = obj.bytesScanned; + if (typeof bytesScanned !== "number") { + throw Error("Invalid bytesScanned in avro progress record."); + } + if (this.onProgress) { + this.onProgress({ loadedBytes: bytesScanned }); + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.end": + if (this.onProgress) { + const totalBytes = obj.totalBytes; + if (typeof totalBytes !== "number") { + throw Error("Invalid totalBytes in avro end record."); + } + this.onProgress({ loadedBytes: totalBytes }); + } + this.push(null); + break; + case "com.microsoft.azure.storage.queryBlobContents.error": + if (this.onError) { + const fatal = obj.fatal; + if (typeof fatal 
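+ /* Record shapes dispatched on in this switch, as validated by the checks here:
+  *   resultData -> { data: Uint8Array }   progress -> { bytesScanned: number }
+  *   end -> { totalBytes: number }        error -> { fatal, name, description, position } */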
!== "boolean") { + throw Error("Invalid fatal in avro error record."); + } + const name = obj.name; + if (typeof name !== "string") { + throw Error("Invalid name in avro error record."); + } + const description = obj.description; + if (typeof description !== "string") { + throw Error("Invalid description in avro error record."); + } + const position = obj.position; + if (typeof position !== "number") { + throw Error("Invalid position in avro error record."); + } + this.onError({ + position, + name, + isFatal: fatal, + description + }); + } + break; + default: + throw Error(`Unknown schema ${schema} in avro progress record.`); + } + } while (!avroNext.done && !this.avroPaused); } - factories.push(coreHttp.isTokenCredential(credential) ? attachCredential(storageBearerTokenChallengeAuthenticationPolicy(credential, (_a = pipelineOptions.audience) !== null && _a !== void 0 ? _a : StorageOAuthScopes), credential) : credential); - return new Pipeline(factories, pipelineOptions); - } - __name(newPipeline, "newPipeline"); - var StorageSharedKeyCredentialPolicy = class extends CredentialPolicy { + }; + var BlobQueryResponse = class { static { - __name(this, "StorageSharedKeyCredentialPolicy"); + __name(this, "BlobQueryResponse"); } /** - * Creates an instance of StorageSharedKeyCredentialPolicy. - * @param nextPolicy - + * Creates an instance of BlobQueryResponse. + * + * @param originalResponse - * @param options - - * @param factory - */ - constructor(nextPolicy, options, factory) { - super(nextPolicy, options); - this.factory = factory; + constructor(originalResponse, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); } /** - * Signs request. + * Indicates that the service supports + * requests for partial file content. * - * @param request - + * @readonly */ - signRequest(request) { - request.headers.set(HeaderConstants.X_MS_DATE, (/* @__PURE__ */ new Date()).toUTCString()); - if (request.body && (typeof request.body === "string" || request.body !== void 0) && request.body.length > 0) { - request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); - } - const stringToSign = [ - request.method.toUpperCase(), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), - this.getHeaderValueToSign(request, HeaderConstants.DATE), - this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), - this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), - this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), - this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), - this.getHeaderValueToSign(request, HeaderConstants.RANGE) - ].join("\n") + "\n" + this.getCanonicalizedHeadersString(request) + this.getCanonicalizedResourceString(request); - const signature = this.factory.computeHMACSHA256(stringToSign); - request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); - return request; + get acceptRanges() { + return this.originalResponse.acceptRanges; } /** - * Retrieve header value according to shared key sign rules. 
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * Returns if it was previously specified + * for the file. * - * @param request - - * @param headerName - + * @readonly */ - getHeaderValueToSign(request, headerName) { - const value = request.headers.get(headerName); - if (!value) { - return ""; - } - if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { - return ""; - } - return value; + get cacheControl() { + return this.originalResponse.cacheControl; } /** - * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: - * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. - * 2. Convert each HTTP header name to lowercase. - * 3. Sort the headers lexicographically by header name, in ascending order. - * Each header may appear only once in the string. - * 4. Replace any linear whitespace in the header value with a single space. - * 5. Trim any whitespace around the colon in the header. - * 6. Finally, append a new-line character to each canonicalized header in the resulting list. - * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. * - * @param request - + * @readonly */ - getCanonicalizedHeadersString(request) { - let headersArray = request.headers.headersArray().filter((value) => { - return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); - }); - headersArray.sort((a, b) => { - return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); - }); - headersArray = headersArray.filter((value, index, array) => { - if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { - return false; - } - return true; - }); - let canonicalizedHeadersStringToSign = ""; - headersArray.forEach((header) => { - canonicalizedHeadersStringToSign += `${header.name.toLowerCase().trimRight()}:${header.value.trimLeft()} -`; - }); - return canonicalizedHeadersStringToSign; + get contentDisposition() { + return this.originalResponse.contentDisposition; } /** - * Retrieves the webResource canonicalized resource string. + * Returns the value that was specified + * for the Content-Encoding request header. * - * @param request - + * @readonly */ - getCanonicalizedResourceString(request) { - const path2 = getURLPath(request.url) || "/"; - let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path2}`; - const queries = getURLQueries(request.url); - const lowercaseQueries = {}; - if (queries) { - const queryKeys = []; - for (const key in queries) { - if (Object.prototype.hasOwnProperty.call(queries, key)) { - const lowercaseKey = key.toLowerCase(); - lowercaseQueries[lowercaseKey] = queries[key]; - queryKeys.push(lowercaseKey); - } - } - queryKeys.sort(); - for (const key of queryKeys) { - canonicalizedResourceString += ` -${key}:${decodeURIComponent(lowercaseQueries[key])}`; - } - } - return canonicalizedResourceString; - } - }; - var StorageSharedKeyCredential = class extends Credential { - static { - __name(this, "StorageSharedKeyCredential"); + get contentEncoding() { + return this.originalResponse.contentEncoding; } /** - * Creates an instance of StorageSharedKeyCredential. 
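- *
- * Example (illustrative values; devstoreaccount1 is the well-known local emulator account):
- *   new StorageSharedKeyCredential("devstoreaccount1", "<base64 account key>")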
- * @param accountName - - * @param accountKey - + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly */ - constructor(accountName, accountKey) { - super(); - this.accountName = accountName; - this.accountKey = Buffer.from(accountKey, "base64"); + get contentLanguage() { + return this.originalResponse.contentLanguage; } /** - * Creates a StorageSharedKeyCredentialPolicy object. + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. * - * @param nextPolicy - - * @param options - + * @readonly */ - create(nextPolicy, options) { - return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; } /** - * Generates a hash signature for an HTTP request or for a SAS. + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. * - * @param stringToSign - + * @readonly */ - computeHMACSHA256(stringToSign) { - return crypto4.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); - } - }; - var packageName = "azure-storage-blob"; - var packageVersion = "12.17.0"; - var StorageClientContext = class extends coreHttp__namespace.ServiceClient { - static { - __name(this, "StorageClientContext"); + get blobType() { + return this.originalResponse.blobType; } /** - * Initializes a new instance of the StorageClientContext class. - * @param url The URL of the service account, container, or blob that is the target of the desired - * operation. - * @param options The parameter options + * The number of bytes present in the + * response body. + * + * @readonly */ - constructor(url2, options) { - if (url2 === void 0) { - throw new Error("'url' cannot be null"); - } - if (!options) { - options = {}; - } - if (!options.userAgent) { - const defaultUserAgent = coreHttp__namespace.getDefaultUserAgentValue(); - options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; - } - super(void 0, options); - this.requestContentType = "application/json; charset=utf-8"; - this.baseUri = options.endpoint || "{url}"; - this.url = url2; - this.version = options.version || "2023-11-03"; - } - }; - var StorageClient = class { - static { - __name(this, "StorageClient"); + get contentLength() { + return this.originalResponse.contentLength; } /** - * Creates an instance of StorageClient. - * @param url - url to resource - * @param pipeline - request policy pipeline. + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. 
+ * + * @readonly */ - constructor(url2, pipeline) { - this.url = escapeURLPath(url2); - this.accountName = getAccountNameFromUrl(url2); - this.pipeline = pipeline; - this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions()); - this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); - this.credential = new AnonymousCredential(); - for (const factory of this.pipeline.factories) { - if (coreHttp.isNode && factory instanceof StorageSharedKeyCredential || factory instanceof AnonymousCredential) { - this.credential = factory; - } else if (coreHttp.isTokenCredential(factory.credential)) { - this.credential = factory.credential; - } - } - const storageClientContext = this.storageClientContext; - storageClientContext.requestContentType = void 0; - } - }; - var createSpan = coreTracing.createSpanFunction({ - packagePrefix: "Azure.Storage.Blob", - namespace: "Microsoft.Storage" - }); - function convertTracingToRequestOptionsBase(options) { - var _a, _b; - return { - // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. - spanOptions: (_a = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _a === void 0 ? void 0 : _a.spanOptions, - tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? void 0 : _b.tracingContext - }; - } - __name(convertTracingToRequestOptionsBase, "convertTracingToRequestOptionsBase"); - var BlobSASPermissions = class _BlobSASPermissions { - static { - __name(this, "BlobSASPermissions"); + get contentMD5() { + return this.originalResponse.contentMD5; } - constructor() { - this.read = false; - this.add = false; - this.create = false; - this.write = false; - this.delete = false; - this.deleteVersion = false; - this.tag = false; - this.move = false; - this.execute = false; - this.setImmutabilityPolicy = false; - this.permanentDelete = false; + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; } /** - * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an - * Error if it encounters a character that does not correspond to a valid permission. + * The content type specified for the file. 
+ * The default content type is 'application/octet-stream' * - * @param permissions - + * @readonly */ - static parse(permissions) { - const blobSASPermissions = new _BlobSASPermissions(); - for (const char of permissions) { - switch (char) { - case "r": - blobSASPermissions.read = true; - break; - case "a": - blobSASPermissions.add = true; - break; - case "c": - blobSASPermissions.create = true; - break; - case "w": - blobSASPermissions.write = true; - break; - case "d": - blobSASPermissions.delete = true; - break; - case "x": - blobSASPermissions.deleteVersion = true; - break; - case "t": - blobSASPermissions.tag = true; - break; - case "m": - blobSASPermissions.move = true; - break; - case "e": - blobSASPermissions.execute = true; - break; - case "i": - blobSASPermissions.setImmutabilityPolicy = true; - break; - case "y": - blobSASPermissions.permanentDelete = true; - break; - default: - throw new RangeError(`Invalid permission: ${char}`); - } - } - return blobSASPermissions; + get contentType() { + return this.originalResponse.contentType; } /** - * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it - * and boolean values for them. + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. * - * @param permissionLike - + * @readonly */ - static from(permissionLike) { - const blobSASPermissions = new _BlobSASPermissions(); - if (permissionLike.read) { - blobSASPermissions.read = true; - } - if (permissionLike.add) { - blobSASPermissions.add = true; - } - if (permissionLike.create) { - blobSASPermissions.create = true; - } - if (permissionLike.write) { - blobSASPermissions.write = true; - } - if (permissionLike.delete) { - blobSASPermissions.delete = true; - } - if (permissionLike.deleteVersion) { - blobSASPermissions.deleteVersion = true; - } - if (permissionLike.tag) { - blobSASPermissions.tag = true; - } - if (permissionLike.move) { - blobSASPermissions.move = true; - } - if (permissionLike.execute) { - blobSASPermissions.execute = true; - } - if (permissionLike.setImmutabilityPolicy) { - blobSASPermissions.setImmutabilityPolicy = true; - } - if (permissionLike.permanentDelete) { - blobSASPermissions.permanentDelete = true; - } - return blobSASPermissions; + get copyCompletedOn() { + return void 0; } /** - * Converts the given permissions to a string. Using this method will guarantee the permissions are in an - * order accepted by the service. + * String identifier for the last attempted Copy + * File operation where this file was the destination file. 
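- * Example (illustrative): BlobSASPermissions.parse("racwd").toString() === "racwd",
- * since toString() emits permissions in the service-accepted order shown below.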
* - * @returns A string which represents the BlobSASPermissions + * @readonly */ - toString() { - const permissions = []; - if (this.read) { - permissions.push("r"); - } - if (this.add) { - permissions.push("a"); - } - if (this.create) { - permissions.push("c"); - } - if (this.write) { - permissions.push("w"); - } - if (this.delete) { - permissions.push("d"); - } - if (this.deleteVersion) { - permissions.push("x"); - } - if (this.tag) { - permissions.push("t"); - } - if (this.move) { - permissions.push("m"); - } - if (this.execute) { - permissions.push("e"); - } - if (this.setImmutabilityPolicy) { - permissions.push("i"); - } - if (this.permanentDelete) { - permissions.push("y"); - } - return permissions.join(""); + get copyId() { + return this.originalResponse.copyId; } - }; - var ContainerSASPermissions = class _ContainerSASPermissions { - static { - __name(this, "ContainerSASPermissions"); + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; } - constructor() { - this.read = false; - this.add = false; - this.create = false; - this.write = false; - this.delete = false; - this.deleteVersion = false; - this.list = false; - this.tag = false; - this.move = false; - this.execute = false; - this.setImmutabilityPolicy = false; - this.permanentDelete = false; - this.filterByTags = false; + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; } /** - * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an - * Error if it encounters a character that does not correspond to a valid permission. + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' * - * @param permissions - + * @readonly */ - static parse(permissions) { - const containerSASPermissions = new _ContainerSASPermissions(); - for (const char of permissions) { - switch (char) { - case "r": - containerSASPermissions.read = true; - break; - case "a": - containerSASPermissions.add = true; - break; - case "c": - containerSASPermissions.create = true; - break; - case "w": - containerSASPermissions.write = true; - break; - case "d": - containerSASPermissions.delete = true; - break; - case "l": - containerSASPermissions.list = true; - break; - case "t": - containerSASPermissions.tag = true; - break; - case "x": - containerSASPermissions.deleteVersion = true; - break; - case "m": - containerSASPermissions.move = true; - break; - case "e": - containerSASPermissions.execute = true; - break; - case "i": - containerSASPermissions.setImmutabilityPolicy = true; - break; - case "y": - containerSASPermissions.permanentDelete = true; - break; - case "f": - containerSASPermissions.filterByTags = true; - break; - default: - throw new RangeError(`Invalid permission ${char}`); - } - } - return containerSASPermissions; + get copyStatus() { + return this.originalResponse.copyStatus; } /** - * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it - * and boolean values for them. + * Only appears when + * x-ms-copy-status is failed or pending. 
Describes cause of fatal or + * non-fatal copy operation failure. * - * @param permissionLike - + * @readonly */ - static from(permissionLike) { - const containerSASPermissions = new _ContainerSASPermissions(); - if (permissionLike.read) { - containerSASPermissions.read = true; - } - if (permissionLike.add) { - containerSASPermissions.add = true; - } - if (permissionLike.create) { - containerSASPermissions.create = true; - } - if (permissionLike.write) { - containerSASPermissions.write = true; - } - if (permissionLike.delete) { - containerSASPermissions.delete = true; - } - if (permissionLike.list) { - containerSASPermissions.list = true; - } - if (permissionLike.deleteVersion) { - containerSASPermissions.deleteVersion = true; - } - if (permissionLike.tag) { - containerSASPermissions.tag = true; - } - if (permissionLike.move) { - containerSASPermissions.move = true; - } - if (permissionLike.execute) { - containerSASPermissions.execute = true; - } - if (permissionLike.setImmutabilityPolicy) { - containerSASPermissions.setImmutabilityPolicy = true; - } - if (permissionLike.permanentDelete) { - containerSASPermissions.permanentDelete = true; - } - if (permissionLike.filterByTags) { - containerSASPermissions.filterByTags = true; - } - return containerSASPermissions; + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; } /** - * Converts the given permissions to a string. Using this method will guarantee the permissions are in an - * order accepted by the service. + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. * - * The order of the characters should be as specified here to ensure correctness. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. * + * @readonly */ - toString() { - const permissions = []; - if (this.read) { - permissions.push("r"); - } - if (this.add) { - permissions.push("a"); - } - if (this.create) { - permissions.push("c"); - } - if (this.write) { - permissions.push("w"); - } - if (this.delete) { - permissions.push("d"); - } - if (this.deleteVersion) { - permissions.push("x"); - } - if (this.list) { - permissions.push("l"); - } - if (this.tag) { - permissions.push("t"); - } - if (this.move) { - permissions.push("m"); - } - if (this.execute) { - permissions.push("e"); - } - if (this.setImmutabilityPolicy) { - permissions.push("i"); - } - if (this.permanentDelete) { - permissions.push("y"); - } - if (this.filterByTags) { - permissions.push("f"); - } - return permissions.join(""); + get leaseState() { + return this.originalResponse.leaseState; } - }; - var UserDelegationKeyCredential = class { - static { - __name(this, "UserDelegationKeyCredential"); + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; } /** - * Creates an instance of UserDelegationKeyCredential. - * @param accountName - - * @param userDelegationKey - + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. 
+ * + * @readonly */ - constructor(accountName, userDelegationKey) { - this.accountName = accountName; - this.userDelegationKey = userDelegationKey; - this.key = Buffer.from(userDelegationKey.value, "base64"); + get date() { + return this.originalResponse.date; } /** - * Generates a hash signature for an HTTP request or for a SAS. + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. * - * @param stringToSign - + * @readonly */ - computeHMACSHA256(stringToSign) { - return crypto4.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; } - }; - function ipRangeToString(ipRange) { - return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; - } - __name(ipRangeToString, "ipRangeToString"); - exports2.SASProtocol = void 0; - (function(SASProtocol) { - SASProtocol["Https"] = "https"; - SASProtocol["HttpsAndHttp"] = "https,http"; - })(exports2.SASProtocol || (exports2.SASProtocol = {})); - var SASQueryParameters = class { - static { - __name(this, "SASQueryParameters"); + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; } - constructor(version4, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn2, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType2, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope2) { - this.version = version4; - this.signature = signature; - if (permissionsOrOptions !== void 0 && typeof permissionsOrOptions !== "string") { - this.permissions = permissionsOrOptions.permissions; - this.services = permissionsOrOptions.services; - this.resourceTypes = permissionsOrOptions.resourceTypes; - this.protocol = permissionsOrOptions.protocol; - this.startsOn = permissionsOrOptions.startsOn; - this.expiresOn = permissionsOrOptions.expiresOn; - this.ipRangeInner = permissionsOrOptions.ipRange; - this.identifier = permissionsOrOptions.identifier; - this.encryptionScope = permissionsOrOptions.encryptionScope; - this.resource = permissionsOrOptions.resource; - this.cacheControl = permissionsOrOptions.cacheControl; - this.contentDisposition = permissionsOrOptions.contentDisposition; - this.contentEncoding = permissionsOrOptions.contentEncoding; - this.contentLanguage = permissionsOrOptions.contentLanguage; - this.contentType = permissionsOrOptions.contentType; - if (permissionsOrOptions.userDelegationKey) { - this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; - this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; - this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; - this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; - this.signedService = permissionsOrOptions.userDelegationKey.signedService; - this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; - this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; - this.correlationId = permissionsOrOptions.correlationId; - } - } else { - this.services = services; - this.resourceTypes = resourceTypes; - this.expiresOn = expiresOn2; - this.permissions = permissionsOrOptions; - this.protocol = protocol; - this.startsOn = startsOn; - this.ipRangeInner = ipRange; - this.encryptionScope = 
encryptionScope2; - this.identifier = identifier; - this.resource = resource; - this.cacheControl = cacheControl; - this.contentDisposition = contentDisposition; - this.contentEncoding = contentEncoding; - this.contentLanguage = contentLanguage; - this.contentType = contentType2; - if (userDelegationKey) { - this.signedOid = userDelegationKey.signedObjectId; - this.signedTenantId = userDelegationKey.signedTenantId; - this.signedStartsOn = userDelegationKey.signedStartsOn; - this.signedExpiresOn = userDelegationKey.signedExpiresOn; - this.signedService = userDelegationKey.signedService; - this.signedVersion = userDelegationKey.signedVersion; - this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; - this.correlationId = correlationId; - } - } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; } /** - * Optional. IP range allowed for this SAS. + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). * * @readonly */ - get ipRange() { - if (this.ipRangeInner) { - return { - end: this.ipRangeInner.end, - start: this.ipRangeInner.start - }; - } - return void 0; + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; } /** - * Encodes all SAS query parameters into a string that can be appended to a URL. + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. * + * @readonly */ - toString() { - const params = [ - "sv", - "ss", - "srt", - "spr", - "st", - "se", - "sip", - "si", - "ses", - "skoid", - "sktid", - "skt", - "ske", - "sks", - "skv", - "sr", - "sp", - "sig", - "rscc", - "rscd", - "rsce", - "rscl", - "rsct", - "saoid", - "scid" - ]; - const queries = []; - for (const param of params) { - switch (param) { - case "sv": - this.tryAppendQueryParameter(queries, param, this.version); - break; - case "ss": - this.tryAppendQueryParameter(queries, param, this.services); - break; - case "srt": - this.tryAppendQueryParameter(queries, param, this.resourceTypes); - break; - case "spr": - this.tryAppendQueryParameter(queries, param, this.protocol); - break; - case "st": - this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : void 0); - break; - case "se": - this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : void 0); - break; - case "sip": - this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : void 0); - break; - case "si": - this.tryAppendQueryParameter(queries, param, this.identifier); - break; - case "ses": - this.tryAppendQueryParameter(queries, param, this.encryptionScope); - break; - case "skoid": - this.tryAppendQueryParameter(queries, param, this.signedOid); - break; - case "sktid": - this.tryAppendQueryParameter(queries, param, this.signedTenantId); - break; - case "skt": - this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? 
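- /* Query-key reference for this switch (collected from the cases themselves):
-  * sv=version ss=services srt=resourceTypes spr=protocol st=startsOn se=expiresOn
-  * sip=ipRange si=identifier ses=encryptionScope skoid/sktid/skt/ske/sks/skv=user-delegation-key fields
-  * sr=resource sp=permissions sig=signature rscc/rscd/rsce/rscl/rsct=response headers
-  * saoid=preauthorizedAgentObjectId scid=correlationId */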
truncatedISO8061Date(this.signedStartsOn, false) : void 0); - break; - case "ske": - this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : void 0); - break; - case "sks": - this.tryAppendQueryParameter(queries, param, this.signedService); - break; - case "skv": - this.tryAppendQueryParameter(queries, param, this.signedVersion); - break; - case "sr": - this.tryAppendQueryParameter(queries, param, this.resource); - break; - case "sp": - this.tryAppendQueryParameter(queries, param, this.permissions); - break; - case "sig": - this.tryAppendQueryParameter(queries, param, this.signature); - break; - case "rscc": - this.tryAppendQueryParameter(queries, param, this.cacheControl); - break; - case "rscd": - this.tryAppendQueryParameter(queries, param, this.contentDisposition); - break; - case "rsce": - this.tryAppendQueryParameter(queries, param, this.contentEncoding); - break; - case "rscl": - this.tryAppendQueryParameter(queries, param, this.contentLanguage); - break; - case "rsct": - this.tryAppendQueryParameter(queries, param, this.contentType); - break; - case "saoid": - this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); - break; - case "scid": - this.tryAppendQueryParameter(queries, param, this.correlationId); - break; - } - } - return queries.join("&"); + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; } /** - * A private helper method used to filter and append query key/value pairs into an array. + * A name-value pair + * to associate with a file storage object. * - * @param queries - - * @param key - - * @param value - + * @readonly */ - tryAppendQueryParameter(queries, key, value) { - if (!value) { - return; - } - key = encodeURIComponent(key); - value = encodeURIComponent(value); - if (key.length > 0 && value.length > 0) { - queries.push(`${key}=${value}`); - } - } - }; - function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { - const version4 = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; - const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential ? sharedKeyCredentialOrUserDelegationKey : void 0; - let userDelegationKeyCredential; - if (sharedKeyCredential === void 0 && accountName !== void 0) { - userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); - } - if (sharedKeyCredential === void 0 && userDelegationKeyCredential === void 0) { - throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); + get metadata() { + return this.originalResponse.metadata; } - if (version4 >= "2020-12-06") { - if (sharedKeyCredential !== void 0) { - return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); - } else { - return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); - } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. 
+ * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; } - if (version4 >= "2018-11-09") { - if (sharedKeyCredential !== void 0) { - return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); - } else { - if (version4 >= "2020-02-10") { - return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); - } else { - return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); - } - } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; } - if (version4 >= "2015-04-05") { - if (sharedKeyCredential !== void 0) { - return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); - } else { - throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); - } + /** + * Indicates the version of the File service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; } - throw new RangeError("'version' must be >= '2015-04-05'."); - } - __name(generateBlobSASQueryParameters, "generateBlobSASQueryParameters"); - function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; } - let resource = "c"; - if (blobSASSignatureValues.blobName) { - resource = "b"; + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get blobBody() { + return void 0; } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - blobSASSignatureValues.identifier, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", - blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", - blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", - blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" - ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); - } - __name(generateBlobSASQueryParameters20150405, "generateBlobSASQueryParameters20150405"); - function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will parse avor data returned by blob query. + * + * @readonly + */ + get readableStreamBody() { + return coreHttp.isNode ? this.blobDownloadStream : void 0; } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; - } + /** + * The HTTP response. 
+ */ + get _response() { + return this.originalResponse._response; } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } + }; + exports2.BlockBlobTier = void 0; + (function(BlockBlobTier) { + BlockBlobTier["Hot"] = "Hot"; + BlockBlobTier["Cool"] = "Cool"; + BlockBlobTier["Cold"] = "Cold"; + BlockBlobTier["Archive"] = "Archive"; + })(exports2.BlockBlobTier || (exports2.BlockBlobTier = {})); + exports2.PremiumPageBlobTier = void 0; + (function(PremiumPageBlobTier) { + PremiumPageBlobTier["P4"] = "P4"; + PremiumPageBlobTier["P6"] = "P6"; + PremiumPageBlobTier["P10"] = "P10"; + PremiumPageBlobTier["P15"] = "P15"; + PremiumPageBlobTier["P20"] = "P20"; + PremiumPageBlobTier["P30"] = "P30"; + PremiumPageBlobTier["P40"] = "P40"; + PremiumPageBlobTier["P50"] = "P50"; + PremiumPageBlobTier["P60"] = "P60"; + PremiumPageBlobTier["P70"] = "P70"; + PremiumPageBlobTier["P80"] = "P80"; + })(exports2.PremiumPageBlobTier || (exports2.PremiumPageBlobTier = {})); + function toAccessTier(tier2) { + if (tier2 === void 0) { + return void 0; } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - blobSASSignatureValues.identifier, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", - blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", - blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", - blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? 
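+ // Illustrative (hypothetical account name): getBlobServiceAccountAudience("contoso")
+ // yields "https://contoso.blob.core.windows.net/.default", and rangeResponseFromModel
+ // above maps each { start, end } page range to { offset: start, count: end - start }.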
blobSASSignatureValues.contentType : "" - ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); + return tier2; } - __name(generateBlobSASQueryParameters20181109, "generateBlobSASQueryParameters20181109"); - function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); - } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; - } + __name(toAccessTier, "toAccessTier"); + function ensureCpkIfSpecified(cpk, isHttps) { + if (cpk && !isHttps) { + throw new RangeError("Customer-provided encryption key must be used over HTTPS."); } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } + if (cpk && !cpk.encryptionAlgorithm) { + cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - blobSASSignatureValues.identifier, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.encryptionScope, - blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", - blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", - blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", - blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : "" - ].join("\n"); - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, void 0, void 0, void 0, blobSASSignatureValues.encryptionScope); } - __name(generateBlobSASQueryParameters20201206, "generateBlobSASQueryParameters20201206"); - function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + __name(ensureCpkIfSpecified, "ensureCpkIfSpecified"); + exports2.StorageBlobAudience = void 0; + (function(StorageBlobAudience) { + StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; + })(exports2.StorageBlobAudience || (exports2.StorageBlobAudience = {})); + function getBlobServiceAccountAudience(storageAccountName) { + return `https://${storageAccountName}.blob.core.windows.net/.default`; + } + __name(getBlobServiceAccountAudience, "getBlobServiceAccountAudience"); + function rangeResponseFromModel(response) { + const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start + })); + const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start + })); + return Object.assign(Object.assign({}, response), { + pageRange, + clearRange, + _response: Object.assign(Object.assign({}, response._response), { parsedBody: { + pageRange, + clearRange + } }) + }); + } + __name(rangeResponseFromModel, "rangeResponseFromModel"); + var BlobBeginCopyFromUrlPoller = class extends coreLro.Poller { + static { + __name(this, "BlobBeginCopyFromUrlPoller"); } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; + constructor(options) { + const { blobClient, copySource: copySource2, intervalInMs = 15e3, onProgress, resumeFrom, startCopyFromURLOptions } = options; + let state; + if (resumeFrom) { + state = JSON.parse(resumeFrom).state; } - } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { + blobClient, + copySource: copySource2, + startCopyFromURLOptions + 
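+ // Resume sketch (assumed usage): a poller serialized via the toString helper below
+ // can be revived by passing that JSON string as `resumeFrom`, parsed into `state` above.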
})); + super(operation); + if (typeof onProgress === "function") { + this.onProgress(onProgress); } + this.intervalInMs = intervalInMs; } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey); - } - __name(generateBlobSASQueryParametersUDK20181109, "generateBlobSASQueryParametersUDK20181109"); - function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); - } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; - } + delay() { + return coreHttp.delay(this.intervalInMs); } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } + }; + var cancel = /* @__PURE__ */ __name(async function cancel2(options = {}) { + const state = this.state; + const { 
copyId: copyId2 } = state; + if (state.isCompleted) { + return makeBlobBeginCopyFromURLPollOperation(state); } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.preauthorizedAgentObjectId, - void 0, - blobSASSignatureValues.correlationId, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId); - } - __name(generateBlobSASQueryParametersUDK20200210, "generateBlobSASQueryParametersUDK20200210"); - function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { - blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); - if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { - throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + if (!copyId2) { + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); } - let resource = "c"; - let timestamp = blobSASSignatureValues.snapshotTime; - if (blobSASSignatureValues.blobName) { - resource = "b"; - if (blobSASSignatureValues.snapshotTime) { - resource = "bs"; - } else if (blobSASSignatureValues.versionId) { - resource = "bv"; - timestamp = blobSASSignatureValues.versionId; + await state.blobClient.abortCopyFromURL(copyId2, { + abortSignal: options.abortSignal + }); + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); + }, "cancel"); + var update = /* @__PURE__ */ __name(async function update2(options = 
{}) { + const state = this.state; + const { blobClient, copySource: copySource2, startCopyFromURLOptions } = state; + if (!state.isStarted) { + state.isStarted = true; + const result = await blobClient.startCopyFromURL(copySource2, startCopyFromURLOptions); + state.copyId = result.copyId; + if (result.copyStatus === "success") { + state.result = result; + state.isCompleted = true; } - } - let verifiedPermissions; - if (blobSASSignatureValues.permissions) { - if (blobSASSignatureValues.blobName) { - verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); - } else { - verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } else if (!state.isCompleted) { + try { + const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); + const { copyStatus, copyProgress } = result; + const prevCopyProgress = state.copyProgress; + if (copyProgress) { + state.copyProgress = copyProgress; + } + if (copyStatus === "pending" && copyProgress !== prevCopyProgress && typeof options.fireProgress === "function") { + options.fireProgress(state); + } else if (copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } else if (copyStatus === "failed") { + state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); + state.isCompleted = true; + } + } catch (err) { + state.error = err; + state.isCompleted = true; } } - const stringToSign = [ - verifiedPermissions ? verifiedPermissions : "", - blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", - blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", - getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), - userDelegationKeyCredential.userDelegationKey.signedObjectId, - userDelegationKeyCredential.userDelegationKey.signedTenantId, - userDelegationKeyCredential.userDelegationKey.signedStartsOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", - userDelegationKeyCredential.userDelegationKey.signedService, - userDelegationKeyCredential.userDelegationKey.signedVersion, - blobSASSignatureValues.preauthorizedAgentObjectId, - void 0, - blobSASSignatureValues.correlationId, - blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", - blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", - blobSASSignatureValues.version, - resource, - timestamp, - blobSASSignatureValues.encryptionScope, - blobSASSignatureValues.cacheControl, - blobSASSignatureValues.contentDisposition, - blobSASSignatureValues.contentEncoding, - blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType - ].join("\n"); - const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope); - } - __name(generateBlobSASQueryParametersUDK20201206, "generateBlobSASQueryParametersUDK20201206"); - function getCanonicalName(accountName, containerName, blobName) { - const elements = [`/blob/${accountName}/${containerName}`]; - if (blobName) { - elements.push(`/${blobName}`); - } - return elements.join(""); + return makeBlobBeginCopyFromURLPollOperation(state); + }, "update"); + var toString = /* @__PURE__ */ __name(function toString2() { + return JSON.stringify({ state: this.state }, (key, value) => { + if (key === "blobClient") { + return void 0; + } + return value; + }); + }, "toString"); + function makeBlobBeginCopyFromURLPollOperation(state) { + return { + state: Object.assign({}, state), + cancel, + toString, + update + }; } - __name(getCanonicalName, "getCanonicalName"); - function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { - const version4 = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION; - if (blobSASSignatureValues.snapshotTime && version4 < "2018-11-09") { - throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); - } - if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.snapshotTime) { - throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); - } - if (blobSASSignatureValues.versionId && version4 < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); - } - if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.versionId) { - throw RangeError("Must provide 'blobName' when providing 'versionId'."); - } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.setImmutabilityPolicy && version4 < "2020-08-04") { - throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); - } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.deleteVersion && version4 < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); - } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.permanentDelete && version4 < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); - } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.tag && version4 < "2019-12-12") { - throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); - } - if (version4 < "2020-02-10" && blobSASSignatureValues.permissions && (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { - throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); - } - if (version4 < "2021-04-10" && blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.filterByTags) { - throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); - } - if (version4 < "2020-02-10" && (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { - throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); + __name(makeBlobBeginCopyFromURLPollOperation, "makeBlobBeginCopyFromURLPollOperation"); + function rangeToString(iRange) { + if (iRange.offset < 0) { + throw new RangeError(`Range.offset cannot be smaller than 0.`); } - if (blobSASSignatureValues.encryptionScope && version4 < "2020-12-06") { - throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + if (iRange.count && iRange.count <= 0) { + throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); } - blobSASSignatureValues.version = version4; - return blobSASSignatureValues; + return iRange.count ? 
`bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` : `bytes=${iRange.offset}-`; } - __name(SASSignatureValuesSanityCheckAndAutofill, "SASSignatureValuesSanityCheckAndAutofill"); - var BlobLeaseClient = class { + __name(rangeToString, "rangeToString"); + var BatchStates; + (function(BatchStates2) { + BatchStates2[BatchStates2["Good"] = 0] = "Good"; + BatchStates2[BatchStates2["Error"] = 1] = "Error"; + })(BatchStates || (BatchStates = {})); + var Batch = class { static { - __name(this, "BlobLeaseClient"); + __name(this, "Batch"); } /** - * Creates an instance of BlobLeaseClient. - * @param client - The client to make the lease operation requests. - * @param leaseId - Initial proposed lease id. + * Creates an instance of Batch. + * @param concurrency - */ - constructor(client, leaseId2) { - const clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions()); - this._url = client.url; - if (client.name === void 0) { - this._isContainer = true; - this._containerOrBlobOperation = new Container(clientContext); - } else { - this._isContainer = false; - this._containerOrBlobOperation = new Blob$1(clientContext); - } - if (!leaseId2) { - leaseId2 = coreHttp.generateUuid(); + constructor(concurrency = 5) { + this.actives = 0; + this.completed = 0; + this.offset = 0; + this.operations = []; + this.state = BatchStates.Good; + if (concurrency < 1) { + throw new RangeError("concurrency must be larger than 0"); } - this._leaseId = leaseId2; - } - /** - * Gets the lease Id. - * - * @readonly - */ - get leaseId() { - return this._leaseId; + this.concurrency = concurrency; + this.emitter = new events.EventEmitter(); } /** - * Gets the url. + * Add a operation into queue. * - * @readonly + * @param operation - */ - get url() { - return this._url; + addOperation(operation) { + this.operations.push(async () => { + try { + this.actives++; + await operation(); + this.actives--; + this.completed++; + this.parallelExecute(); + } catch (error) { + this.emitter.emit("error", error); + } + }); } /** - * Establishes and manages a lock on a container for delete operations, or on a blob - * for write and delete operations. - * The lock duration can be 15 to 60 seconds, or can be infinite. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * Start execute operations in the queue. * - * @param duration - Must be between 15 to 60 seconds, or infinite (-1) - * @param options - option to configure lease management operations. - * @returns Response data for acquire lease operation. */ - async acquireLease(duration2, options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-acquireLease", options); - if (this._isContainer && (((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); + async do() { + if (this.operations.length === 0) { + return Promise.resolve(); } - try { - return await this._containerOrBlobOperation.acquireLease(Object.assign({ abortSignal: options.abortSignal, duration: duration2, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + this.parallelExecute(); + return new Promise((resolve, reject) => { + this.emitter.on("finish", resolve); + this.emitter.on("error", (error) => { + this.state = BatchStates.Error; + reject(error); }); - throw e; - } finally { - span.end(); - } + }); } /** - * To change the ID of the lease. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * Get next operation to be executed. Return null when reaching ends. * - * @param proposedLeaseId - the proposed new lease Id. - * @param options - option to configure lease management operations. - * @returns Response data for change lease operation. */ - async changeLease(proposedLeaseId2, options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-changeLease", options); - if (this._isContainer && (((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); - } - try { - const response = await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId2, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - this._leaseId = proposedLeaseId2; - return response; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + nextOperation() { + if (this.offset < this.operations.length) { + return this.operations[this.offset++]; } + return null; } /** - * To free the lease if it is no longer needed so that another client may - * immediately acquire a lease against the container or the blob. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * Start execute operations. One one the most important difference between + * this method with do() is that do() wraps as an sync method. * - * @param options - option to configure lease management operations. - * @returns Response data for release lease operation. 
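+     *
+     * A sketch of typical use of the surrounding `Batch` class (the `tasks`
+     * array of async thunks is a made-up stand-in):
+     *
+     * ```js
+     * const batch = new Batch(2); // run at most two operations concurrently
+     * for (const task of tasks) {
+     *   batch.addOperation(task); // task: () => Promise<void>
+     * }
+     * await batch.do(); // resolves once every queued operation completes
+     * ```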
*/ - async releaseLease(options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-releaseLease", options); - if (this._isContainer && (((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + parallelExecute() { + if (this.state === BatchStates.Error) { + return; } - try { - return await this._containerOrBlobOperation.releaseLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + if (this.completed >= this.operations.length) { + this.emitter.emit("finish"); + return; + } + while (this.actives < this.concurrency) { + const operation = this.nextOperation(); + if (operation) { + operation(); + } else { + return; + } } } + }; + var BuffersStream = class extends stream.Readable { + static { + __name(this, "BuffersStream"); + } /** - * To renew the lease. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * Creates an instance of BuffersStream that will emit the data + * contained in the array of buffers. * - * @param options - Optional option to configure lease management operations. - * @returns Response data for renew lease operation. + * @param buffers - Array of buffers containing the data + * @param byteLength - The total length of data contained in the buffers */ - async renewLease(options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-renewLease", options); - if (this._isContainer && (((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); + constructor(buffers, byteLength, options) { + super(options); + this.buffers = buffers; + this.byteLength = byteLength; + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex = 0; + this.pushedBytesLength = 0; + let buffersLength = 0; + for (const buf of this.buffers) { + buffersLength += buf.byteLength; } - try { - return await this._containerOrBlobOperation.renewLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + if (buffersLength < this.byteLength) { + throw new Error("Data size shouldn't be larger than the total length of buffers."); } } /** - * To end the lease but ensure that another client cannot acquire a new lease - * until the current lease period has expired. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container - * and - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * Internal _read() that will be called when the stream wants to pull more data in. * - * @param breakPeriod - Break period - * @param options - Optional options to configure lease management operations. - * @returns Response data for break lease operation. + * @param size - Optional. The size of data to be read */ - async breakLease(breakPeriod2, options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-breakLease", options); - if (this._isContainer && (((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone || ((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { - throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + _read(size) { + if (this.pushedBytesLength >= this.byteLength) { + this.push(null); } - try { - const operationOptions = Object.assign({ abortSignal: options.abortSignal, breakPeriod: breakPeriod2, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)); - return await this._containerOrBlobOperation.breakLease(operationOptions); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + if (!size) { + size = this.readableHighWaterMark; + } + const outBuffers = []; + let i = 0; + while (i < size && this.pushedBytesLength < this.byteLength) { + const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; + const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; + const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); + if (remaining > size - i) { + const end = this.byteOffsetInCurrentBuffer + size - i; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + this.pushedBytesLength += size - i; + this.byteOffsetInCurrentBuffer = end; + i = size; + break; + } else { + const end = this.byteOffsetInCurrentBuffer + remaining; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + if (remaining === remainingCapacityInThisBuffer) { + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex++; + } else { + this.byteOffsetInCurrentBuffer = end; + } + this.pushedBytesLength += remaining; + i += remaining; + } + } + if (outBuffers.length > 1) { + this.push(Buffer.concat(outBuffers)); + } else if (outBuffers.length === 1) { + this.push(outBuffers[0]); } } }; - var RetriableReadableStream = class extends stream.Readable { + var maxBufferLength = require("buffer").constants.MAX_LENGTH; + var PooledBuffer = class { static { - __name(this, "RetriableReadableStream"); + __name(this, "PooledBuffer"); + } + constructor(capacity, buffers, totalLength) { + this.buffers = []; + this.capacity = capacity; + this._size = 0; + const bufferNum = Math.ceil(capacity / maxBufferLength); + for (let i = 0; i < bufferNum; i++) { + let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; + if (len === 0) { + len = maxBufferLength; + } + this.buffers.push(Buffer.allocUnsafe(len)); + } + if (buffers) { + this.fill(buffers, totalLength); + } } /** - * Creates an instance of RetriableReadableStream. + * The size of the data contained in the pooled buffers. + */ + get size() { + return this._size; + } + /** + * Fill the internal buffers with data in the input buffers serially + * with respect to the total length and the total capacity of the internal buffers. + * Data copied will be shift out of the input buffers. + * + * @param buffers - Input buffers containing the data to be filled in the pooled buffer + * @param totalLength - Total length of the data to be filled in. 
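+     *
+     * Illustrative sketch (chunk contents are made up; the constructor
+     * delegates to `fill` when initial buffers are supplied):
+     *
+     * ```js
+     * const chunks = [Buffer.from("hello "), Buffer.from("world")];
+     * const pooled = new PooledBuffer(11, chunks, 11); // invokes fill(chunks, 11)
+     * console.log(pooled.size);   // 11 - every input byte was copied in
+     * console.log(chunks.length); // 0  - fully copied chunks are shifted out
+     * ```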
* - * @param source - The current ReadableStream returned from getter - * @param getter - A method calling downloading request returning - * a new ReadableStream from specified offset - * @param offset - Offset position in original data source to read - * @param count - How much data in original data source to read - * @param options - */ - constructor(source, getter, offset, count, options = {}) { - super({ highWaterMark: options.highWaterMark }); - this.retries = 0; - this.sourceDataHandler = (data) => { - if (this.options.doInjectErrorOnce) { - this.options.doInjectErrorOnce = void 0; - this.source.pause(); - this.source.removeAllListeners("data"); - this.source.emit("end"); - return; - } - this.offset += data.length; - if (this.onProgress) { - this.onProgress({ loadedBytes: this.offset - this.start }); - } - if (!this.push(data)) { - this.source.pause(); - } - }; - this.sourceErrorOrEndHandler = (err) => { - if (err && err.name === "AbortError") { - this.destroy(err); - return; + fill(buffers, totalLength) { + this._size = Math.min(this.capacity, totalLength); + let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; + while (totalCopiedNum < this._size) { + const source = buffers[i]; + const target = this.buffers[j]; + const copiedNum = source.copy(target, targetOffset, sourceOffset); + totalCopiedNum += copiedNum; + sourceOffset += copiedNum; + targetOffset += copiedNum; + if (sourceOffset === source.length) { + i++; + sourceOffset = 0; } - this.removeSourceEventHandlers(); - if (this.offset - 1 === this.end) { - this.push(null); - } else if (this.offset <= this.end) { - if (this.retries < this.maxRetryRequests) { - this.retries += 1; - this.getter(this.offset).then((newSource) => { - this.source = newSource; - this.setSourceEventHandlers(); - return; - }).catch((error) => { - this.destroy(error); - }); - } else { - this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); - } - } else { - this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); + if (targetOffset === target.length) { + j++; + targetOffset = 0; } - }; - this.getter = getter; - this.source = source; - this.start = offset; - this.offset = offset; - this.end = offset + count - 1; - this.maxRetryRequests = options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; - this.onProgress = options.onProgress; - this.options = options; - this.setSourceEventHandlers(); - } - _read() { - this.source.resume(); - } - setSourceEventHandlers() { - this.source.on("data", this.sourceDataHandler); - this.source.on("end", this.sourceErrorOrEndHandler); - this.source.on("error", this.sourceErrorOrEndHandler); - } - removeSourceEventHandlers() { - this.source.removeListener("data", this.sourceDataHandler); - this.source.removeListener("end", this.sourceErrorOrEndHandler); - this.source.removeListener("error", this.sourceErrorOrEndHandler); + } + buffers.splice(0, i); + if (buffers.length > 0) { + buffers[0] = buffers[0].slice(sourceOffset); + } } - _destroy(error, callback) { - this.removeSourceEventHandlers(); - this.source.destroy(); - callback(error === null ? void 0 : error); + /** + * Get the readable stream assembled from all the data in the internal buffers. 
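+     *
+     * Illustrative sketch (buffer contents are made up):
+     *
+     * ```js
+     * const pooled = new PooledBuffer(5, [Buffer.from("hello")], 5);
+     * pooled.getReadableStream().on("data", (c) => process.stdout.write(c)); // "hello"
+     * ```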
+ * + */ + getReadableStream() { + return new BuffersStream(this.buffers, this.size); } }; - var BlobDownloadResponse = class { + var BufferScheduler = class { static { - __name(this, "BlobDownloadResponse"); + __name(this, "BufferScheduler"); } /** - * Creates an instance of BlobDownloadResponse. + * Creates an instance of BufferScheduler. * - * @param originalResponse - - * @param getter - - * @param offset - - * @param count - - * @param options - + * @param readable - A Node.js Readable stream + * @param bufferSize - Buffer size of every maintained buffer + * @param maxBuffers - How many buffers can be allocated + * @param outgoingHandler - An async function scheduled to be + * triggered when a buffer fully filled + * with stream data + * @param concurrency - Concurrency of executing outgoingHandlers (>0) + * @param encoding - [Optional] Encoding of Readable stream when it's a string stream */ - constructor(originalResponse, getter, offset, count, options = {}) { - this.originalResponse = originalResponse; - this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); + constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { + this.emitter = new events.EventEmitter(); + this.offset = 0; + this.isStreamEnd = false; + this.isError = false; + this.executingOutgoingHandlers = 0; + this.numBuffers = 0; + this.unresolvedDataArray = []; + this.unresolvedLength = 0; + this.incoming = []; + this.outgoing = []; + if (bufferSize <= 0) { + throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); + } + if (maxBuffers <= 0) { + throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); + } + if (concurrency <= 0) { + throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); + } + this.bufferSize = bufferSize; + this.maxBuffers = maxBuffers; + this.readable = readable; + this.outgoingHandler = outgoingHandler; + this.concurrency = concurrency; + this.encoding = encoding; } /** - * Indicates that the service supports - * requests for partial file content. + * Start the scheduler, will return error when stream of any of the outgoingHandlers + * returns error. * - * @readonly */ - get acceptRanges() { - return this.originalResponse.acceptRanges; + async do() { + return new Promise((resolve, reject) => { + this.readable.on("data", (data) => { + data = typeof data === "string" ? Buffer.from(data, this.encoding) : data; + this.appendUnresolvedData(data); + if (!this.resolveData()) { + this.readable.pause(); + } + }); + this.readable.on("error", (err) => { + this.emitter.emit("error", err); + }); + this.readable.on("end", () => { + this.isStreamEnd = true; + this.emitter.emit("checkEnd"); + }); + this.emitter.on("error", (err) => { + this.isError = true; + this.readable.pause(); + reject(err); + }); + this.emitter.on("checkEnd", () => { + if (this.outgoing.length > 0) { + this.triggerOutgoingHandlers(); + return; + } + if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { + if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { + const buffer = this.shiftBufferFromUnresolvedDataArray(); + this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset).then(resolve).catch(reject); + } else if (this.unresolvedLength >= this.bufferSize) { + return; + } else { + resolve(); + } + } + }); + }); } /** - * Returns if it was previously specified - * for the file. 
+ * Insert a new data into unresolved array. * - * @readonly + * @param data - */ - get cacheControl() { - return this.originalResponse.cacheControl; + appendUnresolvedData(data) { + this.unresolvedDataArray.push(data); + this.unresolvedLength += data.length; } /** - * Returns the value that was specified - * for the 'x-ms-content-disposition' header and specifies how to process the - * response. + * Try to shift a buffer with size in blockSize. The buffer returned may be less + * than blockSize when data in unresolvedDataArray is less than bufferSize. * - * @readonly */ - get contentDisposition() { - return this.originalResponse.contentDisposition; + shiftBufferFromUnresolvedDataArray(buffer) { + if (!buffer) { + buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); + } else { + buffer.fill(this.unresolvedDataArray, this.unresolvedLength); + } + this.unresolvedLength -= buffer.size; + return buffer; } /** - * Returns the value that was specified - * for the Content-Encoding request header. + * Resolve data in unresolvedDataArray. For every buffer with size in blockSize + * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, + * then push it into outgoing to be handled by outgoing handler. * - * @readonly - */ - get contentEncoding() { - return this.originalResponse.contentEncoding; - } - /** - * Returns the value that was specified - * for the Content-Language request header. + * Return false when available buffers in incoming are not enough, else true. * - * @readonly + * @returns Return false when buffers in incoming are not enough, else true. */ - get contentLanguage() { - return this.originalResponse.contentLanguage; + resolveData() { + while (this.unresolvedLength >= this.bufferSize) { + let buffer; + if (this.incoming.length > 0) { + buffer = this.incoming.shift(); + this.shiftBufferFromUnresolvedDataArray(buffer); + } else { + if (this.numBuffers < this.maxBuffers) { + buffer = this.shiftBufferFromUnresolvedDataArray(); + this.numBuffers++; + } else { + return false; + } + } + this.outgoing.push(buffer); + this.triggerOutgoingHandlers(); + } + return true; } /** - * The current sequence number for a - * page blob. This header is not returned for block blobs or append blobs. - * - * @readonly + * Try to trigger a outgoing handler for every buffer in outgoing. Stop when + * concurrency reaches. */ - get blobSequenceNumber() { - return this.originalResponse.blobSequenceNumber; + async triggerOutgoingHandlers() { + let buffer; + do { + if (this.executingOutgoingHandlers >= this.concurrency) { + return; + } + buffer = this.outgoing.shift(); + if (buffer) { + this.triggerOutgoingHandler(buffer); + } + } while (buffer); } /** - * The blob's type. Possible values include: - * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * Trigger a outgoing handler for a buffer shifted from outgoing. * - * @readonly + * @param buffer - */ - get blobType() { - return this.originalResponse.blobType; + async triggerOutgoingHandler(buffer) { + const bufferLength = buffer.size; + this.executingOutgoingHandlers++; + this.offset += bufferLength; + try { + await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); + } catch (err) { + this.emitter.emit("error", err); + return; + } + this.executingOutgoingHandlers--; + this.reuseBuffer(buffer); + this.emitter.emit("checkEnd"); } /** - * The number of bytes present in the - * response body. + * Return buffer used by outgoing handler into incoming. 
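+     *
+     * Handing buffers back here is what lets `do()` keep at most `maxBuffers`
+     * allocations alive. An end-to-end sketch (the source stream and the
+     * `uploadChunk` sink are hypothetical):
+     *
+     * ```js
+     * const scheduler = new BufferScheduler(
+     *   sourceStream,      // a Node.js Readable
+     *   4 * 1024 * 1024,   // bufferSize: 4 MiB per pooled buffer
+     *   20,                // maxBuffers kept in the pool
+     *   (body, length, offset) => uploadChunk(body(), length, offset),
+     *   5                  // concurrency of outgoing handlers
+     * );
+     * await scheduler.do(); // resolves after the stream ends and all handlers finish
+     * ```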
* - * @readonly + * @param buffer - */ - get contentLength() { - return this.originalResponse.contentLength; + reuseBuffer(buffer) { + this.incoming.push(buffer); + if (!this.isError && this.resolveData() && !this.isStreamEnd) { + this.readable.resume(); + } } - /** - * If the file has an MD5 hash and the - * request is to read the full file, this response header is returned so that - * the client can check for message content integrity. If the request is to - * read a specified range and the 'x-ms-range-get-content-md5' is set to - * true, then the request returns an MD5 hash for the range, as long as the - * range size is less than or equal to 4 MB. If neither of these sets of - * conditions is true, then no value is returned for the 'Content-MD5' - * header. - * - * @readonly - */ - get contentMD5() { - return this.originalResponse.contentMD5; + }; + async function streamToBuffer(stream2, buffer, offset, end, encoding) { + let pos = 0; + const count = end - offset; + return new Promise((resolve, reject) => { + const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), REQUEST_TIMEOUT); + stream2.on("readable", () => { + if (pos >= count) { + clearTimeout(timeout); + resolve(); + return; + } + let chunk = stream2.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; + buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); + pos += chunkLength; + }); + stream2.on("end", () => { + clearTimeout(timeout); + if (pos < count) { + reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); + } + resolve(); + }); + stream2.on("error", (msg) => { + clearTimeout(timeout); + reject(msg); + }); + }); + } + __name(streamToBuffer, "streamToBuffer"); + async function streamToBuffer2(stream2, buffer, encoding) { + let pos = 0; + const bufferSize = buffer.length; + return new Promise((resolve, reject) => { + stream2.on("readable", () => { + let chunk = stream2.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + if (pos + chunk.length > bufferSize) { + reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); + return; + } + buffer.fill(chunk, pos, pos + chunk.length); + pos += chunk.length; + }); + stream2.on("end", () => { + resolve(pos); + }); + stream2.on("error", reject); + }); + } + __name(streamToBuffer2, "streamToBuffer2"); + async function readStreamToLocalFile(rs, file) { + return new Promise((resolve, reject) => { + const ws = fs__namespace.createWriteStream(file); + rs.on("error", (err) => { + reject(err); + }); + ws.on("error", (err) => { + reject(err); + }); + ws.on("close", resolve); + rs.pipe(ws); + }); + } + __name(readStreamToLocalFile, "readStreamToLocalFile"); + var fsStat = util__namespace.promisify(fs__namespace.stat); + var fsCreateReadStream = fs__namespace.createReadStream; + var BlobClient = class _BlobClient extends StorageClient { + static { + __name(this, "BlobClient"); } - /** - * Indicates the range of bytes returned if - * the client requested a subset of the file by setting the Range request - * header. 
- * - * @readonly - */ - get contentRange() { - return this.originalResponse.contentRange; + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + options = options || {}; + let pipeline; + let url2; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } else if (coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { + url2 = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } + pipeline = newPipeline(new AnonymousCredential(), options); + } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } else if (extractedCreds.kind === "SASConnString") { + url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url2, pipeline); + ({ blobName: this._name, containerName: this._containerName } = this.getBlobAndContainerNamesFromUrl()); + this.blobContext = new Blob$1(this.storageClientContext); + this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); + this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); } /** - * The content type specified for the file. - * The default content type is 'application/octet-stream' - * - * @readonly + * The name of the blob. */ - get contentType() { - return this.originalResponse.contentType; + get name() { + return this._name; } /** - * Conclusion time of the last attempted - * Copy File operation where this file was the destination file. This value - * can specify the time of a completed, aborted, or failed copy attempt. - * - * @readonly + * The name of the storage container the blob is associated with. 
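+     *
+     * For example (account and blob path are hypothetical):
+     *
+     * ```js
+     * const client = new BlobClient(
+     *   "https://myaccount.blob.core.windows.net/mycontainer/dir/data.txt"
+     * );
+     * console.log(client.containerName); // "mycontainer"
+     * console.log(client.name);          // "dir/data.txt"
+     * ```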
*/ - get copyCompletedOn() { - return this.originalResponse.copyCompletedOn; + get containerName() { + return this._containerName; } /** - * String identifier for the last attempted Copy - * File operation where this file was the destination file. + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. * - * @readonly + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp */ - get copyId() { - return this.originalResponse.copyId; + withSnapshot(snapshot2) { + return new _BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); } /** - * Contains the number of bytes copied and - * the total bytes in the source in the last attempted Copy File operation - * where this file was the destination file. Can show between 0 and - * Content-Length bytes copied. + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. * - * @readonly + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. */ - get copyProgress() { - return this.originalResponse.copyProgress; + withVersion(versionId2) { + return new _BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId2.length === 0 ? void 0 : versionId2), this.pipeline); } /** - * URL up to 2KB in length that specifies the - * source file used in the last attempted Copy File operation where this file - * was the destination file. + * Creates a AppendBlobClient object. * - * @readonly */ - get copySource() { - return this.originalResponse.copySource; + getAppendBlobClient() { + return new AppendBlobClient(this.url, this.pipeline); } /** - * State of the copy operation - * identified by 'x-ms-copy-id'. Possible values include: 'pending', - * 'success', 'aborted', 'failed' + * Creates a BlockBlobClient object. * - * @readonly */ - get copyStatus() { - return this.originalResponse.copyStatus; + getBlockBlobClient() { + return new BlockBlobClient(this.url, this.pipeline); } /** - * Only appears when - * x-ms-copy-status is failed or pending. Describes cause of fatal or - * non-fatal copy operation failure. + * Creates a PageBlobClient object. * - * @readonly */ - get copyStatusDescription() { - return this.originalResponse.copyStatusDescription; + getPageBlobClient() { + return new PageBlobClient(this.url, this.pipeline); } /** - * When a blob is leased, - * specifies whether the lease is of infinite or fixed duration. Possible - * values include: 'infinite', 'fixed'. + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. * - * @readonly - */ - get leaseDuration() { - return this.originalResponse.leaseDuration; - } - /** - * Lease state of the blob. Possible - * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody * - * @readonly + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. 
Will download to the end when undefined + * @param options - Optional options to Blob Download operation. + * + * + * Example usage (Node.js): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); + * console.log("Downloaded blob content:", downloaded.toString()); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * Example usage (browser): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + * console.log( + * "Downloaded blob content", + * downloaded + * ); + * + * async function blobToString(blob: Blob): Promise { + * const fileReader = new FileReader(); + * return new Promise((resolve, reject) => { + * fileReader.onloadend = (ev: any) => { + * resolve(ev.target!.result); + * }; + * fileReader.onerror = reject; + * fileReader.readAsText(blob); + * }); + * } + * ``` */ - get leaseState() { - return this.originalResponse.leaseState; + async download(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const { span, updatedOptions } = createSpan("BlobClient-download", options); + try { + const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onDownloadProgress: coreHttp.isNode ? void 0 : options.onProgress + // for Node.js, progress is reported by RetriableReadableStream + }, range: offset === 0 && !count ? 
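/* whole-blob read (offset 0 and no count): omit the Range header entirely */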
void 0 : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + if (!coreHttp.isNode) { + return wrappedRes; + } + if (options.maxRetryRequests === void 0 || options.maxRetryRequests < 0) { + options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; + } + if (res.contentLength === void 0) { + throw new RangeError(`File download response doesn't contain valid content length header`); + } + if (!res.etag) { + throw new RangeError(`File download response doesn't contain valid etag header`); + } + return new BlobDownloadResponse(wrappedRes, async (start) => { + var _a2; + const updatedDownloadOptions = { + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ifMatch: options.conditions.ifMatch || res.etag, + ifModifiedSince: options.conditions.ifModifiedSince, + ifNoneMatch: options.conditions.ifNoneMatch, + ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, + ifTags: (_a2 = options.conditions) === null || _a2 === void 0 ? void 0 : _a2.tagConditions + }, + range: rangeToString({ + count: offset + res.contentLength - start, + offset: start + }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey + }; + return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; + }, offset, res.contentLength, { + maxRetryRequests: options.maxRetryRequests, + onProgress: options.onProgress + }); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * The current lease status of the - * blob. Possible values include: 'locked', 'unlocked'. + * Returns true if the Azure blob resource represented by this client exists; false otherwise. * - * @readonly - */ - get leaseStatus() { - return this.originalResponse.leaseStatus; - } - /** - * A UTC date/time value generated by the service that - * indicates the time at which the response was initiated. + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Vice versa new blobs might be added by other clients or applications after this + * function completes. * - * @readonly + * @param options - options to Exists operation. 
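+     *
+     * Example usage (a sketch):
+     *
+     * ```js
+     * if (await blobClient.exists()) {
+     *   console.log("Blob found - though another client may delete it at any time.");
+     * }
+     * ```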
*/ - get date() { - return this.originalResponse.date; + async exists(options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-exists", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + await this.getProperties({ + abortSignal: options.abortSignal, + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions + }); + return true; + } catch (e) { + if (e.statusCode === 404) { + return false; + } else if (e.statusCode === 409 && (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { + return true; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * The number of committed blocks - * present in the blob. This header is returned only for append blobs. + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties * - * @readonly - */ - get blobCommittedBlockCount() { - return this.originalResponse.blobCommittedBlockCount; - } - /** - * The ETag contains a value that you can use to - * perform operations conditionally, in quotes. + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. * - * @readonly + * @param options - Optional options to Get Properties operation. */ - get etag() { - return this.originalResponse.etag; + async getProperties(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-getProperties", options); + try { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const res = await this.blobContext.getProperties(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * The number of tags associated with the blob + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob * - * @readonly + * @param options - Optional options to Blob Delete operation. 
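+     *
+     * Example usage (a sketch):
+     *
+     * ```js
+     * // Delete the blob together with all of its snapshots in one call:
+     * await blobClient.delete({ deleteSnapshots: "include" });
+     * ```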
*/ - get tagCount() { - return this.originalResponse.tagCount; + async delete(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-delete", options); + options.conditions = options.conditions || {}; + try { + return await this.blobContext.delete(Object.assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * The error code. + * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob * - * @readonly + * @param options - Optional options to Blob Delete operation. */ - get errorCode() { - return this.originalResponse.errorCode; + async deleteIfExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when deleting a blob or snapshot only if it exists." + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * The value of this header is set to - * true if the file data and application metadata are completely encrypted - * using the specified algorithm. Otherwise, the value is set to false (when - * the file is unencrypted, or if only parts of the file/application metadata - * are encrypted). + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob * - * @readonly + * @param options - Optional options to Blob Undelete operation. */ - get isServerEncrypted() { - return this.originalResponse.isServerEncrypted; + async undelete(options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-undelete", options); + try { + return await this.blobContext.undelete(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * If the blob has a MD5 hash, and if - * request contains range header (Range or x-ms-range), this response header - * is returned with the value of the whole blob's MD5 value. 
This value may - * or may not be equal to the value returned in Content-MD5 header, with the - * latter calculated from the requested range. + * Sets system properties on the blob. * - * @readonly - */ - get blobContentMD5() { - return this.originalResponse.blobContentMD5; - } - /** - * Returns the date and time the file was last - * modified. Any operation that modifies the file or its properties updates - * the last modified time. + * If no value provided, or no value provided for the specified blob HTTP headers, + * these blob HTTP headers without a value will be cleared. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties * - * @readonly + * @param blobHTTPHeaders - If no value provided, or no value provided for + * the specified blob HTTP headers, these blob HTTP + * headers without a value will be cleared. + * A common header to set is `blobContentType` + * enabling the browser to provide functionality + * based on file type. + * @param options - Optional options to Blob Set HTTP Headers operation. */ - get lastModified() { - return this.originalResponse.lastModified; + async setHTTPHeaders(blobHTTPHeaders, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setHTTPHeaders", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.setHttpHeaders(Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Returns the UTC date and time generated by the service that indicates the time at which the blob was - * last read or written to. + * Sets user-defined metadata for the specified blob as one or more name-value pairs. * - * @readonly - */ - get lastAccessed() { - return this.originalResponse.lastAccessed; - } - /** - * Returns the date and time the blob was created. + * If no option provided, or no metadata defined in the parameter, the blob + * metadata will be removed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata * - * @readonly + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Optional options to Set Metadata operation. */ - get createdOn() { - return this.originalResponse.createdOn; + async setMetadata(metadata2, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setMetadata", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: metadata2, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)));
+ } catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message
+ });
+ throw e;
+ } finally {
+ span.end();
+ }
}
/**
- * A name-value pair
- * to associate with a file storage object.
+ * Sets tags on the underlying blob.
+ * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters.
+ * Valid tag key and value characters include lower and upper case letters, digits (0-9),
+ * space (' '), plus ('+'), minus ('-'), period ('.'), forward slash ('/'), colon (':'), equals ('='), and underscore ('_').
*
- * @readonly
+ * @param tags -
+ * @param options -
*/
- get metadata() {
- return this.originalResponse.metadata;
+ async setTags(tags2, options = {}) {
+ var _a;
+ const { span, updatedOptions } = createSpan("BlobClient-setTags", options);
+ try {
+ return await this.blobContext.setTags(Object.assign(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags2) }));
+ } catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message
+ });
+ throw e;
+ } finally {
+ span.end();
+ }
}
/**
- * This header uniquely identifies the request
- * that was made and can be used for troubleshooting the request.
+ * Gets the tags associated with the underlying blob.
*
- * @readonly
+ * @param options -
*/
- get requestId() {
- return this.originalResponse.requestId;
+ async getTags(options = {}) {
+ var _a;
+ const { span, updatedOptions } = createSpan("BlobClient-getTags", options);
+ try {
+ const response = await this.blobContext.getTags(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)));
+ const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} });
+ return wrappedResponse;
+ } catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message
+ });
+ throw e;
+ } finally {
+ span.end();
+ }
}
/**
- * If a client request id header is sent in the request, this header will be present in the
- * response with the same value.
+ * Get a {@link BlobLeaseClient} that manages leases on the blob.
*
- * @readonly
+ * @param proposeLeaseId - Initial proposed lease Id.
+ * @returns A new BlobLeaseClient object for managing leases on the blob.
*/
- get clientRequestId() {
- return this.originalResponse.clientRequestId;
+ getBlobLeaseClient(proposeLeaseId) {
+ return new BlobLeaseClient(this, proposeLeaseId);
}
/**
- * Indicates the version of the Blob service used
- * to execute the request.
+ * Creates a read-only snapshot of a blob.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob
*
- * @readonly
+ * @param options - Optional options to the Blob Create Snapshot operation. 
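+ *
+ * Example usage (a minimal sketch; `blobClient` is assumed to be an existing {@link BlobClient}):
+ *
+ * ```js
+ * // Take a snapshot, then read the frozen state through a snapshot-scoped client.
+ * const snapshotResponse = await blobClient.createSnapshot();
+ * const snapshotClient = blobClient.withSnapshot(snapshotResponse.snapshot);
+ * const properties = await snapshotClient.getProperties();
+ * ```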
*/ - get version() { - return this.originalResponse.version; + async createSnapshot(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-createSnapshot", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.createSnapshot(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Indicates the versionId of the downloaded blob version. + * Asynchronously copies a blob to a destination within the storage account. + * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. + * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. + * Note that the onProgress callback will not be invoked if the operation completes in the first + * request, and attempting to cancel a completed copy will result in an error being thrown. * - * @readonly - */ - get versionId() { - return this.originalResponse.versionId; - } - /** - * Indicates whether version of this blob is a current version. + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob * - * @readonly - */ - get isCurrentVersion() { - return this.originalResponse.isCurrentVersion; - } - /** - * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned - * when the blob was encrypted with a customer-provided key. + * Example using automatic polling: * - * @readonly - */ - get encryptionKeySha256() { - return this.originalResponse.encryptionKeySha256; - } - /** - * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to - * true, then the request returns a crc64 for the range, as long as the range size is less than - * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is - * specified in the same request, it will fail with 400(Bad Request) - */ - get contentCrc64() { - return this.originalResponse.contentCrc64; - } - /** - * Object Replication Policy Id of the destination blob. + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * const result = await copyPoller.pollUntilDone(); + * ``` * - * @readonly - */ - get objectReplicationDestinationPolicyId() { - return this.originalResponse.objectReplicationDestinationPolicyId; - } - /** - * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. 
+ * Example using manual polling: * - * @readonly - */ - get objectReplicationSourceProperties() { - return this.originalResponse.objectReplicationSourceProperties; - } - /** - * If this blob has been sealed. + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * while (!poller.isDone()) { + * await poller.poll(); + * } + * const result = copyPoller.getResult(); + * ``` * - * @readonly - */ - get isSealed() { - return this.originalResponse.isSealed; - } - /** - * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. + * Example using progress updates: * - * @readonly - */ - get immutabilityPolicyExpiresOn() { - return this.originalResponse.immutabilityPolicyExpiresOn; - } - /** - * Indicates immutability policy mode. + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * onProgress(state) { + * console.log(`Progress: ${state.copyProgress}`); + * } + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` * - * @readonly - */ - get immutabilityPolicyMode() { - return this.originalResponse.immutabilityPolicyMode; - } - /** - * Indicates if a legal hold is present on the blob. + * Example using a changing polling interval (default 15 seconds): * - * @readonly - */ - get legalHold() { - return this.originalResponse.legalHold; - } - /** - * The response body as a browser Blob. - * Always undefined in node.js. + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * intervalInMs: 1000 // poll blob every 1 second for copy progress + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` * - * @readonly - */ - get contentAsBlob() { - return this.originalResponse.blobBody; - } - /** - * The response body as a node.js Readable stream. - * Always undefined in the browser. + * Example using copy cancellation: * - * It will automatically retry when internal read stream unexpected ends. + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * // cancel operation after starting it. + * try { + * await copyPoller.cancelOperation(); + * // calls to get the result now throw PollerCancelledError + * await copyPoller.getResult(); + * } catch (err) { + * if (err.name === 'PollerCancelledError') { + * console.log('The copy was cancelled.'); + * } + * } + * ``` * - * @readonly + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. */ - get readableStreamBody() { - return coreHttp.isNode ? this.blobDownloadStream : void 0; + async beginCopyFromURL(copySource2, options = {}) { + const client = { + abortCopyFromURL: /* @__PURE__ */ __name((...args) => this.abortCopyFromURL(...args), "abortCopyFromURL"), + getProperties: /* @__PURE__ */ __name((...args) => this.getProperties(...args), "getProperties"), + startCopyFromURL: /* @__PURE__ */ __name((...args) => this.startCopyFromURL(...args), "startCopyFromURL") + }; + const poller = new BlobBeginCopyFromUrlPoller({ + blobClient: client, + copySource: copySource2, + intervalInMs: options.intervalInMs, + onProgress: options.onProgress, + resumeFrom: options.resumeFrom, + startCopyFromURLOptions: options + }); + await poller.poll(); + return poller; } /** - * The HTTP response. + * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero + * length and full metadata. Version 2012-02-12 and newer. 
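+ *
+ * Example usage (a minimal sketch; assumes the copy was started on this client with `startCopyFromURL`):
+ *
+ * ```js
+ * const { copyId } = await blobClient.startCopyFromURL('url');
+ * // Abort the pending copy; the destination blob is left with zero length and full metadata.
+ * await blobClient.abortCopyFromURL(copyId);
+ * ```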
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob
+ *
+ * @param copyId - Id of the Copy From URL operation.
+ * @param options - Optional options to the Blob Abort Copy From URL operation.
*/
- get _response() {
- return this.originalResponse._response;
- }
- };
- var AVRO_SYNC_MARKER_SIZE = 16;
- var AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]);
- var AVRO_CODEC_KEY = "avro.codec";
- var AVRO_SCHEMA_KEY = "avro.schema";
- var AvroParser = class _AvroParser {
- static {
- __name(this, "AvroParser");
+ async abortCopyFromURL(copyId2, options = {}) {
+ const { span, updatedOptions } = createSpan("BlobClient-abortCopyFromURL", options);
+ try {
+ return await this.blobContext.abortCopyFromURL(copyId2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions)));
+ } catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message
+ });
+ throw e;
+ } finally {
+ span.end();
+ }
}
/**
- * Reads a fixed number of bytes from the stream.
+ * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not
+ * return a response until the copy is complete.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url
*
- * @param stream -
- * @param length -
+ * @param copySource - The source URL to copy from; a Shared Access Signature (SAS) may be needed for authentication
* @param options -
*/
- static async readFixedBytes(stream2, length, options = {}) {
- const bytes = await stream2.read(length, { abortSignal: options.abortSignal });
- if (bytes.length !== length) {
- throw new Error("Hit stream end.");
+ async syncCopyFromURL(copySource2, options = {}) {
+ var _a, _b, _c;
+ const { span, updatedOptions } = createSpan("BlobClient-syncCopyFromURL", options);
+ options.conditions = options.conditions || {};
+ options.sourceConditions = options.sourceConditions || {};
+ try {
+ return await this.blobContext.copyFromURL(copySource2, Object.assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: {
+ sourceIfMatch: options.sourceConditions.ifMatch,
+ sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,
+ sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,
+ sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince
+ }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope, copySourceTags: options.copySourceTags }, convertTracingToRequestOptionsBase(updatedOptions)));
+ } catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message
+ });
+ throw e;
+ } finally {
+ span.end();
}
}
/**
- * Reads a single byte from the stream.
+ * Sets the tier on a blob. 
The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier * - * @param stream - - * @param options - + * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. + * @param options - Optional options to the Blob Set Tier operation. */ - static async readByte(stream2, options = {}) { - const buf = await _AvroParser.readFixedBytes(stream2, 1, options); - return buf[0]; - } - // int and long are stored in variable-length zig-zag coding. - // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt - // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types - static async readZigZagLong(stream2, options = {}) { - let zigZagEncoded = 0; - let significanceInBit = 0; - let byte, haveMoreByte, significanceInFloat; - do { - byte = await _AvroParser.readByte(stream2, options); - haveMoreByte = byte & 128; - zigZagEncoded |= (byte & 127) << significanceInBit; - significanceInBit += 7; - } while (haveMoreByte && significanceInBit < 28); - if (haveMoreByte) { - zigZagEncoded = zigZagEncoded; - significanceInFloat = 268435456; - do { - byte = await _AvroParser.readByte(stream2, options); - zigZagEncoded += (byte & 127) * significanceInFloat; - significanceInFloat *= 128; - } while (byte & 128); - const res = (zigZagEncoded % 2 ? -(zigZagEncoded + 1) : zigZagEncoded) / 2; - if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { - throw new Error("Integer overflow."); - } - return res; + async setAccessTier(tier2, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setAccessTier", options); + try { + return await this.blobContext.setTier(toAccessTier(tier2), Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } - return zigZagEncoded >> 1 ^ -(zigZagEncoded & 1); - } - static async readLong(stream2, options = {}) { - return _AvroParser.readZigZagLong(stream2, options); } - static async readInt(stream2, options = {}) { - return _AvroParser.readZigZagLong(stream2, options); - } - static async readNull() { - return null; - } - static async readBoolean(stream2, options = {}) { - const b = await _AvroParser.readByte(stream2, options); - if (b === 1) { - return true; - } else if (b === 0) { - return false; + async downloadToBuffer(param1, param2, param3, param4 = {}) { + let buffer; + let offset = 0; + let count = 0; + let options = param4; + if (param1 instanceof Buffer) { + buffer = param1; + offset = param2 || 0; + count = typeof param3 === "number" ? 
param3 : 0; } else { - throw new Error("Byte was not a boolean."); - } - } - static async readFloat(stream2, options = {}) { - const u8arr = await _AvroParser.readFixedBytes(stream2, 4, options); - const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); - return view.getFloat32(0, true); - } - static async readDouble(stream2, options = {}) { - const u8arr = await _AvroParser.readFixedBytes(stream2, 8, options); - const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); - return view.getFloat64(0, true); - } - static async readBytes(stream2, options = {}) { - const size = await _AvroParser.readLong(stream2, options); - if (size < 0) { - throw new Error("Bytes size was negative."); - } - return stream2.read(size, { abortSignal: options.abortSignal }); - } - static async readString(stream2, options = {}) { - const u8arr = await _AvroParser.readBytes(stream2, options); - const utf8decoder = new TextDecoder(); - return utf8decoder.decode(u8arr); - } - static async readMapPair(stream2, readItemMethod, options = {}) { - const key = await _AvroParser.readString(stream2, options); - const value = await readItemMethod(stream2, options); - return { key, value }; - } - static async readMap(stream2, readItemMethod, options = {}) { - const readPairMethod = /* @__PURE__ */ __name((s, opts = {}) => { - return _AvroParser.readMapPair(s, readItemMethod, opts); - }, "readPairMethod"); - const pairs = await _AvroParser.readArray(stream2, readPairMethod, options); - const dict = {}; - for (const pair of pairs) { - dict[pair.key] = pair.value; + offset = typeof param1 === "number" ? param1 : 0; + count = typeof param2 === "number" ? param2 : 0; + options = param3 || {}; } - return dict; - } - static async readArray(stream2, readItemMethod, options = {}) { - const items = []; - for (let count = await _AvroParser.readLong(stream2, options); count !== 0; count = await _AvroParser.readLong(stream2, options)) { - if (count < 0) { - await _AvroParser.readLong(stream2, options); - count = -count; + const { span, updatedOptions } = createSpan("BlobClient-downloadToBuffer", options); + try { + if (!options.blockSize) { + options.blockSize = 0; } - while (count--) { - const item = await readItemMethod(stream2, options); - items.push(item); + if (options.blockSize < 0) { + throw new RangeError("blockSize option must be >= 0"); } - } - return items; - } - }; - var AvroComplex; - (function(AvroComplex2) { - AvroComplex2["RECORD"] = "record"; - AvroComplex2["ENUM"] = "enum"; - AvroComplex2["ARRAY"] = "array"; - AvroComplex2["MAP"] = "map"; - AvroComplex2["UNION"] = "union"; - AvroComplex2["FIXED"] = "fixed"; - })(AvroComplex || (AvroComplex = {})); - var AvroPrimitive; - (function(AvroPrimitive2) { - AvroPrimitive2["NULL"] = "null"; - AvroPrimitive2["BOOLEAN"] = "boolean"; - AvroPrimitive2["INT"] = "int"; - AvroPrimitive2["LONG"] = "long"; - AvroPrimitive2["FLOAT"] = "float"; - AvroPrimitive2["DOUBLE"] = "double"; - AvroPrimitive2["BYTES"] = "bytes"; - AvroPrimitive2["STRING"] = "string"; - })(AvroPrimitive || (AvroPrimitive = {})); - var AvroType = class _AvroType { - static { - __name(this, "AvroType"); - } - /** - * Determines the AvroType from the Avro Schema. 
- */ - static fromSchema(schema) { - if (typeof schema === "string") { - return _AvroType.fromStringSchema(schema); - } else if (Array.isArray(schema)) { - return _AvroType.fromArraySchema(schema); - } else { - return _AvroType.fromObjectSchema(schema); - } - } - static fromStringSchema(schema) { - switch (schema) { - case AvroPrimitive.NULL: - case AvroPrimitive.BOOLEAN: - case AvroPrimitive.INT: - case AvroPrimitive.LONG: - case AvroPrimitive.FLOAT: - case AvroPrimitive.DOUBLE: - case AvroPrimitive.BYTES: - case AvroPrimitive.STRING: - return new AvroPrimitiveType(schema); - default: - throw new Error(`Unexpected Avro type ${schema}`); - } - } - static fromArraySchema(schema) { - return new AvroUnionType(schema.map(_AvroType.fromSchema)); - } - static fromObjectSchema(schema) { - const type = schema.type; - try { - return _AvroType.fromStringSchema(type); - } catch (err) { - } - switch (type) { - case AvroComplex.RECORD: - if (schema.aliases) { - throw new Error(`aliases currently is not supported, schema: ${schema}`); - } - if (!schema.name) { - throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); - } - const fields = {}; - if (!schema.fields) { - throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); - } - for (const field of schema.fields) { - fields[field.name] = _AvroType.fromSchema(field.type); - } - return new AvroRecordType(fields, schema.name); - case AvroComplex.ENUM: - if (schema.aliases) { - throw new Error(`aliases currently is not supported, schema: ${schema}`); - } - if (!schema.symbols) { - throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); + if (options.blockSize === 0) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + if (offset < 0) { + throw new RangeError("offset option must be >= 0"); + } + if (count && count <= 0) { + throw new RangeError("count option must be greater than 0"); + } + if (!options.conditions) { + options.conditions = {}; + } + if (!count) { + const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + count = response.contentLength - offset; + if (count < 0) { + throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); } - return new AvroEnumType(schema.symbols); - case AvroComplex.MAP: - if (!schema.values) { - throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); + } + if (!buffer) { + try { + buffer = Buffer.alloc(count); + } catch (error) { + throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile". 
${error.message}`); } - return new AvroMapType(_AvroType.fromSchema(schema.values)); - case AvroComplex.ARRAY: - // Unused today - case AvroComplex.FIXED: - // Unused today - default: - throw new Error(`Unexpected Avro type ${type} in ${schema}`); - } - } - }; - var AvroPrimitiveType = class extends AvroType { - static { - __name(this, "AvroPrimitiveType"); - } - constructor(primitive) { - super(); - this._primitive = primitive; - } - read(stream2, options = {}) { - switch (this._primitive) { - case AvroPrimitive.NULL: - return AvroParser.readNull(); - case AvroPrimitive.BOOLEAN: - return AvroParser.readBoolean(stream2, options); - case AvroPrimitive.INT: - return AvroParser.readInt(stream2, options); - case AvroPrimitive.LONG: - return AvroParser.readLong(stream2, options); - case AvroPrimitive.FLOAT: - return AvroParser.readFloat(stream2, options); - case AvroPrimitive.DOUBLE: - return AvroParser.readDouble(stream2, options); - case AvroPrimitive.BYTES: - return AvroParser.readBytes(stream2, options); - case AvroPrimitive.STRING: - return AvroParser.readString(stream2, options); - default: - throw new Error("Unknown Avro Primitive"); - } - } - }; - var AvroEnumType = class extends AvroType { - static { - __name(this, "AvroEnumType"); - } - constructor(symbols) { - super(); - this._symbols = symbols; - } - async read(stream2, options = {}) { - const value = await AvroParser.readInt(stream2, options); - return this._symbols[value]; - } - }; - var AvroUnionType = class extends AvroType { - static { - __name(this, "AvroUnionType"); - } - constructor(types) { - super(); - this._types = types; - } - async read(stream2, options = {}) { - const typeIndex = await AvroParser.readInt(stream2, options); - return this._types[typeIndex].read(stream2, options); - } - }; - var AvroMapType = class extends AvroType { - static { - __name(this, "AvroMapType"); - } - constructor(itemType) { - super(); - this._itemType = itemType; - } - read(stream2, options = {}) { - const readItemMethod = /* @__PURE__ */ __name((s, opts) => { - return this._itemType.read(s, opts); - }, "readItemMethod"); - return AvroParser.readMap(stream2, readItemMethod, options); - } - }; - var AvroRecordType = class extends AvroType { - static { - __name(this, "AvroRecordType"); - } - constructor(fields, name) { - super(); - this._fields = fields; - this._name = name; - } - async read(stream2, options = {}) { - const record = {}; - record["$schema"] = this._name; - for (const key in this._fields) { - if (Object.prototype.hasOwnProperty.call(this._fields, key)) { - record[key] = await this._fields[key].read(stream2, options); } + if (buffer.length < count) { + throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); + } + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let off = offset; off < offset + count; off = off + options.blockSize) { + batch.addOperation(async () => { + let chunkEnd = offset + count; + if (off + options.blockSize < chunkEnd) { + chunkEnd = off + options.blockSize; + } + const response = await this.download(off, chunkEnd - off, { + abortSignal: options.abortSignal, + conditions: options.conditions, + maxRetryRequests: options.maxRetryRequestsPerBlock, + customerProvidedKey: options.customerProvidedKey, + tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) + }); + const stream2 = response.readableStreamBody; + await streamToBuffer(stream2, buffer, off - 
offset, chunkEnd - offset);
+ transferProgress += chunkEnd - off;
+ if (options.onProgress) {
+ options.onProgress({ loadedBytes: transferProgress });
+ }
+ });
+ }
+ await batch.do();
+ return buffer;
+ } catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message
+ });
+ throw e;
+ } finally {
+ span.end();
}
}
/**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Downloads an Azure Blob to a local file.
+ * Fails if the given file path already exists.
+ * Offset and count are optional, pass 0 and undefined respectively to download the entire blob.
+ *
+ * @param filePath -
+ * @param offset - From which position of the block blob to download.
+ * @param count - How much data to be downloaded. Will download to the end when passing undefined.
+ * @param options - Options to the Blob download operation.
+ * @returns The response data for blob download operation,
+ * but with readableStreamBody set to undefined since its
+ * content is already read and written into a local file
+ * at the specified path. 
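+ *
+ * Example usage (a minimal sketch; the local file path is a placeholder):
+ *
+ * ```js
+ * // Download the entire blob into a local file (Node.js only).
+ * await blobClient.downloadToFile("downloaded.blob");
+ * ```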
+ */ + async downloadToFile(filePath, offset = 0, count, options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-downloadToFile", options); + try { + const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + if (response.readableStreamBody) { + await readStreamToLocalFile(response.readableStreamBody, filePath); } + response.blobDownloadStream = void 0; + return response; + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } } - hasNext() { - return !this._initialized || this._itemsRemainingInBlock > 0; - } - parseObjects(options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* parseObjects_1() { - if (!this._initialized) { - yield tslib.__await(this.initialize(options)); + getBlobAndContainerNamesFromUrl() { + let containerName; + let blobName; + try { + const parsedUrl = coreHttp.URLBuilder.parse(this.url); + if (parsedUrl.getHost().split(".")[1] === "blob") { + const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } else if (isIpEndpointStyle(parsedUrl)) { + const pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); + containerName = pathComponents[2]; + blobName = pathComponents[4]; + } else { + const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; } - while (this.hasNext()) { - const result = yield tslib.__await(this._itemType.read(this._dataStream, { - abortSignal: options.abortSignal - })); - this._itemsRemainingInBlock--; - this._objectIndex++; - if (this._itemsRemainingInBlock === 0) { - const marker2 = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal - })); - this._blockOffset = this._initialBlockOffset + this._dataStream.position; - this._objectIndex = 0; - if (!arraysEqual(this._syncMarker, marker2)) { - throw new Error("Stream is not a valid Avro file."); - } - try { - this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal - })); - } catch (err) { - this._itemsRemainingInBlock = 0; - } - if (this._itemsRemainingInBlock > 0) { - yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); - } - } - yield yield tslib.__await(result); + containerName = decodeURIComponent(containerName); + blobName = decodeURIComponent(blobName); + blobName = blobName.replace(/\\/g, "/"); + if (!containerName) { + throw new Error("Provided containerName is invalid."); } - }, "parseObjects_1")); - } - }; - var AvroReadable = class { - static { - __name(this, "AvroReadable"); - } - }; - var ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); - var AvroReadableFromStream = class extends AvroReadable { - static { - __name(this, "AvroReadableFromStream"); - } - constructor(readable) { - super(); - this._readable = readable; - this._position = 0; + return { blobName, containerName }; + } catch (error) { + throw new Error("Unable to extract blobName and containerName with provided information."); + } } - toUint8Array(data) { - if (typeof data === "string") { - return Buffer.from(data); + 
/** + * Asynchronously copies a blob to a destination within the storage account. + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. + */ + async startCopyFromURL(copySource2, options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("BlobClient-startCopyFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + return await this.blobContext.startCopyFromURL(copySource2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions.tagConditions + }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } - return data; } - get position() { - return this._position; + /** + * Only available for BlobClient constructed with a shared key credential. + * + * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); } - async read(size, options = {}) { - var _a; - if ((_a = options.abortSignal) === null || _a === void 0 ? 
void 0 : _a.aborted) {
- throw ABORT_ERROR;
- }
- if (size < 0) {
- throw new Error(`size parameter should be positive: ${size}`);
- }
- if (size === 0) {
- return new Uint8Array();
+ /**
+ * Delete the immutability policy on the blob.
+ *
+ * @param options - Optional options to delete immutability policy on the blob.
+ */
+ async deleteImmutabilityPolicy(options) {
+ const { span, updatedOptions } = createSpan("BlobClient-deleteImmutabilityPolicy", options);
+ try {
+ return await this.blobContext.deleteImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)));
+ } catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message
+ });
+ throw e;
+ } finally {
+ span.end();
}
- if (!this._readable.readable) {
- throw new Error("Stream no longer readable.");
+ }
+ /**
+ * Set the immutability policy on the blob.
+ *
+ * @param options - Optional options to set immutability policy on the blob.
+ */
+ async setImmutabilityPolicy(immutabilityPolicy, options) {
+ const { span, updatedOptions } = createSpan("BlobClient-setImmutabilityPolicy", options);
+ try {
+ return await this.blobContext.setImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, modifiedAccessConditions: options === null || options === void 0 ? void 0 : options.modifiedAccessCondition }, convertTracingToRequestOptionsBase(updatedOptions)));
+ } catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message
+ });
+ throw e;
+ } finally {
+ span.end();
}
- const chunk = this._readable.read(size);
- if (chunk) {
- this._position += chunk.length;
- return this.toUint8Array(chunk);
- } else {
- return new Promise((resolve, reject) => {
- const cleanUp = /* @__PURE__ */ __name(() => {
- this._readable.removeListener("readable", readableCallback);
- this._readable.removeListener("error", rejectCallback);
- this._readable.removeListener("end", rejectCallback);
- this._readable.removeListener("close", rejectCallback);
- if (options.abortSignal) {
- options.abortSignal.removeEventListener("abort", abortHandler);
- }
- }, "cleanUp");
- const readableCallback = /* @__PURE__ */ __name(() => {
- const callbackChunk = this._readable.read(size);
- if (callbackChunk) {
- this._position += callbackChunk.length;
- cleanUp();
- resolve(this.toUint8Array(callbackChunk));
- }
- }, "readableCallback");
- const rejectCallback = /* @__PURE__ */ __name(() => {
- cleanUp();
- reject();
- }, "rejectCallback");
- const abortHandler = /* @__PURE__ */ __name(() => {
- cleanUp();
- reject(ABORT_ERROR);
- }, "abortHandler");
- this._readable.on("readable", readableCallback);
- this._readable.once("error", rejectCallback);
- this._readable.once("end", rejectCallback);
- this._readable.once("close", rejectCallback);
- if (options.abortSignal) {
- options.abortSignal.addEventListener("abort", abortHandler);
- }
+ }
+ /**
+ * Set legal hold on the blob.
+ *
+ * @param options - Optional options to set legal hold on the blob.
+ */
+ async setLegalHold(legalHoldEnabled, options) {
+ const { span, updatedOptions } = createSpan("BlobClient-setLegalHold", options);
+ try {
+ return await this.blobContext.setLegalHold(legalHoldEnabled, Object.assign({ abortSignal: options === null || options === void 0 ? 
void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)));
+ } catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message
+ });
+ throw e;
+ } finally {
+ span.end();
}
}
};
var AppendBlobClient = class _AppendBlobClient extends BlobClient {
static {
__name(this, "AppendBlobClient");
}
constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) {
let pipeline;
let url2;
options = options || {};
if (isPipelineLike(credentialOrPipelineOrContainerName)) {
url2 = urlOrConnectionString;
pipeline = credentialOrPipelineOrContainerName;
} else if (coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) {
url2 = urlOrConnectionString;
options = blobNameOrOptions;
pipeline = newPipeline(credentialOrPipelineOrContainerName, options);
} else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") {
url2 = urlOrConnectionString;
pipeline = newPipeline(new AnonymousCredential(), options);
} else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") {
const containerName = credentialOrPipelineOrContainerName;
const blobName = blobNameOrOptions;
const extractedCreds = extractConnectionStringParts(urlOrConnectionString);
if (extractedCreds.kind === "AccountConnString") {
if (coreHttp.isNode) {
const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));
if (!options.proxyOptions) {
options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri);
}
pipeline = newPipeline(sharedKeyCredential, options);
} else {
throw new Error("Account connection string is only supported in Node.js environment");
}
} else if (extractedCreds.kind === "SASConnString") {
url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas;
pipeline = newPipeline(new AnonymousCredential(), options);
} else {
throw new Error("Connection string must be either an Account connection string or a SAS connection string");
}
} else {
throw new Error("Expecting non-empty strings for containerName and blobName parameters");
}
super(url2, pipeline);
this.appendBlobContext = new AppendBlob(this.storageClientContext);
}
/**
* Creates a new AppendBlobClient object identical to the source but with the
* specified snapshot timestamp.
* Providing "" will remove the snapshot and return a client to the base blob.
*
* @param snapshot - The snapshot timestamp.
* @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. 
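*
* Example usage (a minimal sketch; `appendBlobClient` is assumed to be an existing AppendBlobClient):
*
* ```js
* // Create a snapshot, then address it through a snapshot-scoped client.
* const { snapshot } = await appendBlobClient.createSnapshot();
* const snapshotClient = appendBlobClient.withSnapshot(snapshot);
* ```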
*/ - constructor(source, options = {}) { - super(); - this.avroPaused = true; - this.source = source; - this.onProgress = options.onProgress; - this.onError = options.onError; - this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); - this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); + withSnapshot(snapshot2) { + return new _AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); } - _read() { - if (this.avroPaused) { - this.readInternal().catch((err) => { - this.emit("error", err); + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - Options to the Append Block Create operation. + * + * + * Example usage: + * + * ```js + * const appendBlobClient = containerClient.getAppendBlobClient(""); + * await appendBlobClient.create(); + * ``` + */ + async create(options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("AppendBlobClient-create", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.create(0, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, legalHold: options.legalHold, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message }); + throw e; + } finally { + span.end(); } } - async readInternal() { - this.avroPaused = false; - let avroNext; - do { - avroNext = await this.avroIter.next(); - if (avroNext.done) { - break; - } - const obj = avroNext.value; - const schema = obj.$schema; - if (typeof schema !== "string") { - throw Error("Missing schema in avro record."); - } - switch (schema) { - case "com.microsoft.azure.storage.queryBlobContents.resultData": - { - const data = obj.data; - if (data instanceof Uint8Array === false) { - throw Error("Invalid data in avro result record."); - } - if (!this.push(Buffer.from(data))) { - this.avroPaused = true; - } - } - break; - case "com.microsoft.azure.storage.queryBlobContents.progress": - { - const bytesScanned = obj.bytesScanned; - if (typeof bytesScanned !== "number") { - throw Error("Invalid bytesScanned in avro progress record."); - } - if (this.onProgress) { - this.onProgress({ loadedBytes: bytesScanned }); - } - } - break; - case "com.microsoft.azure.storage.queryBlobContents.end": - if (this.onProgress) { - const totalBytes = obj.totalBytes; - if (typeof totalBytes !== "number") { - throw Error("Invalid totalBytes in avro end record."); - } - this.onProgress({ loadedBytes: totalBytes }); - } - this.push(null); - break; - case "com.microsoft.azure.storage.queryBlobContents.error": - if (this.onError) { - const fatal = obj.fatal; - if (typeof fatal !== "boolean") { - throw Error("Invalid fatal in avro error record."); - } - const name = obj.name; - if (typeof name !== "string") { - throw Error("Invalid name in avro error record."); - } - const description = obj.description; - if (typeof description !== "string") { - throw Error("Invalid description in avro error record."); - } - const position = obj.position; - if (typeof position !== "number") { - throw Error("Invalid position in avro error record."); - } - this.onError({ - position, - name, - isFatal: fatal, - description - }); - } - break; - default: - throw Error(`Unknown schema ${schema} in avro progress record.`); + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - + */ + async createIfNotExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("AppendBlobClient-createIfNotExists", options); + const conditions = { ifNoneMatch: ETagAny }; + try { + const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist." + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); } - } while (!avroNext.done && !this.avroPaused); - } - }; - var BlobQueryResponse = class { - static { - __name(this, "BlobQueryResponse"); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Creates an instance of BlobQueryResponse. + * Seals the append blob, making it read only. * - * @param originalResponse - * @param options - */ - constructor(originalResponse, options = {}) { - this.originalResponse = originalResponse; - this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); + async seal(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-seal", options); + options.conditions = options.conditions || {}; + try { + return await this.appendBlobContext.seal(Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } + } + /** + * Commits a new block of data to the end of the existing append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/append-block + * + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```js + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob. + * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` + */ + async appendBlock(body2, contentLength2, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlock", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.appendBlock(contentLength2, body2, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Indicates that the service supports - * requests for partial file content. 
+ * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url * - * @readonly + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. + * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - */ - get acceptRanges() { - return this.originalResponse.acceptRanges; + async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlockFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, Object.assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } - /** - * Returns if it was previously specified - * for the file. - * - * @readonly - */ - get cacheControl() { - return this.originalResponse.cacheControl; + }; + var BlockBlobClient = class _BlockBlobClient extends BlobClient { + static { + __name(this, "BlockBlobClient"); } - /** - * Returns the value that was specified - * for the 'x-ms-content-disposition' header and specifies how to process the - * response. 
- * - * @readonly - */ - get contentDisposition() { - return this.originalResponse.contentDisposition; + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + let pipeline; + let url2; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } else if (coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { + url2 = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } + pipeline = newPipeline(new AnonymousCredential(), options); + } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } else if (extractedCreds.kind === "SASConnString") { + url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url2, pipeline); + this.blockBlobContext = new BlockBlob(this.storageClientContext); + this._blobContext = new Blob$1(this.storageClientContext); } /** - * Returns the value that was specified - * for the Content-Encoding request header. + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. * - * @readonly + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. */ - get contentEncoding() { - return this.originalResponse.contentEncoding; + withSnapshot(snapshot2) { + return new _BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); } /** - * Returns the value that was specified - * for the Content-Language request header. + * ONLY AVAILABLE IN NODE.JS RUNTIME. 
* - * @readonly - */ - get contentLanguage() { - return this.originalResponse.contentLanguage; - } - /** - * The current sequence number for a - * page blob. This header is not returned for block blobs or append blobs. + * Quick query for a JSON or CSV formatted blob. * - * @readonly - */ - get blobSequenceNumber() { - return this.originalResponse.blobSequenceNumber; - } - /** - * The blob's type. Possible values include: - * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * Example usage (Node.js): * - * @readonly - */ - get blobType() { - return this.originalResponse.blobType; - } - /** - * The number of bytes present in the - * response body. + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); * - * @readonly - */ - get contentLength() { - return this.originalResponse.contentLength; - } - /** - * If the file has an MD5 hash and the - * request is to read the full file, this response header is returned so that - * the client can check for message content integrity. If the request is to - * read a specified range and the 'x-ms-range-get-content-md5' is set to - * true, then the request returns an MD5 hash for the range, as long as the - * range size is less than or equal to 4 MB. If neither of these sets of - * conditions is true, then no value is returned for the 'Content-MD5' - * header. + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` * - * @readonly + * @param query - + * @param options - */ - get contentMD5() { - return this.originalResponse.contentMD5; + async query(query, options = {}) { + var _a; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); + try { + if (!coreHttp.isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { + queryType: "SQL", + expression: query, + inputSerialization: toQuerySerialization(options.inputTextConfiguration), + outputSerialization: toQuerySerialization(options.outputTextConfiguration) + }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + return new BlobQueryResponse(response, { + abortSignal: options.abortSignal, + onProgress: options.onProgress, + onError: options.onError + }); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Indicates the range of bytes returned if - * the client requested a subset of the file by setting the Range request - * header. 
+ * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's contents, + * use {@link stageBlock} and {@link commitBlockList}. * - * @readonly - */ - get contentRange() { - return this.originalResponse.contentRange; - } - /** - * The content type specified for the file. - * The default content type is 'application/octet-stream' + * This is a non-parallel uploading method, please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrent uploading. * - * @readonly - */ - get contentType() { - return this.originalResponse.contentType; - } - /** - * Conclusion time of the last attempted - * Copy File operation where this file was the destination file. This value - * can specify the time of a completed, aborted, or failed copy attempt. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob * - * @readonly - */ - get copyCompletedOn() { - return void 0; - } - /** - * String identifier for the last attempted Copy - * File operation where this file was the destination file. + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation. * - * @readonly - */ - get copyId() { - return this.originalResponse.copyId; - } - /** - * Contains the number of bytes copied and - * the total bytes in the source in the last attempted Copy File operation - * where this file was the destination file. Can show between 0 and - * Content-Length bytes copied. + * Example usage: * - * @readonly + * ```js + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` */ - get copyProgress() { - return this.originalResponse.copyProgress; + async upload(body2, contentLength2, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-upload", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.upload(contentLength2, body2, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ?
void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * URL up to 2KB in length that specifies the - * source file used in the last attempted Copy File operation where this file - * was the destination file. + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. * - * @readonly + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. */ - get copySource() { - return this.originalResponse.copySource; + async syncUploadFromURL(sourceURL, options = {}) { + var _a, _b, _c, _d, _e; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-syncUploadFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, + sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, + sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, + sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, + sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.tagConditions + }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags }), convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * State of the copy operation - * identified by 'x-ms-copy-id'. 
Possible values include: 'pending', - * 'success', 'aborted', 'failed' + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block * - * @readonly + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. */ - get copyStatus() { - return this.originalResponse.copyStatus; + async stageBlock(blockId2, body2, contentLength2, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlock", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.stageBlock(blockId2, contentLength2, body2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { + onUploadProgress: options.onProgress + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Only appears when - * x-ms-copy-status is failed or pending. Describes cause of fatal or - * non-fatal copy operation failure. + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url * - * @readonly + * @param blockId - A 64-byte value that is base64-encoded + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Options to the Block Blob Stage Block From URL operation. + * @returns Response data for the Block Blob Stage Block From URL operation. 
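+ *
+ * Example usage (a minimal sketch; assumes `blockBlobClient` is an authenticated BlockBlobClient and `sourceBlobURL` is a hypothetical source blob URL that is public or carries a SAS):
+ *
+ * ```js
+ * // Stage two 4 MB blocks copied from the source blob, then commit them in order.
+ * // Block IDs must be base64-encoded and all have the same pre-encoding length.
+ * const FOUR_MB = 4 * 1024 * 1024;
+ * const blockIds = ["block-000", "block-001"].map((id) => Buffer.from(id).toString("base64"));
+ * await blockBlobClient.stageBlockFromURL(blockIds[0], sourceBlobURL, 0, FOUR_MB);
+ * await blockBlobClient.stageBlockFromURL(blockIds[1], sourceBlobURL, FOUR_MB, FOUR_MB);
+ * await blockBlobClient.commitBlockList(blockIds);
+ * ```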
*/ - get copyStatusDescription() { - return this.originalResponse.copyStatusDescription; + async stageBlockFromURL(blockId2, sourceURL, offset = 0, count, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlockFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.stageBlockFromURL(blockId2, 0, sourceURL, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? void 0 : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * When a blob is leased, - * specifies whether the lease is of infinite or fixed duration. Possible - * values include: 'infinite', 'fixed'. + * Writes a blob by specifying the list of block IDs that make up the blob. + * In order to be written as part of a blob, a block must have been successfully written + * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to + * update a blob by uploading only those blocks that have changed, then committing the new and existing + * blocks together. Any blocks not specified in the block list are permanently deleted. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list * - * @readonly + * @param blocks - Array of 64-byte value that is base64-encoded + * @param options - Options to the Block Blob Commit Block List operation. + * @returns Response data for the Block Blob Commit Block List operation. */ - get leaseDuration() { - return this.originalResponse.leaseDuration; + async commitBlockList(blocks2, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-commitBlockList", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.commitBlockList({ latest: blocks2 }, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Lease state of the blob. Possible - * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.
+ * Returns the list of blocks that have been uploaded as part of a block blob + * using the specified block list filter. + * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list * - * @readonly + * @param listType - Specifies whether to return the list of committed blocks, + * the list of uncommitted blocks, or both lists together. + * @param options - Options to the Block Blob Get Block List operation. + * @returns Response data for the Block Blob Get Block List operation. */ - get leaseState() { - return this.originalResponse.leaseState; + async getBlockList(listType2, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlockBlobClient-getBlockList", options); + try { + const res = await this.blockBlobContext.getBlockList(listType2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + if (!res.committedBlocks) { + res.committedBlocks = []; + } + if (!res.uncommittedBlocks) { + res.uncommittedBlocks = []; + } + return res; + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } + // High level functions /** - * The current lease status of the - * blob. Possible values include: 'locked', 'unlocked'. + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. * - * @readonly - */ - get leaseStatus() { - return this.originalResponse.leaseStatus; - } - /** - * A UTC date/time value generated by the service that - * indicates the time at which the response was initiated. + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. * - * @readonly + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView + * @param options - */ - get date() { - return this.originalResponse.date; + async uploadData(data, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); + try { + if (coreHttp.isNode) { + let buffer; + if (data instanceof Buffer) { + buffer = data; + } else if (data instanceof ArrayBuffer) { + buffer = Buffer.from(data); + } else { + data = data; + buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + } + return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); + } else { + const browserBlob = new Blob([data]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * The number of committed blocks - * present in the blob. This header is returned only for append blobs.
+ * ONLY AVAILABLE IN BROWSERS. * - * @readonly - */ - get blobCommittedBlockCount() { - return this.originalResponse.blobCommittedBlockCount; - } - /** - * The ETag contains a value that you can use to - * perform operations conditionally, in quotes. + * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. * - * @readonly - */ - get etag() { - return this.originalResponse.etag; - } - /** - * The error code. + * When the buffer length is less than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call + * {@link commitBlockList} to commit the block list. * - * @readonly - */ - get errorCode() { - return this.originalResponse.errorCode; - } - /** - * The value of this header is set to - * true if the file data and application metadata are completely encrypted - * using the specified algorithm. Otherwise, the value is set to false (when - * the file is unencrypted, or if only parts of the file/application metadata - * are encrypted). + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. * - * @readonly - */ - get isServerEncrypted() { - return this.originalResponse.isServerEncrypted; - } - /** - * If the blob has a MD5 hash, and if - * request contains range header (Range or x-ms-range), this response header - * is returned with the value of the whole blob's MD5 value. This value may - * or may not be equal to the value returned in Content-MD5 header, with the - * latter calculated from the requested range. + * @deprecated Use {@link uploadData} instead. * - * @readonly + * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView + * @param options - Options to upload browser data. + * @returns Response data for the Blob Upload operation. */ - get blobContentMD5() { - return this.originalResponse.blobContentMD5; + async uploadBrowserData(browserData, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadBrowserData", options); + try { + const browserBlob = new Blob([browserData]); + return await this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Returns the date and time the file was last - * modified. Any operation that modifies the file or its properties updates - * the last modified time. * - * @readonly - */ - get lastModified() { - return this.originalResponse.lastModified; - } - /** - * A name-value pair - * to associate with a file storage object. + * Uploads data to block blob. Requires a bodyFactory as the data source, + * which needs to return a {@link HttpRequestBody} object with the offset and size provided. * - * @readonly - */ - get metadata() { - return this.originalResponse.metadata; - } - /** - * This header uniquely identifies the request - * that was made and can be used for troubleshooting the request. + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.
+ * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. * - * @readonly + * @param bodyFactory - + * @param size - size of the data to upload. + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. */ - get requestId() { - return this.originalResponse.requestId; + async uploadSeekableInternal(bodyFactory, size, options = {}) { + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); + } + if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) { + options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + } + if (options.maxSingleShotSize < 0 || options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); + } + if (options.blockSize === 0) { + if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`${size} is too large to upload to a block blob.`); + } + if (size > options.maxSingleShotSize) { + options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); + if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + } + } + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadSeekableInternal", options); + try { + if (size <= options.maxSingleShotSize) { + return await this.upload(bodyFactory(0, size), size, updatedOptions); + } + const numBlocks = Math.floor((size - 1) / options.blockSize) + 1; + if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`The buffer's size is too big or the BlockSize is too small; the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); + } + const blockList = []; + const blockIDPrefix = coreHttp.generateUuid(); + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let i = 0; i < numBlocks; i++) { + batch.addOperation(async () => { + const blockID = generateBlockID(blockIDPrefix, i); + const start = options.blockSize * i; + const end = i === numBlocks - 1 ? size : start + options.blockSize; + const contentLength2 = end - start; + blockList.push(blockID); + await this.stageBlock(blockID, bodyFactory(start, contentLength2), contentLength2, { + abortSignal: options.abortSignal, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions + }); + transferProgress += contentLength2; + if (options.onProgress) { + options.onProgress({ + loadedBytes: transferProgress + }); + } + }); + } + await batch.do(); + return this.commitBlockList(blockList, updatedOptions); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * If a client request id header is sent in the request, this header will be present in the - * response with the same value. + * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * @readonly - */ - get clientRequestId() { - return this.originalResponse.clientRequestId; - } - /** - * Indicates the version of the File service used - * to execute the request.
+ * Uploads a local file in blocks to a block blob. * - * @readonly - */ - get version() { - return this.originalResponse.version; - } - /** - * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned - * when the blob was encrypted with a customer-provided key. + * When the file size is less than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList + * to commit the block list. * - * @readonly - */ - get encryptionKeySha256() { - return this.originalResponse.encryptionKeySha256; - } - /** - * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to - * true, then the request returns a crc64 for the range, as long as the range size is less than - * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is - * specified in the same request, it will fail with 400(Bad Request) + * @param filePath - Full path of local file + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. */ - get contentCrc64() { - return this.originalResponse.contentCrc64; + async uploadFile(filePath, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadFile", options); + try { + const size = (await fsStat(filePath)).size; + return await this.uploadSeekableInternal((offset, count) => { + return () => fsCreateReadStream(filePath, { + autoClose: true, + end: count ? offset + count - 1 : Infinity, + start: offset + }); + }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * The response body as a browser Blob. - * Always undefined in node.js. + * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * @readonly - */ - get blobBody() { - return void 0; - } - /** - * The response body as a node.js Readable stream. - * Always undefined in the browser. + * Uploads a Node.js Readable stream into block blob. * - * It will parse avor data returned by blob query. + * PERFORMANCE IMPROVEMENT TIPS: + * * It is better to set the input stream's highWaterMark to the same value as the bufferSize + * parameter, which will avoid Buffer.concat() operations. * - * @readonly - */ - get readableStreamBody() { - return coreHttp.isNode ? this.blobDownloadStream : void 0; - } - /** - * The HTTP response. + * @param stream - Node.js Readable stream + * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB + * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, + * positive correlation with max uploading concurrency. Default value is 5 + * @param options - Options to Upload Stream to Block Blob operation. + * @returns Response data for the Blob Upload operation.
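+ *
+ * Example usage (a minimal sketch; assumes `blockBlobClient` is an authenticated BlockBlobClient and `./local-file.bin` is a hypothetical local path):
+ *
+ * ```js
+ * const fs = require("fs");
+ *
+ * // Stream the file in 4 MB buffers with up to 20 concurrent block uploads.
+ * const uploadResponse = await blockBlobClient.uploadStream(
+ *   fs.createReadStream("./local-file.bin"),
+ *   4 * 1024 * 1024,
+ *   20,
+ *   { onProgress: (ev) => console.log(`Uploaded ${ev.loadedBytes} bytes`) }
+ * );
+ * ```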
*/ - get _response() { - return this.originalResponse._response; - } - }; - exports2.BlockBlobTier = void 0; - (function(BlockBlobTier) { - BlockBlobTier["Hot"] = "Hot"; - BlockBlobTier["Cool"] = "Cool"; - BlockBlobTier["Cold"] = "Cold"; - BlockBlobTier["Archive"] = "Archive"; - })(exports2.BlockBlobTier || (exports2.BlockBlobTier = {})); - exports2.PremiumPageBlobTier = void 0; - (function(PremiumPageBlobTier) { - PremiumPageBlobTier["P4"] = "P4"; - PremiumPageBlobTier["P6"] = "P6"; - PremiumPageBlobTier["P10"] = "P10"; - PremiumPageBlobTier["P15"] = "P15"; - PremiumPageBlobTier["P20"] = "P20"; - PremiumPageBlobTier["P30"] = "P30"; - PremiumPageBlobTier["P40"] = "P40"; - PremiumPageBlobTier["P50"] = "P50"; - PremiumPageBlobTier["P60"] = "P60"; - PremiumPageBlobTier["P70"] = "P70"; - PremiumPageBlobTier["P80"] = "P80"; - })(exports2.PremiumPageBlobTier || (exports2.PremiumPageBlobTier = {})); - function toAccessTier(tier2) { - if (tier2 === void 0) { - return void 0; - } - return tier2; - } - __name(toAccessTier, "toAccessTier"); - function ensureCpkIfSpecified(cpk, isHttps) { - if (cpk && !isHttps) { - throw new RangeError("Customer-provided encryption key must be used over HTTPS."); - } - if (cpk && !cpk.encryptionAlgorithm) { - cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; - } - } - __name(ensureCpkIfSpecified, "ensureCpkIfSpecified"); - exports2.StorageBlobAudience = void 0; - (function(StorageBlobAudience) { - StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; - StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; - })(exports2.StorageBlobAudience || (exports2.StorageBlobAudience = {})); - function getBlobServiceAccountAudience(storageAccountName) { - return `https://${storageAccountName}.blob.core.windows.net/.default`; - } - __name(getBlobServiceAccountAudience, "getBlobServiceAccountAudience"); - function rangeResponseFromModel(response) { - const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ - offset: x.start, - count: x.end - x.start - })); - const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ - offset: x.start, - count: x.end - x.start - })); - return Object.assign(Object.assign({}, response), { - pageRange, - clearRange, - _response: Object.assign(Object.assign({}, response._response), { parsedBody: { - pageRange, - clearRange - } }) - }); - } - __name(rangeResponseFromModel, "rangeResponseFromModel"); - var BlobBeginCopyFromUrlPoller = class extends coreLro.Poller { - static { - __name(this, "BlobBeginCopyFromUrlPoller"); - } - constructor(options) { - const { blobClient, copySource: copySource2, intervalInMs = 15e3, onProgress, resumeFrom, startCopyFromURLOptions } = options; - let state; - if (resumeFrom) { - state = JSON.parse(resumeFrom).state; + async uploadStream(stream2, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; } - const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { - blobClient, - copySource: copySource2, - startCopyFromURLOptions - })); - super(operation); - if (typeof onProgress === "function") { - this.onProgress(onProgress); + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadStream", options); + try { + let blockNum = 0; + const blockIDPrefix = coreHttp.generateUuid(); + let transferProgress = 0; + const 
blockList = []; + const scheduler = new BufferScheduler( + stream2, + bufferSize, + maxConcurrency, + async (body2, length) => { + const blockID = generateBlockID(blockIDPrefix, blockNum); + blockList.push(blockID); + blockNum++; + await this.stageBlock(blockID, body2, length, { + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions + }); + transferProgress += length; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }, + // concurrency should be set to a smaller value than maxConcurrency, which is helpful to + // reduce the possibility that an outgoing handler waits for stream data; in + // this situation, outgoing handlers are blocked. + // Outgoing queue shouldn't be empty. + Math.ceil(maxConcurrency / 4 * 3) + ); + await scheduler.do(); + return await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } } }; var PageBlobClient = class _PageBlobClient extends BlobClient { static { __name(this, "PageBlobClient"); } constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { let pipeline; let url2; options = options || {}; if (isPipelineLike(credentialOrPipelineOrContainerName)) { url2 = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } else if (coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || 
credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { + url2 = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } else if (extractedCreds.kind === "SASConnString") { + url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } - } catch (err) { - state.error = err; - state.isCompleted = true; - } - } - return makeBlobBeginCopyFromURLPollOperation(state); - }, "update"); - var toString = /* @__PURE__ */ __name(function toString2() { - return JSON.stringify({ state: this.state }, (key, value) => { - if (key === "blobClient") { - return void 0; + } else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - return value; - }); - }, "toString"); - function makeBlobBeginCopyFromURLPollOperation(state) { - return { - state: Object.assign({}, state), - cancel, - toString, - update - }; - } - __name(makeBlobBeginCopyFromURLPollOperation, "makeBlobBeginCopyFromURLPollOperation"); - function rangeToString(iRange) { - if (iRange.offset < 0) { - throw new RangeError(`Range.offset cannot be smaller than 0.`); - } - if (iRange.count && iRange.count <= 0) { - throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); + super(url2, pipeline); + this.pageBlobContext = new PageBlob(this.storageClientContext); } - return iRange.count ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` : `bytes=${iRange.offset}-`; - } - __name(rangeToString, "rangeToString"); - var BatchStates; - (function(BatchStates2) { - BatchStates2[BatchStates2["Good"] = 0] = "Good"; - BatchStates2[BatchStates2["Error"] = 1] = "Error"; - })(BatchStates || (BatchStates = {})); - var Batch = class { - static { - __name(this, "Batch"); + /** + * Creates a new PageBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. 
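+ *
+ * Example usage (a minimal sketch; assumes `pageBlobClient` is an authenticated PageBlobClient):
+ *
+ * ```js
+ * // Take a snapshot, then read the blob's properties as of that snapshot
+ * // through a snapshot-scoped client.
+ * const snapshotResponse = await pageBlobClient.createSnapshot();
+ * const snapshotClient = pageBlobClient.withSnapshot(snapshotResponse.snapshot);
+ * const properties = await snapshotClient.getProperties();
+ * ```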
+ * + * @param snapshot - The snapshot timestamp. + * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot2) { + return new _PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); + } /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * to a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - Options to the Page Blob Create operation. + * @returns Response data for the Page Blob Create operation. + */ + async create(size, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-create", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.create(0, size, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * to a page blob. If the blob with the same name already exists, the content + * of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - + */ + async createIfNotExists(size, options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("PageBlobClient-createIfNotExists", options); + try { + const conditions = { ifNoneMatch: ETagAny }; + const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } catch (e) { + if (((_a = e.details) === null || _a === void 0 ?
void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist." + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** + * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page * + * @param body - Data to upload + * @param offset - Offset of destination page blob + * @param count - Content length of the body, also number of bytes to be uploaded + * @param options - Options to the Page Blob Upload Pages operation. + * @returns Response data for the Page Blob Upload Pages operation. */ + async uploadPages(body2, offset, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-uploadPages", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.uploadPages(count, body2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress + }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** + * The Upload Pages operation writes a range of pages to a page blob where the + * contents are read from a URL. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url * + * @param sourceURL - Specify a URL to the copy source, a Shared Access Signature (SAS) may be needed for authentication + * @param sourceOffset - The source offset to copy from.
Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - */ - nextOperation() { - if (this.offset < this.operations.length) { - return this.operations[this.offset++]; + async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-uploadPagesFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), Object.assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } - return null; } /** - * Start execute operations. One one the most important difference between - * this method with do() is that do() wraps as an sync method. + * Frees the specified pages from the page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page * + * @param offset - Starting byte position of the pages to clear. + * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. */ - parallelExecute() { - if (this.state === BatchStates.Error) { - return; - } - if (this.completed >= this.operations.length) { - this.emitter.emit("finish"); - return; - } - while (this.actives < this.concurrency) { - const operation = this.nextOperation(); - if (operation) { - operation(); - } else { - return; - } + async clearPages(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-clearPages", options); + try { + return await this.pageBlobContext.clearPages(0, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } } /** + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. */ + async getPageRanges(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRanges", options); + try { + return await this.pageBlobContext.getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))).then(rangeResponseFromModel); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** + * getPageRangesSegment returns a single segment of page ranges starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesSegment again + * (passing the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to PageBlob Get Page Ranges Segment operation.
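+ *
+ * Example usage (a minimal sketch of the marker loop described above; assumes `pageBlobClient` is an authenticated PageBlobClient; this helper is internal, so the public {@link listPageRanges} is usually preferable):
+ *
+ * ```js
+ * let marker;
+ * do {
+ *   const segment = await pageBlobClient.listPageRangesSegment(0, undefined, marker);
+ *   for (const range of segment.pageRange || []) {
+ *     console.log(`Page range: ${range.start} - ${range.end}`);
+ *   }
+ *   marker = segment.continuationToken;
+ * } while (marker);
+ * ```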
*/ + async listPageRangesSegment(offset = 0, count, marker2, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesSegment", options); + try { + return await this.pageBlobContext.getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), marker: marker2, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of + * the list of page ranges to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to List Page Ranges operation.
+ */ + listPageRangeItemSegments(offset = 0, count, marker2, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listPageRangeItemSegments_1() { + let getPageRangeItemSegmentsResponse; + if (!!marker2 || marker2 === void 0) { + do { + getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesSegment(offset, count, marker2, options)); + marker2 = getPageRangeItemSegmentsResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + } while (marker2); } - this.buffers.push(Buffer.allocUnsafe(len)); - } - if (buffers) { - this.fill(buffers, totalLength); - } + }, "listPageRangeItemSegments_1")); } /** - * The size of the data contained in the pooled buffers. + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to List Page Ranges operation. */ - get size() { - return this._size; + listPageRangeItems(offset = 0, count, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listPageRangeItems_1() { + var e_1, _a; + let marker2; + try { + for (var _b = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { + const getPageRangesSegment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } catch (e_1_1) { + e_1 = { error: e_1_1 }; + } finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } finally { + if (e_1) throw e_1.error; + } + } + }, "listPageRangeItems_1")); } /** - * Fill the internal buffers with data in the input buffers serially - * with respect to the total length and the total capacity of the internal buffers. - * Data copied will be shift out of the input buffers. + * Returns an async iterable iterator to list of page ranges for a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * - * @param buffers - Input buffers containing the data to be filled in the pooled buffer - * @param totalLength - Total length of the data to be filled in. + * .byPage() returns an async iterable iterator to list of page ranges for a page blob. 
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * // Get the pageBlobClient before you run these snippets,
+ * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");`
+ * let i = 1;
+ * for await (const pageRange of pageBlobClient.listPageRanges()) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * let iter = pageBlobClient.listPageRanges();
+ * let pageRangeItem = await iter.next();
+ * while (!pageRangeItem.done) {
+ * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
+ * pageRangeItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) {
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ *
+ * // Passing next marker as continuationToken
+ *
+ * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints 10 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param options - Options to the Page Blob Get Ranges operation.
+ * @returns An asyncIterableIterator that supports paging.
+ */
+ listPageRanges(offset = 0, count, options = {}) {
+ options.conditions = options.conditions || {};
+ const iter = this.listPageRangeItems(offset, count, options);
+ return {
+ /**
+ * The next method, part of the iteration protocol
+ */
+ next() {
+ return iter.next();
+ },
+ /**
+ * The connection to the async iterator, part of the iteration protocol
+ */
+ [Symbol.asyncIterator]() {
+ return this;
+ },
+ /**
+ * Return an AsyncIterableIterator that works a page at a time
+ */
+ byPage: /* @__PURE__ */ __name((settings = {}) => {
+ return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options));
+ }, "byPage")
+ };
+ }
/**
+ * Gets the collection of page ranges that differ between a specified snapshot and this page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
*
+ * @param offset - Starting byte position of the page blob
+ * @param count - Number of bytes to get ranges diff.
+ * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+ * @returns Response data for the Page Blob Get Page Range Diff operation. 
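+ *
+ * A minimal usage sketch (the `pageBlobClient` and `snapshotTimestamp` names
+ * below are hypothetical placeholders, not part of this patch):
+ *
+ * ```js
+ * // Diff the blob against an earlier snapshot and log the changed ranges.
+ * const diff = await pageBlobClient.getPageRangesDiff(0, 1024, snapshotTimestamp);
+ * for (const range of diff.pageRange || []) {
+ * console.log(`Changed: offset ${range.offset}, count ${range.count}`);
+ * }
+ * ```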
*/
- fill(buffers, totalLength) {
- this._size = Math.min(this.capacity, totalLength);
- let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0;
- while (totalCopiedNum < this._size) {
- const source = buffers[i];
- const target = this.buffers[j];
- const copiedNum = source.copy(target, targetOffset, sourceOffset);
- totalCopiedNum += copiedNum;
- sourceOffset += copiedNum;
- targetOffset += copiedNum;
- if (sourceOffset === source.length) {
- i++;
- sourceOffset = 0;
- }
- if (targetOffset === target.length) {
- j++;
- targetOffset = 0;
- }
- }
- buffers.splice(0, i);
- if (buffers.length > 0) {
- buffers[0] = buffers[0].slice(sourceOffset);
+ async getPageRangesDiff(offset, count, prevSnapshot, options = {}) {
+ var _a;
+ options.conditions = options.conditions || {};
+ const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiff", options);
+ try {
+ return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))).then(rangeResponseFromModel);
+ } catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message
+ });
+ throw e;
+ } finally {
+ span.end();
}
}
/**
- * Get the readable stream assembled from all the data in the internal buffers.
+ * getPageRangesDiffSegment returns a single segment of page ranges starting from the
+ * specified Marker for difference between previous snapshot and the target page blob.
+ * Use an empty Marker to start enumeration from the beginning.
+ * After getting a segment, process it, and then call getPageRangesDiffSegment again
+ * (passing the previously-returned Marker) to get the next segment.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
*
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.
+ * @param marker - A string value that identifies the portion of the get to be returned with the next get operation.
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation.
*/
- getReadableStream() {
- return new BuffersStream(this.buffers, this.size);
- }
- };
- var BufferScheduler = class {
- static {
- __name(this, "BufferScheduler");
+ async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker2, options) {
+ var _a;
+ const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiffSegment", options);
+ try {
+ return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), prevsnapshot: prevSnapshotOrUrl, range: rangeToString({ + offset, + count + }), marker: marker2, maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Creates an instance of BufferScheduler. + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} * - * @param readable - A Node.js Readable stream - * @param bufferSize - Buffer size of every maintained buffer - * @param maxBuffers - How many buffers can be allocated - * @param outgoingHandler - An async function scheduled to be - * triggered when a buffer fully filled - * with stream data - * @param concurrency - Concurrency of executing outgoingHandlers (>0) - * @param encoding - [Optional] Encoding of Readable stream when it's a string stream + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. */ - constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { - this.emitter = new events.EventEmitter(); - this.offset = 0; - this.isStreamEnd = false; - this.isError = false; - this.executingOutgoingHandlers = 0; - this.numBuffers = 0; - this.unresolvedDataArray = []; - this.unresolvedLength = 0; - this.incoming = []; - this.outgoing = []; - if (bufferSize <= 0) { - throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); - } - if (maxBuffers <= 0) { - throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); - } - if (concurrency <= 0) { - throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); - } - this.bufferSize = bufferSize; - this.maxBuffers = maxBuffers; - this.readable = readable; - this.outgoingHandler = outgoingHandler; - this.concurrency = concurrency; - this.encoding = encoding; + listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker2, options) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listPageRangeDiffItemSegments_1() { + let getPageRangeItemSegmentsResponse; + if (!!marker2 || marker2 === void 0) { + do { + getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker2, options)); + marker2 = getPageRangeItemSegmentsResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + } while (marker2); + } + }, "listPageRangeDiffItemSegments_1")); } /** - * Start the scheduler, will return error when stream of any of the outgoingHandlers - * returns error. 
+ * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. */ - async do() { - return new Promise((resolve, reject) => { - this.readable.on("data", (data) => { - data = typeof data === "string" ? Buffer.from(data, this.encoding) : data; - this.appendUnresolvedData(data); - if (!this.resolveData()) { - this.readable.pause(); - } - }); - this.readable.on("error", (err) => { - this.emitter.emit("error", err); - }); - this.readable.on("end", () => { - this.isStreamEnd = true; - this.emitter.emit("checkEnd"); - }); - this.emitter.on("error", (err) => { - this.isError = true; - this.readable.pause(); - reject(err); - }); - this.emitter.on("checkEnd", () => { - if (this.outgoing.length > 0) { - this.triggerOutgoingHandlers(); - return; + listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listPageRangeDiffItems_1() { + var e_2, _a; + let marker2; + try { + for (var _b = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { + const getPageRangesSegment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); } - if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { - if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { - const buffer = this.shiftBufferFromUnresolvedDataArray(); - this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset).then(resolve).catch(reject); - } else if (this.unresolvedLength >= this.bufferSize) { - return; - } else { - resolve(); - } + } catch (e_2_1) { + e_2 = { error: e_2_1 }; + } finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } finally { + if (e_2) throw e_2.error; } - }); - }); + } + }, "listPageRangeDiffItems_1")); } /** - * Insert a new data into unresolved array. + * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * - * @param data - + * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. 
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * // Get the pageBlobClient before you run these snippets,
+ * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");`
+ * let i = 1;
+ * for await (const pageRange of pageBlobClient.listPageRangesDiff()) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * let iter = pageBlobClient.listPageRangesDiff();
+ * let pageRangeItem = await iter.next();
+ * while (!pageRangeItem.done) {
+ * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
+ * pageRangeItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) {
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ *
+ * // Passing next marker as continuationToken
+ *
+ * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints 10 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Ranges operation.
+ * @returns An asyncIterableIterator that supports paging.
+ */
+ listPageRangesDiff(offset, count, prevSnapshot, options = {}) {
+ options.conditions = options.conditions || {};
+ const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options));
+ return {
+ /**
+ * The next method, part of the iteration protocol
+ */
+ next() {
+ return iter.next();
+ },
+ /**
+ * The connection to the async iterator, part of the iteration protocol
+ */
+ [Symbol.asyncIterator]() {
+ return this;
+ },
+ /**
+ * Return an AsyncIterableIterator that works a page at a time
+ */
+ byPage: /* @__PURE__ */ __name((settings = {}) => {
+ return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options));
+ }, "byPage")
+ };
+ }
/**
+ * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. 
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. */ - shiftBufferFromUnresolvedDataArray(buffer) { - if (!buffer) { - buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); - } else { - buffer.fill(this.unresolvedDataArray, this.unresolvedLength); + async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl2, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options); + try { + return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevSnapshotUrl: prevSnapshotUrl2, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))).then(rangeResponseFromModel); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } - this.unresolvedLength -= buffer.size; - return buffer; } /** - * Resolve data in unresolvedDataArray. For every buffer with size in blockSize - * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, - * then push it into outgoing to be handled by outgoing handler. - * - * Return false when available buffers in incoming are not enough, else true. + * Resizes the page blob to the specified size (which must be a multiple of 512). + * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties * - * @returns Return false when buffers in incoming are not enough, else true. + * @param size - Target size + * @param options - Options to the Page Blob Resize operation. + * @returns Response data for the Page Blob Resize operation. */ - resolveData() { - while (this.unresolvedLength >= this.bufferSize) { - let buffer; - if (this.incoming.length > 0) { - buffer = this.incoming.shift(); - this.shiftBufferFromUnresolvedDataArray(buffer); - } else { - if (this.numBuffers < this.maxBuffers) { - buffer = this.shiftBufferFromUnresolvedDataArray(); - this.numBuffers++; - } else { - return false; - } - } - this.outgoing.push(buffer); - this.triggerOutgoingHandlers(); + async resize(size, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-resize", options); + try { + return await this.pageBlobContext.resize(size, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } - return true; - } - /** - * Try to trigger a outgoing handler for every buffer in outgoing. Stop when - * concurrency reaches. - */ - async triggerOutgoingHandlers() { - let buffer; - do { - if (this.executingOutgoingHandlers >= this.concurrency) { - return; - } - buffer = this.outgoing.shift(); - if (buffer) { - this.triggerOutgoingHandler(buffer); - } - } while (buffer); } /** - * Trigger a outgoing handler for a buffer shifted from outgoing. + * Sets a page blob's sequence number. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties * - * @param buffer - + * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. + * @param sequenceNumber - Required if sequenceNumberAction is max or update + * @param options - Options to the Page Blob Update Sequence Number operation. + * @returns Response data for the Page Blob Update Sequence Number operation. */ - async triggerOutgoingHandler(buffer) { - const bufferLength = buffer.size; - this.executingOutgoingHandlers++; - this.offset += bufferLength; + async updateSequenceNumber(sequenceNumberAction2, sequenceNumber, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-updateSequenceNumber", options); try { - await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); - } catch (err) { - this.emitter.emit("error", err); - return; + return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction2, Object.assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } - this.executingOutgoingHandlers--; - this.reuseBuffer(buffer); - this.emitter.emit("checkEnd"); } /** - * Return buffer used by outgoing handler into incoming. + * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. + * The snapshot is copied such that only the differential changes between the previously + * copied snapshot are transferred to the destination. + * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. + * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob + * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots * - * @param buffer - + * @param copySource - Specifies the name of the source page blob snapshot. For example, + * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Options to the Page Blob Copy Incremental operation. + * @returns Response data for the Page Blob Copy Incremental operation. 
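+ *
+ * A minimal usage sketch (hypothetical `destPageBlobClient` and
+ * `sourceSnapshotUrl`; the polling loop is illustrative, not prescriptive):
+ *
+ * ```js
+ * // Kick off the incremental copy, then poll the copy status.
+ * await destPageBlobClient.startCopyIncremental(sourceSnapshotUrl);
+ * let props = await destPageBlobClient.getProperties();
+ * while (props.copyStatus === "pending") {
+ * await new Promise((resolve) => setTimeout(resolve, 1000));
+ * props = await destPageBlobClient.getProperties();
+ * }
+ * ```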
*/ - reuseBuffer(buffer) { - this.incoming.push(buffer); - if (!this.isError && this.resolveData() && !this.isStreamEnd) { - this.readable.resume(); + async startCopyIncremental(copySource2, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-startCopyIncremental", options); + try { + return await this.pageBlobContext.copyIncremental(copySource2, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } } }; - async function streamToBuffer(stream2, buffer, offset, end, encoding) { - let pos = 0; - const count = end - offset; - return new Promise((resolve, reject) => { - const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), REQUEST_TIMEOUT); - stream2.on("readable", () => { - if (pos >= count) { - clearTimeout(timeout); - resolve(); - return; - } - let chunk = stream2.read(); - if (!chunk) { - return; - } - if (typeof chunk === "string") { - chunk = Buffer.from(chunk, encoding); - } - const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; - buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); - pos += chunkLength; - }); - stream2.on("end", () => { - clearTimeout(timeout); - if (pos < count) { - reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); - } - resolve(); - }); - stream2.on("error", (msg) => { - clearTimeout(timeout); - reject(msg); - }); - }); - } - __name(streamToBuffer, "streamToBuffer"); - async function streamToBuffer2(stream2, buffer, encoding) { - let pos = 0; - const bufferSize = buffer.length; - return new Promise((resolve, reject) => { - stream2.on("readable", () => { - let chunk = stream2.read(); - if (!chunk) { - return; - } - if (typeof chunk === "string") { - chunk = Buffer.from(chunk, encoding); - } - if (pos + chunk.length > bufferSize) { - reject(new Error(`Stream exceeds buffer size. 
Buffer size: ${bufferSize}`)); - return; - } - buffer.fill(chunk, pos, pos + chunk.length); - pos += chunk.length; - }); - stream2.on("end", () => { - resolve(pos); - }); - stream2.on("error", reject); - }); + async function getBodyAsText(batchResponse) { + let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); + const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); + buffer = buffer.slice(0, responseLength); + return buffer.toString(); } - __name(streamToBuffer2, "streamToBuffer2"); - async function readStreamToLocalFile(rs, file) { - return new Promise((resolve, reject) => { - const ws = fs__namespace.createWriteStream(file); - rs.on("error", (err) => { - reject(err); - }); - ws.on("error", (err) => { - reject(err); - }); - ws.on("close", resolve); - rs.pipe(ws); - }); + __name(getBodyAsText, "getBodyAsText"); + function utf8ByteLength(str) { + return Buffer.byteLength(str); } - __name(readStreamToLocalFile, "readStreamToLocalFile"); - var fsStat = util__namespace.promisify(fs__namespace.stat); - var fsCreateReadStream = fs__namespace.createReadStream; - var BlobClient = class _BlobClient extends StorageClient { + __name(utf8ByteLength, "utf8ByteLength"); + var HTTP_HEADER_DELIMITER = ": "; + var SPACE_DELIMITER = " "; + var NOT_FOUND = -1; + var BatchResponseParser = class { static { - __name(this, "BlobClient"); + __name(this, "BatchResponseParser"); + } + constructor(batchResponse, subRequests) { + if (!batchResponse || !batchResponse.contentType) { + throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); + } + if (!subRequests || subRequests.size === 0) { + throw new RangeError("Invalid state: subRequests is not provided or size is 0."); + } + this.batchResponse = batchResponse; + this.subRequests = subRequests; + this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; + this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; + this.batchResponseEnding = `--${this.responseBatchBoundary}--`; } - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { - options = options || {}; - let pipeline; - let url2; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - pipeline = credentialOrPipelineOrContainerName; - } else if (coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - options = blobNameOrOptions; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); - } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; - if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { - options = blobNameOrOptions; - } - pipeline = newPipeline(new AnonymousCredential(), options); - } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); - if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, 
extractedCreds.accountKey); - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response + async parseBatchResponse() { + if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) { + throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); + } + const responseBodyAsText = await getBodyAsText(this.batchResponse); + const subResponses = responseBodyAsText.split(this.batchResponseEnding)[0].split(this.perResponsePrefix).slice(1); + const subResponseCount = subResponses.length; + if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { + throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); + } + const deserializedSubResponses = new Array(subResponseCount); + let subResponsesSucceededCount = 0; + let subResponsesFailedCount = 0; + for (let index = 0; index < subResponseCount; index++) { + const subResponse = subResponses[index]; + const deserializedSubResponse = {}; + deserializedSubResponse.headers = new coreHttp.HttpHeaders(); + const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); + let subRespHeaderStartFound = false; + let subRespHeaderEndFound = false; + let subRespFailed = false; + let contentId = NOT_FOUND; + for (const responseLine of responseLines) { + if (!subRespHeaderStartFound) { + if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { + contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); + } + if (responseLine.startsWith(HTTP_VERSION_1_1)) { + subRespHeaderStartFound = true; + const tokens = responseLine.split(SPACE_DELIMITER); + deserializedSubResponse.status = parseInt(tokens[1]); + deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); + } + continue; + } + if (responseLine.trim() === "") { + if (!subRespHeaderEndFound) { + subRespHeaderEndFound = true; + } + continue; + } + if (!subRespHeaderEndFound) { + if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { + throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); + } + const tokens = responseLine.split(HTTP_HEADER_DELIMITER); + deserializedSubResponse.headers.set(tokens[0], tokens[1]); + if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { + deserializedSubResponse.errorCode = tokens[1]; + subRespFailed = true; } - pipeline = newPipeline(sharedKeyCredential, options); } else { - throw new Error("Account connection string is only supported in Node.js environment"); + if (!deserializedSubResponse.bodyAsText) { + deserializedSubResponse.bodyAsText = ""; + } + deserializedSubResponse.bodyAsText += responseLine; } - } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" 
+ extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); + } + if (contentId !== NOT_FOUND && Number.isInteger(contentId) && contentId >= 0 && contentId < this.subRequests.size && deserializedSubResponses[contentId] === void 0) { + deserializedSubResponse._request = this.subRequests.get(contentId); + deserializedSubResponses[contentId] = deserializedSubResponse; } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); + } + if (subRespFailed) { + subResponsesFailedCount++; + } else { + subResponsesSucceededCount++; } - } else { - throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } - super(url2, pipeline); - ({ blobName: this._name, containerName: this._containerName } = this.getBlobAndContainerNamesFromUrl()); - this.blobContext = new Blob$1(this.storageClientContext); - this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); - this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); - } - /** - * The name of the blob. - */ - get name() { - return this._name; + return { + subResponses: deserializedSubResponses, + subResponsesSucceededCount, + subResponsesFailedCount + }; } - /** - * The name of the storage container the blob is associated with. - */ - get containerName() { - return this._containerName; + }; + var MutexLockStatus; + (function(MutexLockStatus2) { + MutexLockStatus2[MutexLockStatus2["LOCKED"] = 0] = "LOCKED"; + MutexLockStatus2[MutexLockStatus2["UNLOCKED"] = 1] = "UNLOCKED"; + })(MutexLockStatus || (MutexLockStatus = {})); + var Mutex = class { + static { + __name(this, "Mutex"); } /** - * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. - * Provide "" will remove the snapshot and return a Client to the base blob. + * Lock for a specific key. If the lock has been acquired by another customer, then + * will wait until getting the lock. * - * @param snapshot - The snapshot timestamp. - * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + * @param key - lock key */ - withSnapshot(snapshot2) { - return new _BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); + static async lock(key) { + return new Promise((resolve) => { + if (this.keys[key] === void 0 || this.keys[key] === MutexLockStatus.UNLOCKED) { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + } else { + this.onUnlockEvent(key, () => { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + }); + } + }); } /** - * Creates a new BlobClient object pointing to a version of this blob. - * Provide "" will remove the versionId and return a Client to the base blob. + * Unlock a key. * - * @param versionId - The versionId. - * @returns A new BlobClient object pointing to the version of this blob. + * @param key - */ - withVersion(versionId2) { - return new _BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId2.length === 0 ? void 0 : versionId2), this.pipeline); + static async unlock(key) { + return new Promise((resolve) => { + if (this.keys[key] === MutexLockStatus.LOCKED) { + this.emitUnlockEvent(key); + } + delete this.keys[key]; + resolve(); + }); } - /** - * Creates a AppendBlobClient object. 
- * - */ - getAppendBlobClient() { - return new AppendBlobClient(this.url, this.pipeline); + static onUnlockEvent(key, handler) { + if (this.listeners[key] === void 0) { + this.listeners[key] = [handler]; + } else { + this.listeners[key].push(handler); + } + } + static emitUnlockEvent(key) { + if (this.listeners[key] !== void 0 && this.listeners[key].length > 0) { + const handler = this.listeners[key].shift(); + setImmediate(() => { + handler.call(this); + }); + } + } + }; + Mutex.keys = {}; + Mutex.listeners = {}; + var BlobBatch = class { + static { + __name(this, "BlobBatch"); + } + constructor() { + this.batch = "batch"; + this.batchRequest = new InnerBatchRequest(); } /** - * Creates a BlockBlobClient object. - * + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary. + * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 */ - getBlockBlobClient() { - return new BlockBlobClient(this.url, this.pipeline); + getMultiPartContentType() { + return this.batchRequest.getMultipartContentType(); } /** - * Creates a PageBlobClient object. - * + * Get assembled HTTP request body for sub requests. */ - getPageBlobClient() { - return new PageBlobClient(this.url, this.pipeline); + getHttpRequestBody() { + return this.batchRequest.getHttpRequestBody(); } /** - * Reads or downloads a blob from the system, including its metadata and properties. - * You can also call Get Blob to read a snapshot. - * - * * In Node.js, data returns in a Readable stream readableStreamBody - * * In browsers, data returns in a promise blobBody - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob - * - * @param offset - From which position of the blob to download, greater than or equal to 0 - * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined - * @param options - Optional options to Blob Download operation. - * - * - * Example usage (Node.js): - * - * ```js - * // Download and convert a blob to a string - * const downloadBlockBlobResponse = await blobClient.download(); - * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); - * console.log("Downloaded blob content:", downloaded.toString()); - * - * async function streamToBuffer(readableStream) { - * return new Promise((resolve, reject) => { - * const chunks = []; - * readableStream.on("data", (data) => { - * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); - * }); - * readableStream.on("end", () => { - * resolve(Buffer.concat(chunks)); - * }); - * readableStream.on("error", reject); - * }); - * } - * ``` - * - * Example usage (browser): - * - * ```js - * // Download and convert a blob to a string - * const downloadBlockBlobResponse = await blobClient.download(); - * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); - * console.log( - * "Downloaded blob content", - * downloaded - * ); - * - * async function blobToString(blob: Blob): Promise { - * const fileReader = new FileReader(); - * return new Promise((resolve, reject) => { - * fileReader.onloadend = (ev: any) => { - * resolve(ev.target!.result); - * }; - * fileReader.onerror = reject; - * fileReader.readAsText(blob); - * }); - * } - * ``` + * Get sub requests that are added into the batch request. 
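+ *
+ * For example, to log what has been queued so far (assuming a populated
+ * hypothetical `batch` instance):
+ *
+ * ```js
+ * for (const [contentId, subRequest] of batch.getSubRequests()) {
+ * console.log(`${contentId}: ${subRequest.url}`);
+ * }
+ * ```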
*/ - async download(offset = 0, count, options = {}) { - var _a; - options.conditions = options.conditions || {}; - options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const { span, updatedOptions } = createSpan("BlobClient-download", options); + getSubRequests() { + return this.batchRequest.getSubRequests(); + } + async addSubRequestInternal(subRequest, assembleSubRequestFunc) { + await Mutex.lock(this.batch); try { - const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onDownloadProgress: coreHttp.isNode ? void 0 : options.onProgress - // for Node.js, progress is reported by RetriableReadableStream - }, range: offset === 0 && !count ? void 0 : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); - if (!coreHttp.isNode) { - return wrappedRes; - } - if (options.maxRetryRequests === void 0 || options.maxRetryRequests < 0) { - options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; - } - if (res.contentLength === void 0) { - throw new RangeError(`File download response doesn't contain valid content length header`); - } - if (!res.etag) { - throw new RangeError(`File download response doesn't contain valid etag header`); - } - return new BlobDownloadResponse(wrappedRes, async (start) => { - var _a2; - const updatedDownloadOptions = { - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ifMatch: options.conditions.ifMatch || res.etag, - ifModifiedSince: options.conditions.ifModifiedSince, - ifNoneMatch: options.conditions.ifNoneMatch, - ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, - ifTags: (_a2 = options.conditions) === null || _a2 === void 0 ? 
void 0 : _a2.tagConditions - }, - range: rangeToString({ - count: offset + res.contentLength - start, - offset: start - }), - rangeGetContentMD5: options.rangeGetContentMD5, - rangeGetContentCRC64: options.rangeGetContentCrc64, - snapshot: options.snapshot, - cpkInfo: options.customerProvidedKey - }; - return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; - }, offset, res.contentLength, { - maxRetryRequests: options.maxRetryRequests, - onProgress: options.onProgress + this.batchRequest.preAddSubRequest(subRequest); + await assembleSubRequestFunc(); + this.batchRequest.postAddSubRequest(subRequest); + } finally { + await Mutex.unlock(this.batch); + } + } + setBatchType(batchType) { + if (!this.batchType) { + this.batchType = batchType; + } + if (this.batchType !== batchType) { + throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); + } + } + async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { + let url2; + let credential; + if (typeof urlOrBlobClient === "string" && (coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential || credentialOrOptions instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrOptions))) { + url2 = urlOrBlobClient; + credential = credentialOrOptions; + } else if (urlOrBlobClient instanceof BlobClient) { + url2 = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + options = credentialOrOptions; + } else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + const { span, updatedOptions } = createSpan("BatchDeleteRequest-addSubRequest", options); + try { + this.setBatchType("delete"); + await this.addSubRequestInternal({ + url: url2, + credential + }, async () => { + await new BlobClient(url2, this.batchRequest.createPipeline(credential)).delete(updatedOptions); + }); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } + } + async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { + let url2; + let credential; + let tier2; + if (typeof urlOrBlobClient === "string" && (coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential || credentialOrTier instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrTier))) { + url2 = urlOrBlobClient; + credential = credentialOrTier; + tier2 = tierOrOptions; + } else if (urlOrBlobClient instanceof BlobClient) { + url2 = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + tier2 = credentialOrTier; + options = tierOrOptions; + } else { + throw new RangeError("Invalid arguments. 
Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + const { span, updatedOptions } = createSpan("BatchSetTierRequest-addSubRequest", options); + try { + this.setBatchType("setAccessTier"); + await this.addSubRequestInternal({ + url: url2, + credential + }, async () => { + await new BlobClient(url2, this.batchRequest.createPipeline(credential)).setAccessTier(tier2, updatedOptions); }); } catch (e) { span.setStatus({ @@ -84445,86 +82833,243 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; span.end(); } } + }; + var InnerBatchRequest = class { + static { + __name(this, "InnerBatchRequest"); + } + constructor() { + this.operationCount = 0; + this.body = ""; + const tempGuid = coreHttp.generateUuid(); + this.boundary = `batch_${tempGuid}`; + this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; + this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; + this.batchRequestEnding = `--${this.boundary}--`; + this.subRequests = /* @__PURE__ */ new Map(); + } + /** + * Create pipeline to assemble sub requests. The idea here is to use existing + * credential and serialization/deserialization components, with additional policies to + * filter unnecessary headers, assemble sub requests into request's body + * and intercept request from going to wire. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + createPipeline(credential) { + const isAnonymousCreds = credential instanceof AnonymousCredential; + const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); + const factories = new Array(policyFactoryLength); + factories[0] = coreHttp.deserializationPolicy(); + factories[1] = new BatchHeaderFilterPolicyFactory(); + if (!isAnonymousCreds) { + factories[2] = coreHttp.isTokenCredential(credential) ? attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) : credential; + } + factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); + return new Pipeline(factories, {}); + } + appendSubRequestToBody(request) { + this.body += [ + this.subRequestPrefix, + `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, + "", + `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}` + // sub request start line with method + ].join(HTTP_LINE_ENDING); + for (const header of request.headers.headersArray()) { + this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; + } + this.body += HTTP_LINE_ENDING; + } + preAddSubRequest(subRequest) { + if (this.operationCount >= BATCH_MAX_REQUEST) { + throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); + } + const path2 = getURLPath(subRequest.url); + if (!path2 || path2 === "") { + throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); + } + } + postAddSubRequest(subRequest) { + this.subRequests.set(this.operationCount, subRequest); + this.operationCount++; + } + // Return the http request body with assembling the ending line to the sub request body. 
+ getHttpRequestBody() { + return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; + } + getMultipartContentType() { + return this.multipartContentType; + } + getSubRequests() { + return this.subRequests; + } + }; + var BatchRequestAssemblePolicy = class extends coreHttp.BaseRequestPolicy { + static { + __name(this, "BatchRequestAssemblePolicy"); + } + constructor(batchRequest, nextPolicy, options) { + super(nextPolicy, options); + this.dummyResponse = { + request: new coreHttp.WebResource(), + status: 200, + headers: new coreHttp.HttpHeaders() + }; + this.batchRequest = batchRequest; + } + async sendRequest(request) { + await this.batchRequest.appendSubRequestToBody(request); + return this.dummyResponse; + } + }; + var BatchRequestAssemblePolicyFactory = class { + static { + __name(this, "BatchRequestAssemblePolicyFactory"); + } + constructor(batchRequest) { + this.batchRequest = batchRequest; + } + create(nextPolicy, options) { + return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options); + } + }; + var BatchHeaderFilterPolicy = class extends coreHttp.BaseRequestPolicy { + static { + __name(this, "BatchHeaderFilterPolicy"); + } + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(request) { + let xMsHeaderName = ""; + for (const header of request.headers.headersArray()) { + if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = header.name; + } + } + if (xMsHeaderName !== "") { + request.headers.remove(xMsHeaderName); + } + return this._nextPolicy.sendRequest(request); + } + }; + var BatchHeaderFilterPolicyFactory = class { + static { + __name(this, "BatchHeaderFilterPolicyFactory"); + } + create(nextPolicy, options) { + return new BatchHeaderFilterPolicy(nextPolicy, options); + } + }; + var BlobBatchClient = class { + static { + __name(this, "BlobBatchClient"); + } + constructor(url2, credentialOrPipeline, options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } else if (!credentialOrPipeline) { + pipeline = newPipeline(new AnonymousCredential(), options); + } else { + pipeline = newPipeline(credentialOrPipeline, options); + } + const storageClientContext = new StorageClientContext(url2, pipeline.toServiceClientOptions()); + const path2 = getURLPath(url2); + if (path2 && path2 !== "/") { + this.serviceOrContainerContext = new Container(storageClientContext); + } else { + this.serviceOrContainerContext = new Service(storageClientContext); + } + } /** - * Returns true if the Azure blob resource represented by this client exists; false otherwise. - * - * NOTE: use this function with care since an existing blob might be deleted by other clients or - * applications. Vice versa new blobs might be added by other clients or applications after this - * function completes. - * - * @param options - options to Exists operation. + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. 
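+ *
+ * A minimal usage sketch (hypothetical `blobServiceClient`, `blobClient0` and
+ * `blobClient1`; see the `submitBatch` examples below for more detail):
+ *
+ * ```js
+ * const blobBatchClient = blobServiceClient.getBlobBatchClient();
+ * const batch = blobBatchClient.createBatch();
+ * await batch.deleteBlob(blobClient0.url, blobClient0.credential);
+ * await batch.deleteBlob(blobClient1.url, blobClient1.credential);
+ * const resp = await blobBatchClient.submitBatch(batch);
+ * console.log(resp.subResponsesSucceededCount);
+ * ```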
*/ - async exists(options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-exists", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - await this.getProperties({ - abortSignal: options.abortSignal, - customerProvidedKey: options.customerProvidedKey, - conditions: options.conditions, - tracingOptions: updatedOptions.tracingOptions - }); - return true; - } catch (e) { - if (e.statusCode === 404) { - return false; - } else if (e.statusCode === 409 && (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { - return true; + createBatch() { + return new BlobBatch(); + } + async deleteBlobs(urlsOrBlobClients, credentialOrOptions, options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); + } else { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); } + return this.submitBatch(batch); } - /** - * Returns all user-defined metadata, standard HTTP properties, and system properties - * for the blob. It does not return the content of the blob. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties - * - * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if - * they originally contained uppercase characters. This differs from the metadata keys returned by - * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which - * will retain their original casing. - * - * @param options - Optional options to Get Properties operation. - */ - async getProperties(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-getProperties", options); - try { - options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const res = await this.blobContext.getProperties(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); - return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); + } else { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); + } } + return this.submitBatch(batch); } /** - * Marks the specified blob or snapshot for deletion. The blob is later deleted - * during garbage collection. 
Note that in order to delete a blob, you must delete - * all of its snapshots. You can delete both at the same time with the Delete - * Blob operation. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * Submit batch request which consists of multiple subrequests. * - * @param options - Optional options to Blob Delete operation. + * Get `blobBatchClient` and other details before running the snippets. + * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` + * + * Example usage: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob(urlInString0, credential0); + * await batchRequest.deleteBlob(urlInString1, credential1, { + * deleteSnapshots: "include" + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * Example using a lease: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); + * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { + * conditions: { leaseId: leaseId } + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @param batchRequest - A set of Delete or SetTier operations. + * @param options - */ - async delete(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-delete", options); - options.conditions = options.conditions || {}; + async submitBatch(batchRequest, options = {}) { + if (!batchRequest || batchRequest.getSubRequests().size === 0) { + throw new RangeError("Batch request should contain one or more sub requests."); + } + const { span, updatedOptions } = createSpan("BlobBatchClient-submitBatch", options); try { - return await this.blobContext.delete(Object.assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + const batchRequestBody = batchRequest.getHttpRequestBody(); + const rawBatchResponse = await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); + const responseSummary = await batchResponseParser.parseBatchResponse(); + const res = { + _response: rawBatchResponse._response, + contentType: rawBatchResponse.contentType, + errorCode: rawBatchResponse.errorCode, + requestId: rawBatchResponse.requestId, + clientRequestId: rawBatchResponse.clientRequestId, + version: rawBatchResponse.version, + subResponses: responseSummary.subResponses, + subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, + subResponsesFailedCount: responseSummary.subResponsesFailedCount + }; + return res; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -84535,110 +83080,78 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; span.end(); } } - /** - * Marks the specified blob or snapshot for deletion if it exists. 
The blob is later deleted - * during garbage collection. Note that in order to delete a blob, you must delete - * all of its snapshots. You can delete both at the same time with the Delete - * Blob operation. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob - * - * @param options - Optional options to Blob Delete operation. - */ - async deleteIfExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); - try { - const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a blob or snapshot only if it exists." - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + }; + var ContainerClient = class extends StorageClient { + static { + __name(this, "ContainerClient"); + } + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, options) { + let pipeline; + let url2; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } else if (coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { + url2 = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string") { + const containerName = credentialOrPipelineOrContainerName; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url2 = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } else if (extractedCreds.kind === "SASConnString") { + url2 = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + "?" 
+ extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + } else { + throw new Error("Expecting non-empty strings for containerName parameter"); } + super(url2, pipeline); + this._containerName = this.getContainerNameFromUrl(); + this.containerContext = new Container(this.storageClientContext); } /** - * Restores the contents and metadata of soft deleted blob and any associated - * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 - * or later. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob - * - * @param options - Optional options to Blob Undelete operation. + * The name of the container. */ - async undelete(options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-undelete", options); - try { - return await this.blobContext.undelete(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } + get containerName() { + return this._containerName; } /** - * Sets system properties on the blob. + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * - * If no value provided, or no value provided for the specified blob HTTP headers, - * these blob HTTP headers without a value will be cleared. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * @param options - Options to Container Create operation. * - * @param blobHTTPHeaders - If no value provided, or no value provided for - * the specified blob HTTP headers, these blob HTTP - * headers without a value will be cleared. - * A common header to set is `blobContentType` - * enabling the browser to provide functionality - * based on file type. - * @param options - Optional options to Blob Set HTTP Headers operation. - */ - async setHTTPHeaders(blobHTTPHeaders, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setHTTPHeaders", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.setHttpHeaders(Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - /** - * Sets user-defined metadata for the specified blob as one or more name-value pairs. * - * If no option provided, or no metadata defined in the parameter, the blob - * metadata will be removed. 
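+  /**
+   * Construction sketch for the overloads handled above (illustrative values; the
+   * connection string is assumed to live in an environment variable):
+   *
+   * ```js
+   * // Account connection string plus a container name (Node.js only, per the branch above).
+   * const containerClient = new ContainerClient(process.env.AZURE_STORAGE_CONNECTION_STRING, "my-container");
+   *
+   * // Or: a container URL plus an explicit credential.
+   * const anonymousClient = new ContainerClient(
+   *   "https://myaccount.blob.core.windows.net/my-container",
+   *   new AnonymousCredential()
+   * );
+   * ```
+   */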
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * Example usage: * - * @param metadata - Replace existing metadata with this value. - * If no value provided the existing metadata will be removed. - * @param options - Optional options to Set Metadata operation. + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); + * ``` */ - async setMetadata(metadata2, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setMetadata", options); - options.conditions = options.conditions || {}; + async create(options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-create", options); try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: metadata2, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + return await this.containerContext.create(Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -84650,20 +83163,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Sets tags on the underlying blob. - * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. - * Valid tag key and value characters include lower and upper case letters, digits (0-9), - * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). + * Creates a new container under the specified account. If the container with + * the same name already exists, it is not changed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * - * @param tags - * @param options - */ - async setTags(tags2, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setTags", options); + async createIfNotExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); try { - return await this.blobContext.setTags(Object.assign(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags2) })); + const res = await this.create(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { + if (((_a = e.details) === null || _a === void 0 ? 
void 0 : _a.errorCode) === "ContainerAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a container only if it does not already exist." + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, message: e.message @@ -84674,18 +83194,30 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Gets the tags associated with the underlying blob. + * Returns true if the Azure container resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa new containers with the same name might be added by other clients or + * applications after this function completes. * * @param options - */ - async getTags(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-getTags", options); + async exists(options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-exists", options); try { - const response = await this.blobContext.getTags(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); - return wrappedResponse; + await this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions + }); + return true; } catch (e) { + if (e.statusCode === 404) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when checking container existence" + }); + return false; + } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, message: e.message @@ -84696,138 +83228,67 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Get a {@link BlobLeaseClient} that manages leases on the blob. + * Creates a {@link BlobClient} * - * @param proposeLeaseId - Initial proposed lease Id. - * @returns A new BlobLeaseClient object for managing leases on the blob. + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. */ - getBlobLeaseClient(proposeLeaseId) { - return new BlobLeaseClient(this, proposeLeaseId); + getBlobClient(blobName) { + return new BlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } /** - * Creates a read-only snapshot of a blob. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob + * Creates an {@link AppendBlobClient} * - * @param options - Optional options to the Blob Create Snapshot operation. 
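+  /**
+   * Sketch pairing `createIfNotExists` with its `succeeded` flag (illustrative;
+   * prefer this over an `exists()` check followed by `create()`, which races with
+   * other clients as the note above warns):
+   *
+   * ```js
+   * const { succeeded } = await containerClient.createIfNotExists();
+   * console.log(succeeded ? "container created" : "container already existed");
+   * ```
+   */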
+ * @param blobName - An append blob name */ - async createSnapshot(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-createSnapshot", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.createSnapshot(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } + getAppendBlobClient(blobName) { + return new AppendBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } /** - * Asynchronously copies a blob to a destination within the storage account. - * This method returns a long running operation poller that allows you to wait - * indefinitely until the copy is completed. - * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. - * Note that the onProgress callback will not be invoked if the operation completes in the first - * request, and attempting to cancel a completed copy will result in an error being thrown. - * - * In version 2012-02-12 and later, the source for a Copy Blob operation can be - * a committed blob in any Azure storage account. - * Beginning with version 2015-02-21, the source for a Copy Blob operation can be - * an Azure file in any Azure storage account. - * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob - * operation to copy from another storage account. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob - * - * Example using automatic polling: - * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url'); - * const result = await copyPoller.pollUntilDone(); - * ``` - * - * Example using manual polling: - * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url'); - * while (!poller.isDone()) { - * await poller.poll(); - * } - * const result = copyPoller.getResult(); - * ``` + * Creates a {@link BlockBlobClient} * - * Example using progress updates: + * @param blobName - A block blob name * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url', { - * onProgress(state) { - * console.log(`Progress: ${state.copyProgress}`); - * } - * }); - * const result = await copyPoller.pollUntilDone(); - * ``` * - * Example using a changing polling interval (default 15 seconds): + * Example usage: * * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url', { - * intervalInMs: 1000 // poll blob every 1 second for copy progress - * }); - * const result = await copyPoller.pollUntilDone(); - * ``` - * - * Example using copy cancellation: + * const content = "Hello world!"; * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url'); - * // cancel operation after starting it. 
- * try { - * await copyPoller.cancelOperation(); - * // calls to get the result now throw PollerCancelledError - * await copyPoller.getResult(); - * } catch (err) { - * if (err.name === 'PollerCancelledError') { - * console.log('The copy was cancelled.'); - * } - * } + * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); * ``` - * - * @param copySource - url to the source Azure Blob/File. - * @param options - Optional options to the Blob Start Copy From URL operation. */ - async beginCopyFromURL(copySource2, options = {}) { - const client = { - abortCopyFromURL: /* @__PURE__ */ __name((...args) => this.abortCopyFromURL(...args), "abortCopyFromURL"), - getProperties: /* @__PURE__ */ __name((...args) => this.getProperties(...args), "getProperties"), - startCopyFromURL: /* @__PURE__ */ __name((...args) => this.startCopyFromURL(...args), "startCopyFromURL") - }; - const poller = new BlobBeginCopyFromUrlPoller({ - blobClient: client, - copySource: copySource2, - intervalInMs: options.intervalInMs, - onProgress: options.onProgress, - resumeFrom: options.resumeFrom, - startCopyFromURLOptions: options - }); - await poller.poll(); - return poller; + getBlockBlobClient(blobName) { + return new BlockBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + } + /** + * Creates a {@link PageBlobClient} + * + * @param blobName - A page blob name + */ + getPageBlobClient(blobName) { + return new PageBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } /** - * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero - * length and full metadata. Version 2012-02-12 and newer. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob + * Returns all user-defined metadata and system properties for the specified + * container. The data returned does not include the container's list of blobs. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties * - * @param copyId - Id of the Copy From URL operation. - * @param options - Optional options to the Blob Abort Copy From URL operation. + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Options to Container Get Properties operation. 
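+  /**
+   * Sketch for the container `getProperties` documented above (illustrative;
+   * remember that metadata keys come back lowercased):
+   *
+   * ```js
+   * const props = await containerClient.getProperties();
+   * console.log(props.leaseState, props.metadata);
+   * ```
+   */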
*/ - async abortCopyFromURL(copyId2, options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-abortCopyFromURL", options); + async getProperties(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-getProperties", options); try { - return await this.blobContext.abortCopyFromURL(copyId2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + return await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -84839,25 +83300,19 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not - * return a response until the copy is complete. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url + * Marks the specified container for deletion. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container * - * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication - * @param options - + * @param options - Options to Container Delete operation. */ - async syncCopyFromURL(copySource2, options = {}) { - var _a, _b, _c; - const { span, updatedOptions } = createSpan("BlobClient-syncCopyFromURL", options); - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; + async delete(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-delete", options); try { - return await this.blobContext.copyFromURL(copySource2, Object.assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince - }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope, copySourceTags: options.copySourceTags }, convertTracingToRequestOptionsBase(updatedOptions))); + return await this.containerContext.delete(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -84869,22 +83324,26 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Sets the tier on a blob. The operation is allowed on a page blob in a premium - * storage account and on a block blob in a blob storage account (locally redundant - * storage only). A premium page blob's tier determines the allowed size, IOPS, - * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive - * storage type. This operation does not update the blob's ETag. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container * - * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. - * @param options - Optional options to the Blob Set Tier operation. + * @param options - Options to Container Delete operation. */ - async setAccessTier(tier2, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setAccessTier", options); + async deleteIfExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); try { - return await this.blobContext.setTier(toAccessTier(tier2), Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions))); + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when deleting a container only if it exists." + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, message: e.message @@ -84894,82 +83353,28 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; span.end(); } } - async downloadToBuffer(param1, param2, param3, param4 = {}) { - let buffer; - let offset = 0; - let count = 0; - let options = param4; - if (param1 instanceof Buffer) { - buffer = param1; - offset = param2 || 0; - count = typeof param3 === "number" ? param3 : 0; - } else { - offset = typeof param1 === "number" ? param1 : 0; - count = typeof param2 === "number" ? param2 : 0; - options = param3 || {}; + /** + * Sets one or more user-defined name-value pairs for the specified container. 
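+  /**
+   * Sketch for the container `deleteIfExists` above (illustrative): the `succeeded`
+   * flag distinguishes a real deletion from an already-missing container without a
+   * try/catch around `delete`.
+   *
+   * ```js
+   * const res = await containerClient.deleteIfExists();
+   * if (!res.succeeded) {
+   *   console.log("container was already gone");
+   * }
+   * ```
+   */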
+ * + * If no option provided, or no metadata defined in the parameter, the container + * metadata will be removed. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Options to Container Set Metadata operation. + */ + async setMetadata(metadata2, options = {}) { + if (!options.conditions) { + options.conditions = {}; } - const { span, updatedOptions } = createSpan("BlobClient-downloadToBuffer", options); + if (options.conditions.ifUnmodifiedSince) { + throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); + } + const { span, updatedOptions } = createSpan("ContainerClient-setMetadata", options); try { - if (!options.blockSize) { - options.blockSize = 0; - } - if (options.blockSize < 0) { - throw new RangeError("blockSize option must be >= 0"); - } - if (options.blockSize === 0) { - options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; - } - if (offset < 0) { - throw new RangeError("offset option must be >= 0"); - } - if (count && count <= 0) { - throw new RangeError("count option must be greater than 0"); - } - if (!options.conditions) { - options.conditions = {}; - } - if (!count) { - const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - count = response.contentLength - offset; - if (count < 0) { - throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); - } - } - if (!buffer) { - try { - buffer = Buffer.alloc(count); - } catch (error) { - throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile". 
${error.message}`); - } - } - if (buffer.length < count) { - throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); - } - let transferProgress = 0; - const batch = new Batch(options.concurrency); - for (let off = offset; off < offset + count; off = off + options.blockSize) { - batch.addOperation(async () => { - let chunkEnd = offset + count; - if (off + options.blockSize < chunkEnd) { - chunkEnd = off + options.blockSize; - } - const response = await this.download(off, chunkEnd - off, { - abortSignal: options.abortSignal, - conditions: options.conditions, - maxRetryRequests: options.maxRetryRequestsPerBlock, - customerProvidedKey: options.customerProvidedKey, - tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) - }); - const stream2 = response.readableStreamBody; - await streamToBuffer(stream2, buffer, off - offset, chunkEnd - offset); - transferProgress += chunkEnd - off; - if (options.onProgress) { - options.onProgress({ loadedBytes: transferProgress }); - } - }); - } - await batch.do(); - return buffer; + return await this.containerContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: metadata2, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -84981,30 +83386,54 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. * - * Downloads an Azure Blob to a local file. - * Fails if the the given file path already exits. - * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". * - * @param filePath - - * @param offset - From which position of the block blob to download. - * @param count - How much data to be downloaded. Will download to the end when passing undefined. - * @param options - Options to Blob download options. - * @returns The response data for blob download operation, - * but with readableStreamBody set to undefined since its - * content is already read and written into a local file - * at the specified path. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * + * @param options - Options to Container Get Access Policy operation. 
*/ - async downloadToFile(filePath, offset = 0, count, options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-downloadToFile", options); + async getAccessPolicy(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-getAccessPolicy", options); try { - const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - if (response.readableStreamBody) { - await readStreamToLocalFile(response.readableStreamBody, filePath); + const response = await this.containerContext.getAccessPolicy(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + const res = { + _response: response._response, + blobPublicAccess: response.blobPublicAccess, + date: response.date, + etag: response.etag, + errorCode: response.errorCode, + lastModified: response.lastModified, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + signedIdentifiers: [], + version: response.version + }; + for (const identifier of response) { + let accessPolicy = void 0; + if (identifier.accessPolicy) { + accessPolicy = { + permissions: identifier.accessPolicy.permissions + }; + if (identifier.accessPolicy.expiresOn) { + accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); + } + if (identifier.accessPolicy.startsOn) { + accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); + } + } + res.signedIdentifiers.push({ + accessPolicy, + id: identifier.id + }); } - response.blobDownloadStream = void 0; - return response; + return res; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -85015,61 +83444,39 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; span.end(); } } - getBlobAndContainerNamesFromUrl() { - let containerName; - let blobName; - try { - const parsedUrl = coreHttp.URLBuilder.parse(this.url); - if (parsedUrl.getHost().split(".")[1] === "blob") { - const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); - containerName = pathComponents[1]; - blobName = pathComponents[3]; - } else if (isIpEndpointStyle(parsedUrl)) { - const pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); - containerName = pathComponents[2]; - blobName = pathComponents[4]; - } else { - const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); - containerName = pathComponents[1]; - blobName = pathComponents[3]; - } - containerName = decodeURIComponent(containerName); - blobName = decodeURIComponent(blobName); - blobName = blobName.replace(/\\/g, "/"); - if (!containerName) { - throw new Error("Provided containerName is invalid."); - } - return { blobName, containerName }; - } catch (error) { - throw new Error("Unable to extract blobName and containerName with provided information."); - } - } /** - * Asynchronously copies a blob to a destination within the storage account. - * In version 2012-02-12 and later, the source for a Copy Blob operation can be - * a committed blob in any Azure storage account. - * Beginning with version 2015-02-21, the source for a Copy Blob operation can be - * an Azure file in any Azure storage account. - * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob - * operation to copy from another storage account. 
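+  /**
+   * Sketch reading the container ACL via `getAccessPolicy` (illustrative; an
+   * identifier's `accessPolicy` may be undefined, as the parsing above shows):
+   *
+   * ```js
+   * const acl = await containerClient.getAccessPolicy();
+   * console.log(acl.blobPublicAccess); // undefined for a private container
+   * for (const identifier of acl.signedIdentifiers) {
+   *   console.log(identifier.id, identifier.accessPolicy && identifier.accessPolicy.permissions);
+   * }
+   * ```
+   */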
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. * - * @param copySource - url to the source Azure Blob/File. - * @param options - Optional options to the Blob Start Copy From URL operation. + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl provided, the existing container ACL will be + * removed. + * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. + * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. + * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. */ - async startCopyFromURL(copySource2, options = {}) { - var _a, _b, _c; - const { span, updatedOptions } = createSpan("BlobClient-startCopyFromURL", options); + async setAccessPolicy(access2, containerAcl2, options = {}) { options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; + const { span, updatedOptions } = createSpan("ContainerClient-setAccessPolicy", options); try { - return await this.blobContext.startCopyFromURL(copySource2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - sourceIfTags: options.sourceConditions.tagConditions - }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); + const acl = []; + for (const identifier of containerAcl2 || []) { + acl.push({ + accessPolicy: { + expiresOn: identifier.accessPolicy.expiresOn ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) : "", + permissions: identifier.accessPolicy.permissions, + startsOn: identifier.accessPolicy.startsOn ? 
truncatedISO8061Date(identifier.accessPolicy.startsOn) : "" + }, + id: identifier.id + }); + } + return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access: access2, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -85081,34 +83488,74 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; span.end(); } } /** - * Only available for BlobClient constructed with a shared key credential. + * Get a {@link BlobLeaseClient} that manages leases on the container. * - * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the shared key credential of the client. + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the container. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a new block blob, or updates the content of an existing block blob. * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob, + * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. * - * @param options - Optional parameters. - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + * This is a non-parallel uploading method; please use {@link BlockBlobClient.uploadFile}, + * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better + * performance with concurrent uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param blobName - Name of the block blob to create or update. + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non-Base64/Hex-encoded characters. + * @param options - Options to configure the Block Blob Upload operation. + * @returns Block Blob upload response data and the corresponding BlockBlobClient instance.
*/ - generateSasUrl(options) { - return new Promise((resolve) => { - if (!(this.credential instanceof StorageSharedKeyCredential)) { - throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); - } - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); - resolve(appendToURLQuery(this.url, sas)); - }); + async uploadBlockBlob(blobName, body2, contentLength2, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-uploadBlockBlob", options); + try { + const blockBlobClient = this.getBlockBlobClient(blobName); + const response = await blockBlobClient.upload(body2, contentLength2, updatedOptions); + return { + blockBlobClient, + response + }; + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Delete the immutablility policy on the blob. + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob * - * @param options - Optional options to delete immutability policy on the blob. + * @param blobName - + * @param options - Options to Blob Delete operation. + * @returns Block blob deletion response data. */ - async deleteImmutabilityPolicy(options) { - const { span, updatedOptions } = createSpan("BlobClient-deleteImmutabilityPolicy", options); + async deleteBlob(blobName, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-deleteBlob", options); try { - return await this.blobContext.deleteImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + let blobClient = this.getBlobClient(blobName); + if (options.versionId) { + blobClient = blobClient.withVersion(options.versionId); + } + return await blobClient.delete(updatedOptions); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -85120,14 +83567,24 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; span.end(); } } /** - * Set immutablility policy on the blob. + * listBlobFlatSegment returns a single segment of blobs starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call listBlobsFlatSegment again + * (passing the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs * - * @param options - Optional options to set immutability policy on the blob. + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Flat Segment operation.
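+  /**
+   * Sketch for the `uploadBlockBlob`/`deleteBlob` convenience pair above
+   * (illustrative names and content):
+   *
+   * ```js
+   * const content = "Hello world!";
+   * const { blockBlobClient } = await containerClient.uploadBlockBlob("greeting.txt", content, Buffer.byteLength(content));
+   * console.log("uploaded to", blockBlobClient.url);
+   * await containerClient.deleteBlob("greeting.txt", { deleteSnapshots: "include" });
+   * ```
+   */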
*/ - async setImmutabilityPolicy(immutabilityPolicy, options) { - const { span, updatedOptions } = createSpan("BlobClient-setImmutabilityPolicy", options); + async listBlobFlatSegment(marker2, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); try { - return await this.blobContext.setImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, modifiedAccessConditions: options === null || options === void 0 ? void 0 : options.modifiedAccessCondition }, convertTracingToRequestOptionsBase(updatedOptions))); + const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker: marker2 }, options), convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }) }) }); + return wrappedResponse; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -85139,14 +83596,29 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; span.end(); } } /** - * Set legal hold on the blob. + * listBlobHierarchySegment returns a single segment of blobs starting from + * the specified Marker. Use an empty Marker to start enumeration from the + * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment + * again (passing the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs * - * @param options - Optional options to set legal hold on the blob. + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Hierarchy Segment operation. */ - async setLegalHold(legalHoldEnabled, options) { - const { span, updatedOptions } = createSpan("BlobClient-setLegalHold", options); + async listBlobHierarchySegment(delimiter2, marker2, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); try { - return await this.blobContext.setLegalHold(legalHoldEnabled, Object.assign({ abortSignal: options === null || options === void 0 ?
void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + const response = await this.containerContext.listBlobHierarchySegment(delimiter2, Object.assign(Object.assign({ marker: marker2 }, options), convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); + return blobPrefix; + }) }) }); + return wrappedResponse; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -85157,817 +83629,1058 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; span.end(); } } - }; - var AppendBlobClient = class _AppendBlobClient extends BlobClient { - static { - __name(this, "AppendBlobClient"); + /** + * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse + * + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. 
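+  /**
+   * Manual marker-loop sketch over `listBlobHierarchySegment`, mirroring the
+   * pattern its docs above describe (illustrative; "/" is the assumed delimiter):
+   *
+   * ```js
+   * let marker;
+   * do {
+   *   const resp = await containerClient.listBlobHierarchySegment("/", marker);
+   *   for (const prefix of resp.segment.blobPrefixes || []) {
+   *     console.log("virtual directory:", prefix.name);
+   *   }
+   *   for (const blob of resp.segment.blobItems) {
+   *     console.log("blob:", blob.name);
+   *   }
+   *   marker = resp.continuationToken;
+   * } while (marker);
+   * ```
+   */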
+ */ + listSegments(marker2, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listSegments_1() { + let listBlobsFlatSegmentResponse; + if (!!marker2 || marker2 === void 0) { + do { + listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker2, options)); + marker2 = listBlobsFlatSegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse)); + } while (marker2); + } + }, "listSegments_1")); } - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { - let pipeline; - let url2; - options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - pipeline = credentialOrPipelineOrContainerName; - } else if (coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - options = blobNameOrOptions; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); - } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; - pipeline = newPipeline(new AnonymousCredential(), options); - } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); - if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); - } - pipeline = newPipeline(sharedKeyCredential, options); - } else { - throw new Error("Account connection string is only supported in Node.js environment"); + /** + * Returns an AsyncIterableIterator of {@link BlobItem} objects + * + * @param options - Options to list blobs operation. + */ + listItems(options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listItems_1() { + var e_1, _a; + let marker2; + try { + for (var _b = tslib.__asyncValues(this.listSegments(marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { + const listBlobsFlatSegmentResponse = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); + } + } catch (e_1_1) { + e_1 = { error: e_1_1 }; + } finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } finally { + if (e_1) throw e_1.error; } - } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" 
+ extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); - } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } - } else { - throw new Error("Expecting non-empty strings for containerName and blobName parameters"); - } - super(url2, pipeline); - this.appendBlobContext = new AppendBlob(this.storageClientContext); + }, "listItems_1")); } /** - * Creates a new AppendBlobClient object identical to the source but with the - * specified snapshot timestamp. - * Provide "" will remove the snapshot and return a Client to the base blob. + * Returns an async iterable iterator to list all the blobs + * under the specified account. * - * @param snapshot - The snapshot timestamp. - * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. - */ - withSnapshot(snapshot2) { - return new _AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); - } - /** - * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. - * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * .byPage() returns an async iterable iterator to list the blobs in pages. * - * @param options - Options to the Append Block Create operation. + * Example using `for await` syntax: + * + * ```js + * // Get the containerClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("");` + * let i = 1; + * for await (const blob of containerClient.listBlobsFlat()) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` * + * Example using `iter.next()`: * - * Example usage: + * ```js + * let i = 1; + * let iter = containerClient.listBlobsFlat(); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: * * ```js - * const appendBlobClient = containerClient.getAppendBlobClient(""); - * await appendBlobClient.create(); + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * ``` + * + * @param options - Options to list blobs. + * @returns An asyncIterableIterator that supports paging. 
*/ - async create(options = {}) { - var _a, _b, _c; - const { span, updatedOptions } = createSpan("AppendBlobClient-create", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.create(0, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + listBlobsFlat(options = {}) { + const include2 = []; + if (options.includeCopy) { + include2.push("copy"); + } + if (options.includeDeleted) { + include2.push("deleted"); + } + if (options.includeMetadata) { + include2.push("metadata"); + } + if (options.includeSnapshots) { + include2.push("snapshots"); + } + if (options.includeVersions) { + include2.push("versions"); + } + if (options.includeUncommitedBlobs) { + include2.push("uncommittedblobs"); + } + if (options.includeTags) { + include2.push("tags"); + } + if (options.includeDeletedWithVersions) { + include2.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include2.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include2.push("legalhold"); } + if (options.prefix === "") { + options.prefix = void 0; + } + const updatedOptions = Object.assign(Object.assign({}, options), include2.length > 0 ? { include: include2 } : {}); + const iter = this.listItems(updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: /* @__PURE__ */ __name((settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, "byPage") + }; } /** - * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. - * If the blob with the same name already exists, the content of the existing blob will remain unchanged. - * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse * - * @param options - + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. 
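+  /**
+   * Sketch of the `include` flags handled above (illustrative): each `includeX`
+   * option becomes one entry of the listing request's `include` parameter.
+   *
+   * ```js
+   * for await (const blob of containerClient.listBlobsFlat({
+   *   prefix: "logs/",
+   *   includeMetadata: true,
+   *   includeTags: true
+   * })) {
+   *   console.log(blob.name, blob.metadata, blob.tags);
+   * }
+   * ```
+   */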
The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. */ - async createIfNotExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("AppendBlobClient-createIfNotExists", options); - const conditions = { ifNoneMatch: ETagAny }; - try { - const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist." - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + listHierarchySegments(delimiter2, marker2, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listHierarchySegments_1() { + let listBlobsHierarchySegmentResponse; + if (!!marker2 || marker2 === void 0) { + do { + listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter2, marker2, options)); + marker2 = listBlobsHierarchySegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse)); + } while (marker2); } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } + }, "listHierarchySegments_1")); } /** - * Seals the append blob, making it read only. + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. * - * @param options - + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. */ - async seal(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-seal", options); - options.conditions = options.conditions || {}; - try { - return await this.appendBlobContext.seal(Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } + listItemsByHierarchy(delimiter2, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listItemsByHierarchy_1() { + var e_2, _a; + let marker2; + try { + for (var _b = tslib.__asyncValues(this.listHierarchySegments(delimiter2, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { + const listBlobsHierarchySegmentResponse = _c.value; + const segment = listBlobsHierarchySegmentResponse.segment; + if (segment.blobPrefixes) { + for (const prefix2 of segment.blobPrefixes) { + yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix2)); + } + } + for (const blob of segment.blobItems) { + yield yield tslib.__await(Object.assign({ kind: "blob" }, blob)); + } + } + } catch (e_2_1) { + e_2 = { error: e_2_1 }; + } finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } finally { + if (e_2) throw e_2.error; + } + } + }, "listItemsByHierarchy_1")); } /** - * Commits a new block of data to the end of the existing append blob. - * @see https://docs.microsoft.com/rest/api/storageservices/append-block + * Returns an async iterable iterator to list all the blobs by hierarchy. + * under the specified account. * - * @param body - Data to be appended. - * @param contentLength - Length of the body in bytes. - * @param options - Options to the Append Block operation. + * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. * + * Example using `for await` syntax: * - * Example usage: + * ```js + * for await (const item of containerClient.listBlobsByHierarchy("/")) { + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * } + * ``` + * + * Example using `iter.next()`: * * ```js - * const content = "Hello World!"; + * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); + * let entity = await iter.next(); + * while (!entity.done) { + * let item = entity.value; + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * entity = await iter.next(); + * } + * ``` * - * // Create a new append blob and append data to the blob. - * const newAppendBlobClient = containerClient.getAppendBlobClient(""); - * await newAppendBlobClient.create(); - * await newAppendBlobClient.appendBlock(content, content.length); + * Example using `byPage()`: * - * // Append data to an existing append blob. 
- * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); - * await existingAppendBlobClient.appendBlock(content, content.length); + * ```js + * console.log("Listing blobs by hierarchy by page"); + * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { + * const segment = response.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a max page size: + * + * ```js + * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); + * + * let i = 1; + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { + * console.log(`Page ${i++}`); + * const segment = response.segment; + * + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } * ``` + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. */ - async appendBlock(body2, contentLength2, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlock", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.appendBlock(contentLength2, body2, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress - }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + listBlobsByHierarchy(delimiter2, options = {}) { + if (delimiter2 === "") { + throw new RangeError("delimiter should contain one or more characters"); + } + const include2 = []; + if (options.includeCopy) { + include2.push("copy"); + } + if (options.includeDeleted) { + include2.push("deleted"); + } + if (options.includeMetadata) { + include2.push("metadata"); + } + if (options.includeSnapshots) { + include2.push("snapshots"); + } + if (options.includeVersions) { + include2.push("versions"); + } + if (options.includeUncommitedBlobs) { + include2.push("uncommittedblobs"); + } + if (options.includeTags) { + include2.push("tags"); + } + if (options.includeDeletedWithVersions) { + include2.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include2.push("immutabilitypolicy"); } + if (options.includeLegalHold) { + include2.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = void 0; + } + const updatedOptions = Object.assign(Object.assign({}, options), include2.length > 0 ? { include: include2 } : {}); + const iter = this.listItemsByHierarchy(delimiter2, updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + async next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: /* @__PURE__ */ __name((settings = {}) => { + return this.listHierarchySegments(delimiter2, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, "byPage") + }; } /** - * The Append Block operation commits a new block of data to the end of an existing append blob - * where the contents are read from a source url. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. * - * @param sourceURL - - * The url to the blob that will be the source of the copy. A source blob in the same storage account can - * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob - * must either be public or must be authenticated via a shared access signature. If the source blob is - * public, no authentication is required to perform the operation. - * @param sourceOffset - Offset in source to be appended - * @param count - Number of bytes to be appended as a block - * @param options - + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. 
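To make the tag-filter syntax described above concrete: conditions quote the tag name, single-quote the value, and combine only with `AND`. A hedged sketch, with invented tag names and a pre-existing `containerClient`:

```js
// Sketch only; tag names and values are illustrative, not from this patch.
async function findPublished(containerClient) {
  const where = `"project" = 'qodana' AND "stage" = 'published'`;
  for await (const blob of containerClient.findBlobsByTags(where)) {
    // `tagValue` is filled in by the response wrapper below when the result
    // carries exactly one matching tag.
    console.log(blob.name, blob.tagValue);
  }
}
```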
+ * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. */ - async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlockFromURL", options); - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; + async findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, Object.assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince - }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker: marker2, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? 
void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, message: e.message - }); - throw e; - } finally { - span.end(); - } - } - }; - var BlockBlobClient = class _BlockBlobClient extends BlobClient { - static { - __name(this, "BlockBlobClient"); - } - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { - let pipeline; - let url2; - options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - pipeline = credentialOrPipelineOrContainerName; - } else if (coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - options = blobNameOrOptions; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); - } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; - if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { - options = blobNameOrOptions; - } - pipeline = newPipeline(new AnonymousCredential(), options); - } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); - if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); - } - pipeline = newPipeline(sharedKeyCredential, options); - } else { - throw new Error("Account connection string is only supported in Node.js environment"); - } - } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); - } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); - } - } else { - throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + }); + throw e; + } finally { + span.end(); } - super(url2, pipeline); - this.blockBlobContext = new BlockBlob(this.storageClientContext); - this._blobContext = new Blob$1(this.storageClientContext); } /** - * Creates a new BlockBlobClient object identical to the source but with the - * specified snapshot timestamp. - * Provide "" will remove the snapshot and return a URL to the base blob. + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. * - * @param snapshot - The snapshot timestamp. 
- * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. */ - withSnapshot(snapshot2) { - return new _BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); + findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* findBlobsByTagsSegments_1() { + let response; + if (!!marker2 || marker2 === void 0) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options)); + response.blobs = response.blobs || []; + marker2 = response.continuationToken; + yield yield tslib.__await(response); + } while (marker2); + } + }, "findBlobsByTagsSegments_1")); } /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. + * Returns an AsyncIterableIterator for blobs. * - * Quick query for a JSON or CSV formatted blob. + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* findBlobsByTagsItems_1() { + var e_3, _a; + let marker2; + try { + for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); + } + } catch (e_3_1) { + e_3 = { error: e_3_1 }; + } finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } finally { + if (e_3) throw e_3.error; + } + } + }, "findBlobsByTagsItems_1")); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. * - * Example usage (Node.js): + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: * * ```js - * // Query and convert a blob to a string - * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); - * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); - * console.log("Query blob content:", downloaded); + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` * - * async function streamToBuffer(readableStream) { - * return new Promise((resolve, reject) => { - * const chunks = []; - * readableStream.on("data", (data) => { - * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); - * }); - * readableStream.on("end", () => { - * resolve(Buffer.concat(chunks)); - * }); - * readableStream.on("error", reject); - * }); + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); * } * ``` * - * @param query - - * @param options - + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. 
*/ - async query(query, options = {}) { - var _a; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); + findBlobsByTags(tagFilterSqlExpression, options = {}) { + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: /* @__PURE__ */ __name((settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, "byPage") + }; + } + getContainerNameFromUrl() { + let containerName; try { - if (!coreHttp.isNode) { - throw new Error("This operation currently is only supported in Node.js."); + const parsedUrl = coreHttp.URLBuilder.parse(this.url); + if (parsedUrl.getHost().split(".")[1] === "blob") { + containerName = parsedUrl.getPath().split("/")[1]; + } else if (isIpEndpointStyle(parsedUrl)) { + containerName = parsedUrl.getPath().split("/")[2]; + } else { + containerName = parsedUrl.getPath().split("/")[1]; } - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { - queryType: "SQL", - expression: query, - inputSerialization: toQuerySerialization(options.inputTextConfiguration), - outputSerialization: toQuerySerialization(options.outputTextConfiguration) - }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); - return new BlobQueryResponse(response, { - abortSignal: options.abortSignal, - onProgress: options.onProgress, - onError: options.onError - }); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + containerName = decodeURIComponent(containerName); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return containerName; + } catch (error) { + throw new Error("Unable to extract containerName with provided information."); } } /** - * Creates a new block blob, or updates the content of an existing block blob. - * Updating an existing block blob overwrites any existing metadata on the blob. - * Partial updates are not supported; the content of the existing blob is - * overwritten with the new content. To perform a partial update of a block blob's, - * use {@link stageBlock} and {@link commitBlockList}. + * Only available for ContainerClient constructed with a shared key credential. * - * This is a non-parallel uploading method, please use {@link uploadFile}, - * {@link uploadStream} or {@link uploadBrowserData} for better performance - * with concurrency uploading. + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. 
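The container-name extraction implemented just above branches on endpoint style; a short worked example of the two URL shapes it distinguishes (both URLs are invented):

```js
// Host-style endpoint: the container is the first path segment.
//   https://myaccount.blob.core.windows.net/mycontainer/myblob -> segments[1]
// IP/emulator-style endpoint: the account name takes the first segment,
// so the container is the second.
//   https://127.0.0.1:10000/devstoreaccount1/mycontainer/myblob -> segments[2]
const u = new URL("https://127.0.0.1:10000/devstoreaccount1/mycontainer/myblob");
console.log(u.pathname.split("/")[2]); // "mycontainer"
```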
* - * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * - * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function - * which returns a new Readable stream whose offset is from data source beginning. - * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a - * string including non non-Base64/Hex-encoded characters. - * @param options - Options to the Block Blob Upload operation. - * @returns Response data for the Block Blob Upload operation. + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Creates a BlobBatchClient object to conduct batch operations. * - * Example usage: + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch * - * ```js - * const content = "Hello world!"; - * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); - * ``` + * @returns A new BlobBatchClient object for this container. */ - async upload(body2, contentLength2, options = {}) { - var _a, _b, _c; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-upload", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.upload(contentLength2, body2, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress - }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } + }; + var AccountSASPermissions = class _AccountSASPermissions { + static { + __name(this, "AccountSASPermissions"); + } + constructor() { + this.read = false; + this.write = false; + this.delete = false; + this.deleteVersion = false; + this.list = false; + this.add = false; + this.create = false; + this.update = false; + this.process = false; + this.tag = false; + this.filter = false; + this.setImmutabilityPolicy = false; + this.permanentDelete = false; + } + /** + * Parse initializes the AccountSASPermissions fields from a string. + * + * @param permissions - + */ + static parse(permissions) { + const accountSASPermissions = new _AccountSASPermissions(); + for (const c of permissions) { + switch (c) { + case "r": + accountSASPermissions.read = true; + break; + case "w": + accountSASPermissions.write = true; + break; + case "d": + accountSASPermissions.delete = true; + break; + case "x": + accountSASPermissions.deleteVersion = true; + break; + case "l": + accountSASPermissions.list = true; + break; + case "a": + accountSASPermissions.add = true; + break; + case "c": + accountSASPermissions.create = true; + break; + case "u": + accountSASPermissions.update = true; + break; + case "p": + accountSASPermissions.process = true; + break; + case "t": + accountSASPermissions.tag = true; + break; + case "f": + accountSASPermissions.filter = true; + break; + case "i": + accountSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + accountSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission character: ${c}`); + } } + return accountSASPermissions; } /** - * Creates a new Block Blob where the contents of the blob are read from a given URL. - * This API is supported beginning with the 2020-04-08 version. Partial updates - * are not supported with Put Blob from URL; the content of an existing blob is overwritten with - * the content of the new blob. To perform partial updates to a block blob’s contents using a - * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. * - * @param sourceURL - Specifies the URL of the blob. The value - * may be a URL of up to 2 KB in length that specifies a blob. - * The value should be URL-encoded as it would appear - * in a request URI. The source blob must either be public - * or must be authenticated via a shared access signature. - * If the source blob is public, no authentication is required - * to perform the operation. Here are some examples of source object URLs: - * - https://myaccount.blob.core.windows.net/mycontainer/myblob - * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= - * @param options - Optional parameters. 
+ * @param permissionLike - */ - async syncUploadFromURL(sourceURL, options = {}) { - var _a, _b, _c, _d, _e; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-syncUploadFromURL", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, - sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, - sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, - sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, - sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.tagConditions - }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags }), convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + static from(permissionLike) { + const accountSASPermissions = new _AccountSASPermissions(); + if (permissionLike.read) { + accountSASPermissions.read = true; + } + if (permissionLike.write) { + accountSASPermissions.write = true; + } + if (permissionLike.delete) { + accountSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + accountSASPermissions.deleteVersion = true; + } + if (permissionLike.filter) { + accountSASPermissions.filter = true; + } + if (permissionLike.tag) { + accountSASPermissions.tag = true; + } + if (permissionLike.list) { + accountSASPermissions.list = true; + } + if (permissionLike.add) { + accountSASPermissions.add = true; + } + if (permissionLike.create) { + accountSASPermissions.create = true; + } + if (permissionLike.update) { + accountSASPermissions.update = true; + } + if (permissionLike.process) { + accountSASPermissions.process = true; + } + if (permissionLike.setImmutabilityPolicy) { + accountSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; } + return accountSASPermissions; } /** - * Uploads the specified block to the block blob's "staging area" to be later - * committed by a call to commitBlockList. - * @see https://docs.microsoft.com/rest/api/storageservices/put-block + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas * - * @param blockId - A 64-byte value that is base64-encoded - * @param body - Data to upload to the staging area. - * @param contentLength - Number of bytes to upload. 
- * @param options - Options to the Block Blob Stage Block operation. - * @returns Response data for the Block Blob Stage Block operation. */ - async stageBlock(blockId2, body2, contentLength2, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlock", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.stageBlock(blockId2, contentLength2, body2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { - onUploadProgress: options.onProgress - }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.filter) { + permissions.push("f"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.list) { + permissions.push("l"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.update) { + permissions.push("u"); + } + if (this.process) { + permissions.push("p"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } + }; + var AccountSASResourceTypes = class _AccountSASResourceTypes { + static { + __name(this, "AccountSASResourceTypes"); + } + constructor() { + this.service = false; + this.container = false; + this.object = false; } /** - * The Stage Block From URL operation creates a new block to be committed as part - * of a blob where the contents are read from a URL. - * This API is available starting in version 2018-03-28. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. * - * @param blockId - A 64-byte value that is base64-encoded - * @param sourceURL - Specifies the URL of the blob. The value - * may be a URL of up to 2 KB in length that specifies a blob. - * The value should be URL-encoded as it would appear - * in a request URI. The source blob must either be public - * or must be authenticated via a shared access signature. - * If the source blob is public, no authentication is required - * to perform the operation. Here are some examples of source object URLs: - * - https://myaccount.blob.core.windows.net/mycontainer/myblob - * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= - * @param offset - From which position of the blob to download, greater than or equal to 0 - * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined - * @param options - Options to the Block Blob Stage Block From URL operation. - * @returns Response data for the Block Blob Stage Block From URL operation. 
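A brief round-trip sketch of the `AccountSASPermissions` helpers above; note that `toString()` re-emits flags in the fixed order the service accepts, regardless of the order given to `parse()`. The import path assumes this bundle tracks `@azure/storage-blob`:

```js
const { AccountSASPermissions } = require("@azure/storage-blob"); // assumed import

const perms = AccountSASPermissions.parse("lwr"); // any order in...
console.log(perms.toString()); // "rwl" - canonical order out
console.log(AccountSASPermissions.from({ read: true, list: true }).toString()); // "rl"
// AccountSASPermissions.parse("z"); // would throw RangeError: Invalid permission character: z
```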
+ * @param resourceTypes - */ - async stageBlockFromURL(blockId2, sourceURL, offset = 0, count, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlockFromURL", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.stageBlockFromURL(blockId2, 0, sourceURL, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? void 0 : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + static parse(resourceTypes) { + const accountSASResourceTypes = new _AccountSASResourceTypes(); + for (const c of resourceTypes) { + switch (c) { + case "s": + accountSASResourceTypes.service = true; + break; + case "c": + accountSASResourceTypes.container = true; + break; + case "o": + accountSASResourceTypes.object = true; + break; + default: + throw new RangeError(`Invalid resource type: ${c}`); + } } + return accountSASResourceTypes; } /** - * Writes a blob by specifying the list of block IDs that make up the blob. - * In order to be written as part of a blob, a block must have been successfully written - * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to - * update a blob by uploading only those blocks that have changed, then committing the new and existing - * blocks together. Any blocks not specified in the block list and permanently deleted. - * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list + * Converts the given resource types to a string. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas * - * @param blocks - Array of 64-byte value that is base64-encoded - * @param options - Options to the Block Blob Commit Block List operation. - * @returns Response data for the Block Blob Commit Block List operation. */ - async commitBlockList(blocks2, options = {}) { - var _a, _b, _c; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-commitBlockList", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.commitBlockList({ latest: blocks2 }, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + toString() { + const resourceTypes = []; + if (this.service) { + resourceTypes.push("s"); + } + if (this.container) { + resourceTypes.push("c"); + } + if (this.object) { + resourceTypes.push("o"); } + return resourceTypes.join(""); + } + }; + var AccountSASServices = class _AccountSASServices { + static { + __name(this, "AccountSASServices"); + } + constructor() { + this.blob = false; + this.file = false; + this.queue = false; + this.table = false; } /** - * Returns the list of blocks that have been uploaded as part of a block blob - * using the specified block list filter. - * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. * - * @param listType - Specifies whether to return the list of committed blocks, - * the list of uncommitted blocks, or both lists together. - * @param options - Options to the Block Blob Get Block List operation. - * @returns Response data for the Block Blob Get Block List operation. + * @param services - */ - async getBlockList(listType2, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlockBlobClient-getBlockList", options); - try { - const res = await this.blockBlobContext.getBlockList(listType2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - if (!res.committedBlocks) { - res.committedBlocks = []; - } - if (!res.uncommittedBlocks) { - res.uncommittedBlocks = []; + static parse(services) { + const accountSASServices = new _AccountSASServices(); + for (const c of services) { + switch (c) { + case "b": + accountSASServices.blob = true; + break; + case "f": + accountSASServices.file = true; + break; + case "q": + accountSASServices.queue = true; + break; + case "t": + accountSASServices.table = true; + break; + default: + throw new RangeError(`Invalid service character: ${c}`); } - return res; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); } + return accountSASServices; } - // High level functions /** - * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. - * - * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is - * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. - * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} - * to commit the block list. - * - * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is - * `blobContentType`, enabling the browser to provide - * functionality based on file type. + * Converts the given services to a string. 
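Likewise for the resource-type and service flags handled by the `parse`/`toString` pairs in this region (same assumed import):

```js
const { AccountSASResourceTypes, AccountSASServices } = require("@azure/storage-blob"); // assumed import

// "s" = service, "c" = container, "o" = object
const types = AccountSASResourceTypes.parse("sco");
console.log(types.container, types.toString()); // true "sco"

// "b" = blob, "q" = queue, "t" = table, "f" = file; output is re-canonicalized
console.log(AccountSASServices.parse("qb").toString()); // "bq"
```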
* - * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView - * @param options - */ - async uploadData(data, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); - try { - if (coreHttp.isNode) { - let buffer; - if (data instanceof Buffer) { - buffer = data; - } else if (data instanceof ArrayBuffer) { - buffer = Buffer.from(data); - } else { - data = data; - buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); - } - return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); - } else { - const browserBlob = new Blob([data]); - return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); - } - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + toString() { + const services = []; + if (this.blob) { + services.push("b"); + } + if (this.table) { + services.push("t"); + } + if (this.queue) { + services.push("q"); } + if (this.file) { + services.push("f"); + } + return services.join(""); } - /** - * ONLY AVAILABLE IN BROWSERS. - * - * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. - * - * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. - * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call - * {@link commitBlockList} to commit the block list. - * - * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is - * `blobContentType`, enabling the browser to provide - * functionality based on file type. - * - * @deprecated Use {@link uploadData} instead. - * - * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView - * @param options - Options to upload browser data. - * @returns Response data for the Blob Upload operation. - */ - async uploadBrowserData(browserData, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadBrowserData", options); - try { - const browserBlob = new Blob([browserData]); - return await this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + }; + function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { + const version4 = accountSASSignatureValues.version ? 
accountSASSignatureValues.version : SERVICE_VERSION; + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.setImmutabilityPolicy && version4 < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.deleteVersion && version4 < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); + } + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.permanentDelete && version4 < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + } + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.tag && version4 < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); + } + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.filter && version4 < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); + } + if (accountSASSignatureValues.encryptionScope && version4 < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); + const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + let stringToSign; + if (version4 >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version4, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "" + // Account SAS requires an additional newline character + ].join("\n"); + } else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : "", + version4, + "" + // Account SAS requires an additional newline character + ].join("\n"); + } + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(version4, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, accountSASSignatureValues.encryptionScope); + } + __name(generateAccountSASQueryParameters, "generateAccountSASQueryParameters"); + var BlobServiceClient = class _BlobServiceClient extends StorageClient { + static { + __name(this, "BlobServiceClient"); + } + constructor(url2, credentialOrPipeline, options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } else if (coreHttp.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential || credentialOrPipeline instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipeline)) { + pipeline = newPipeline(credentialOrPipeline, options); + } else { + pipeline = newPipeline(new AnonymousCredential(), options); } + super(url2, pipeline); + this.serviceContext = new Service(this.storageClientContext); } /** * - * Uploads data to block blob. Requires a bodyFactory as the data source, - * which need to return a {@link HttpRequestBody} object with the offset and size provided. - * - * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is - * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. - * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} - * to commit the block list. + * Creates an instance of BlobServiceClient from connection string. * - * @param bodyFactory - - * @param size - size of the data to upload. - * @param options - Options to Upload to Block Blob operation. - * @returns Response data for the Blob Upload operation. + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. 
*/ - async uploadSeekableInternal(bodyFactory, size, options = {}) { - if (!options.blockSize) { - options.blockSize = 0; - } - if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { - throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); - } - if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) { - options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; - } - if (options.maxSingleShotSize < 0 || options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { - throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); - } - if (options.blockSize === 0) { - if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { - throw new RangeError(`${size} is too larger to upload to a block blob.`); - } - if (size > options.maxSingleShotSize) { - options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); - if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { - options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + static fromConnectionString(connectionString, options) { + options = options || {}; + const extractedCreds = extractConnectionStringParts(connectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); } + const pipeline = newPipeline(sharedKeyCredential, options); + return new _BlobServiceClient(extractedCreds.url, pipeline); + } else { + throw new Error("Account connection string is only supported in Node.js environment"); } - } - if (!options.blobHTTPHeaders) { - options.blobHTTPHeaders = {}; - } - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadSeekableInternal", options); - try { - if (size <= options.maxSingleShotSize) { - return await this.upload(bodyFactory(0, size), size, updatedOptions); - } - const numBlocks = Math.floor((size - 1) / options.blockSize) + 1; - if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { - throw new RangeError(`The buffer's size is too big or the BlockSize is too small;the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); - } - const blockList = []; - const blockIDPrefix = coreHttp.generateUuid(); - let transferProgress = 0; - const batch = new Batch(options.concurrency); - for (let i = 0; i < numBlocks; i++) { - batch.addOperation(async () => { - const blockID = generateBlockID(blockIDPrefix, i); - const start = options.blockSize * i; - const end = i === numBlocks - 1 ? 
size : start + options.blockSize; - const contentLength2 = end - start; - blockList.push(blockID); - await this.stageBlock(blockID, bodyFactory(start, contentLength2), contentLength2, { - abortSignal: options.abortSignal, - conditions: options.conditions, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - }); - transferProgress += contentLength2; - if (options.onProgress) { - options.onProgress({ - loadedBytes: transferProgress - }); - } - }); - } - await batch.do(); - return this.commitBlockList(blockList, updatedOptions); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + } else if (extractedCreds.kind === "SASConnString") { + const pipeline = newPipeline(new AnonymousCredential(), options); + return new _BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); + } else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. + * Creates a {@link ContainerClient} object * - * Uploads a local file in blocks to a block blob. + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. * - * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. - * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList - * to commit the block list. + * Example usage: * - * @param filePath - Full path of local file - * @param options - Options to Upload to Block Blob operation. - * @returns Response data for the Blob Upload operation. + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * ``` */ - async uploadFile(filePath, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadFile", options); - try { - const size = (await fsStat(filePath)).size; - return await this.uploadSeekableInternal((offset, count) => { - return () => fsCreateReadStream(filePath, { - autoClose: true, - end: count ? offset + count - 1 : Infinity, - start: offset - }); - }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } + getContainerClient(containerName) { + return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); } /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Uploads a Node.js Readable stream into block blob. - * - * PERFORMANCE IMPROVEMENT TIPS: - * * Input stream highWaterMark is better to set a same value with bufferSize - * parameter, which will avoid Buffer.concat() operations. + * Create a Blob container. @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container * - * @param stream - Node.js Readable stream - * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB - * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, - * positive correlation with max uploading concurrency. Default value is 5 - * @param options - Options to Upload Stream to Block Blob operation. 
- * @returns Response data for the Blob Upload operation. + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. */ - async uploadStream(stream2, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { - if (!options.blobHTTPHeaders) { - options.blobHTTPHeaders = {}; - } - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadStream", options); + async createContainer(containerName, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options); try { - let blockNum = 0; - const blockIDPrefix = coreHttp.generateUuid(); - let transferProgress = 0; - const blockList = []; - const scheduler = new BufferScheduler( - stream2, - bufferSize, - maxConcurrency, - async (body2, length) => { - const blockID = generateBlockID(blockIDPrefix, blockNum); - blockList.push(blockID); - blockNum++; - await this.stageBlock(blockID, body2, length, { - conditions: options.conditions, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - }); - transferProgress += length; - if (options.onProgress) { - options.onProgress({ loadedBytes: transferProgress }); - } - }, - // concurrency should set a smaller value than maxConcurrency, which is helpful to - // reduce the possibility when a outgoing handler waits for stream data, in - // this situation, outgoing handlers are blocked. - // Outgoing queue shouldn't be empty. - Math.ceil(maxConcurrency / 4 * 3) - ); - await scheduler.do(); - return await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + const containerClient = this.getContainerClient(containerName); + const containerCreateResponse = await containerClient.create(updatedOptions); + return { + containerClient, + containerCreateResponse + }; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -85978,79 +84691,74 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; span.end(); } } - }; - var PageBlobClient = class _PageBlobClient extends BlobClient { - static { - __name(this, "PageBlobClient"); - } - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { - let pipeline; - let url2; - options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - pipeline = credentialOrPipelineOrContainerName; - } else if (coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - options = blobNameOrOptions; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); - } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; - pipeline = newPipeline(new AnonymousCredential(), options); - } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { - const containerName = credentialOrPipelineOrContainerName; 
- const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); - if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); - } - pipeline = newPipeline(sharedKeyCredential, options); - } else { - throw new Error("Account connection string is only supported in Node.js environment"); - } - } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); - } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); - } - } else { - throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + /** + * Deletes a Blob container. + * + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. + */ + async deleteContainer(containerName, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options); + try { + const containerClient = this.getContainerClient(containerName); + return await containerClient.delete(updatedOptions); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } - super(url2, pipeline); - this.pageBlobContext = new PageBlob(this.storageClientContext); } /** - * Creates a new PageBlobClient object identical to the source but with the - * specified snapshot timestamp. - * Provide "" will remove the snapshot and return a Client to the base blob. + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. * - * @param snapshot - The snapshot timestamp. - * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. */ - withSnapshot(snapshot2) { - return new _PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? 
void 0 : snapshot2), this.pipeline); + async undeleteContainer(deletedContainerName2, deletedContainerVersion2, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options); + try { + const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName2); + const containerContext = new Container(containerClient["storageClientContext"]); + const containerUndeleteResponse = await containerContext.restore(Object.assign({ + deletedContainerName: deletedContainerName2, + deletedContainerVersion: deletedContainerVersion2 + }, updatedOptions)); + return { containerClient, containerUndeleteResponse }; + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Creates a page blob of the specified length. Call uploadPages to upload data - * data to a page blob. - * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * Rename an existing Blob Container. * - * @param size - size of the page blob. - * @param options - Options to the Page Blob Create operation. - * @returns Response data for the Page Blob Create operation. + * @param sourceContainerName - The name of the source container. + * @param destinationContainerName - The new name of the container. + * @param options - Options to configure Container Rename operation. */ - async create(size, options = {}) { - var _a, _b, _c; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-create", options); + /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ + // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. + async renameContainer(sourceContainerName2, destinationContainerName, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options); try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.create(0, size, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + const containerClient = this.getContainerClient(destinationContainerName); + const containerContext = new Container(containerClient["storageClientContext"]); + const containerRenameResponse = await containerContext.rename(sourceContainerName2, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? 
void 0 : _a.leaseId })); + return { containerClient, containerRenameResponse }; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -86062,29 +84770,18 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Creates a page blob of the specified length. Call uploadPages to upload data - * data to a page blob. If the blob with the same name already exists, the content - * of the existing blob will remain unchanged. - * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties * - * @param size - size of the page blob. - * @param options - + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. */ - async createIfNotExists(size, options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("PageBlobClient-createIfNotExists", options); + async getProperties(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options); try { - const conditions = { ifNoneMatch: ETagAny }; - const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist." - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); - } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, message: e.message @@ -86095,24 +84792,18 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. - * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties * - * @param body - Data to upload - * @param offset - Offset of destination page blob - * @param count - Content length of the body, also number of bytes to be uploaded - * @param options - Options to the Page Blob Upload Pages operation. - * @returns Response data for the Page Blob Upload Pages operation. + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. 
*/ - async uploadPages(body2, offset, count, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-uploadPages", options); + async setProperties(properties, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options); try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.uploadPages(count, body2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress - }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -86124,29 +84815,18 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * The Upload Pages operation writes a range of pages to a page blob where the - * contents are read from a URL. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats * - * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication - * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob - * @param destOffset - Offset of destination page blob - * @param count - Number of bytes to be uploaded from source page blob - * @param options - + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. */ - async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { - var _a; - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-uploadPagesFromURL", options); + async getStatistics(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options); try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), Object.assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince - }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); + return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -86158,20 +84838,19 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Frees the specified pages from the page blob. - * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information * - * @param offset - Starting byte position of the pages to clear. - * @param count - Number of bytes to clear. - * @param options - Options to the Page Blob Clear Pages operation. - * @returns Response data for the Page Blob Clear Pages operation. + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. */ - async clearPages(offset = 0, count, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-clearPages", options); + async getAccountInfo(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options); try { - return await this.pageBlobContext.clearPages(0, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -86183,20 +84862,23 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Returns the list of valid page ranges for a page blob or snapshot of a page blob. - * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * Returns a list of the containers under the specified account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param options - Options to the Page Blob Get Ranges operation. - * @returns Response data for the Page Blob Get Ranges operation. 
+ * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. */ - async getPageRanges(offset = 0, count, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRanges", options); + async listContainersSegment(marker2, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options); try { - return await this.pageBlobContext.getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))).then(rangeResponseFromModel); + return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker: marker2 }, options), { include: typeof options.include === "string" ? [options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -86208,22 +84890,36 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * getPageRangesSegment returns a single segment of page ranges starting from the - * specified Marker. Use an empty Marker to start enumeration from the beginning. - * After getting a segment, process it, and then call getPageRangesSegment again - * (passing the the previously-returned Marker) to get the next segment. - * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. - * @param options - Options to PageBlob Get Page Ranges Segment operation. + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. 
The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. */ - async listPageRangesSegment(offset = 0, count, marker2, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesSegment", options); + async findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options); try { - return await this.pageBlobContext.getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), marker: marker2, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker: marker2, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -86235,46 +84931,51 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. * @param marker - A string value that identifies the portion of - * the get of page ranges to be returned with the next getting operation. The - * operation returns the ContinuationToken value within the response body if the - * getting operation did not return all page ranges remaining within the current page. - * The ContinuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of get + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. 
The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. - * @param options - Options to List Page Ranges operation. + * @param options - Options to find blobs by tags. */ - listPageRangeItemSegments(offset = 0, count, marker2, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listPageRangeItemSegments_1() { - let getPageRangeItemSegmentsResponse; + findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* findBlobsByTagsSegments_1() { + let response; if (!!marker2 || marker2 === void 0) { do { - getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesSegment(offset, count, marker2, options)); - marker2 = getPageRangeItemSegmentsResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options)); + response.blobs = response.blobs || []; + marker2 = response.continuationToken; + yield yield tslib.__await(response); } while (marker2); } - }, "listPageRangeItemSegments_1")); + }, "findBlobsByTagsSegments_1")); } /** - * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * Returns an AsyncIterableIterator for blobs. * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param options - Options to List Page Ranges operation. + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. */ - listPageRangeItems(offset = 0, count, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listPageRangeItems_1() { + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* findBlobsByTagsItems_1() { var e_1, _a; let marker2; try { - for (var _b = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { - const getPageRangesSegment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); } } catch (e_1_1) { e_1 = { error: e_1_1 }; @@ -86285,22 +84986,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (e_1) throw e_1.error; } } - }, "listPageRangeItems_1")); + }, "findBlobsByTagsItems_1")); } /** - * Returns an async iterable iterator to list of page ranges for a page blob. 
- * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified account. * - * .byPage() returns an async iterable iterator to list of page ranges for a page blob. + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties * * Example using `for await` syntax: * * ```js - * // Get the pageBlobClient before you run these snippets, - * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` * let i = 1; - * for await (const pageRange of pageBlobClient.listPageRanges()) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${container.name}`); * } * ``` * @@ -86308,11 +85009,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * ```js * let i = 1; - * let iter = pageBlobClient.listPageRanges(); - * let pageRangeItem = await iter.next(); - * while (!pageRangeItem.done) { - * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); - * pageRangeItem = await iter.next(); + * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); * } * ``` * @@ -86321,9 +85022,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * ```js * // passing optional maxPageSize in the page settings * let i = 1; - * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { - * for (const pageRange of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * } * } * ``` @@ -86332,35 +85035,41 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * ```js * let i = 1; - * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); + * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * - * // Prints 2 page ranges - * for (const pageRange of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * } * * // Gets next marker * let marker = response.continuationToken; - * * // Passing next marker as continuationToken - * - * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); + * iterator = blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * - * // Prints 10 page ranges - * for (const blob of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * 
} * ``` - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param options - Options to the Page Blob Get Ranges operation. - * @returns An asyncIterableIterator that supports paging. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. */ - listPageRanges(offset = 0, count, options = {}) { - options.conditions = options.conditions || {}; - const iter = this.listPageRangeItems(offset, count, options); + findBlobsByTags(tagFilterSqlExpression, options = {}) { + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); return { /** * The next method, part of the iteration protocol @@ -86378,112 +85087,48 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: /* @__PURE__ */ __name((settings = {}) => { - return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); }, "byPage") }; } /** - * Gets the collection of page ranges that differ between a specified snapshot and this page blob. - * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges - * - * @param offset - Starting byte position of the page blob - * @param count - Number of bytes to get ranges diff. - * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. - * @returns Response data for the Page Blob Get Page Range Diff operation. - */ - async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiff", options); - try { - return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))).then(rangeResponseFromModel); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - /** - * getPageRangesDiffSegment returns a single segment of page ranges starting from the - * specified Marker for difference between previous snapshot and the target page blob. - * Use an empty Marker to start enumeration from the beginning. - * After getting a segment, process it, and then call getPageRangesDiffSegment again - * (passing the the previously-returned Marker) to get the next segment. 
- * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges - * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. - * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. - */ - async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker2, options) { - var _a; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiffSegment", options); - try { - return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshotOrUrl, range: rangeToString({ - offset, - count - }), marker: marker2, maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - /** - * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} - * + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. * @param marker - A string value that identifies the portion of - * the get of page ranges to be returned with the next getting operation. The - * operation returns the ContinuationToken value within the response body if the - * getting operation did not return all page ranges remaining within the current page. - * The ContinuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of get - * items. The marker value is opaque to the client. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. 
*/ - listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker2, options) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listPageRangeDiffItemSegments_1() { - let getPageRangeItemSegmentsResponse; + listSegments(marker2, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listSegments_1() { + let listContainersSegmentResponse; if (!!marker2 || marker2 === void 0) { do { - getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker2, options)); - marker2 = getPageRangeItemSegmentsResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker2, options)); + listContainersSegmentResponse.containerItems = listContainersSegmentResponse.containerItems || []; + marker2 = listContainersSegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); } while (marker2); } - }, "listPageRangeDiffItemSegments_1")); + }, "listSegments_1")); } /** - * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * Returns an AsyncIterableIterator for Container Items * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @param options - Options to list containers operation. */ - listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listPageRangeDiffItems_1() { + listItems(options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listItems_1() { var e_2, _a; let marker2; try { - for (var _b = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { - const getPageRangesSegment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + for (var _b = tslib.__asyncValues(this.listSegments(marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); } } catch (e_2_1) { e_2 = { error: e_2_1 }; @@ -86494,22 +85139,20 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (e_2) throw e_2.error; } } - }, "listPageRangeDiffItems_1")); + }, "listItems_1")); } /** - * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. - * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * Returns an async iterable iterator to list all the containers + * under the specified account. * - * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * .byPage() returns an async iterable iterator to list the containers in pages. 
* * Example using `for await` syntax: * * ```js - * // Get the pageBlobClient before you run these snippets, - * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` * let i = 1; - * for await (const pageRange of pageBlobClient.listPageRangesDiff()) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); * } * ``` * @@ -86517,11 +85160,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * ```js * let i = 1; - * let iter = pageBlobClient.listPageRangesDiff(); - * let pageRangeItem = await iter.next(); - * while (!pageRangeItem.done) { - * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); - * pageRangeItem = await iter.next(); + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); * } * ``` * @@ -86530,9 +85173,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * ```js * // passing optional maxPageSize in the page settings * let i = 1; - * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) { - * for (const pageRange of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } * } * } * ``` @@ -86541,36 +85186,51 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * ```js * let i = 1; - * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 }); + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * - * // Prints 2 page ranges - * for (const pageRange of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } * } * * // Gets next marker * let marker = response.continuationToken; - * * // Passing next marker as continuationToken - * - * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 }); + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * - * // Prints 10 page ranges - * for (const blob of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } * } * ``` - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Ranges operation. + * + * @param options - Options to list containers. * @returns An asyncIterableIterator that supports paging. 
*/ - listPageRangesDiff(offset, count, prevSnapshot, options = {}) { - options.conditions = options.conditions || {}; - const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options)); + listContainers(options = {}) { + if (options.prefix === "") { + options.prefix = void 0; + } + const include2 = []; + if (options.includeDeleted) { + include2.push("deleted"); + } + if (options.includeMetadata) { + include2.push("metadata"); + } + if (options.includeSystem) { + include2.push("system"); + } + const listSegmentOptions = Object.assign(Object.assign({}, options), include2.length > 0 ? { include: include2 } : {}); + const iter = this.listItems(listSegmentOptions); return { /** * The next method, part of the iteration protocol @@ -86588,75 +85248,39 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: /* @__PURE__ */ __name((settings = {}) => { - return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); }, "byPage") }; } /** - * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. - * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). * - * @param offset - Starting byte position of the page blob - * @param count - Number of bytes to get ranges diff. - * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. - * @returns Response data for the Page Blob Get Page Range Diff operation. - */ - async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl2, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options); - try { - return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevSnapshotUrl: prevSnapshotUrl2, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))).then(rangeResponseFromModel); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - /** - * Resizes the page blob to the specified size (which must be a multiple of 512). - * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. * - * @param size - Target size - * @param options - Options to the Page Blob Resize operation. - * @returns Response data for the Page Blob Resize operation. 
- */ - async resize(size, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-resize", options); - try { - return await this.pageBlobContext.resize(size, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - /** - * Sets a page blob's sequence number. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key * - * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. - * @param sequenceNumber - Required if sequenceNumberAction is max or update - * @param options - Options to the Page Blob Update Sequence Number operation. - * @returns Response data for the Page Blob Update Sequence Number operation. + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time */ - async updateSequenceNumber(sequenceNumberAction2, sequenceNumber, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-updateSequenceNumber", options); + async getUserDelegationKey(startsOn, expiresOn2, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options); try { - return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction2, Object.assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + const response = await this.serviceContext.getUserDelegationKey({ + startsOn: truncatedISO8061Date(startsOn, false), + expiresOn: truncatedISO8061Date(expiresOn2, false) + }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + const userDelegationKey = { + signedObjectId: response.signedObjectId, + signedTenantId: response.signedTenantId, + signedStartsOn: new Date(response.signedStartsOn), + signedExpiresOn: new Date(response.signedExpiresOn), + signedService: response.signedService, + signedVersion: response.signedVersion, + value: response.value + }; + const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); + return res; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -86668,9327 +85292,9764 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. 
- * The snapshot is copied such that only the differential changes between the previously - * copied snapshot are transferred to the destination. - * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. - * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob - * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots + * Creates a BlobBatchClient object to conduct batch operations. * - * @param copySource - Specifies the name of the source page blob snapshot. For example, - * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= - * @param options - Options to the Page Blob Copy Incremental operation. - * @returns Response data for the Page Blob Copy Incremental operation. - */ - async startCopyIncremental(copySource2, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("PageBlobClient-startCopyIncremental", options); - try { - return await this.pageBlobContext.copyIncremental(copySource2, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - }; - async function getBodyAsText(batchResponse) { - let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); - const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); - buffer = buffer.slice(0, responseLength); - return buffer.toString(); - } - __name(getBodyAsText, "getBodyAsText"); - function utf8ByteLength(str) { - return Buffer.byteLength(str); - } - __name(utf8ByteLength, "utf8ByteLength"); - var HTTP_HEADER_DELIMITER = ": "; - var SPACE_DELIMITER = " "; - var NOT_FOUND = -1; - var BatchResponseParser = class { - static { - __name(this, "BatchResponseParser"); - } - constructor(batchResponse, subRequests) { - if (!batchResponse || !batchResponse.contentType) { - throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); - } - if (!subRequests || subRequests.size === 0) { - throw new RangeError("Invalid state: subRequests is not provided or size is 0."); - } - this.batchResponse = batchResponse; - this.subRequests = subRequests; - this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; - this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; - this.batchResponseEnding = `--${this.responseBatchBoundary}--`; - } - // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response - async parseBatchResponse() { - if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) { - throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); - } - const responseBodyAsText = await getBodyAsText(this.batchResponse); - const subResponses = responseBodyAsText.split(this.batchResponseEnding)[0].split(this.perResponsePrefix).slice(1); - const subResponseCount = subResponses.length; - if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { - throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); - } - const deserializedSubResponses = new Array(subResponseCount); - 
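// A minimal sketch (not part of the patch) of the boundary-based slicing
// performed by parseBatchResponse() above, applied to a hypothetical
// multipart body.
function splitBatchBody(bodyText, boundary) {
  const perResponsePrefix = `--${boundary}\r\n`; // HTTP_LINE_ENDING is CRLF
  const batchResponseEnding = `--${boundary}--`;
  // Cut off the closing delimiter, then drop the preamble before the first
  // sub-response; each remaining chunk starts with its HTTP status line.
  return bodyText.split(batchResponseEnding)[0].split(perResponsePrefix).slice(1);
}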
let subResponsesSucceededCount = 0; - let subResponsesFailedCount = 0; - for (let index = 0; index < subResponseCount; index++) { - const subResponse = subResponses[index]; - const deserializedSubResponse = {}; - deserializedSubResponse.headers = new coreHttp.HttpHeaders(); - const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); - let subRespHeaderStartFound = false; - let subRespHeaderEndFound = false; - let subRespFailed = false; - let contentId = NOT_FOUND; - for (const responseLine of responseLines) { - if (!subRespHeaderStartFound) { - if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { - contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); - } - if (responseLine.startsWith(HTTP_VERSION_1_1)) { - subRespHeaderStartFound = true; - const tokens = responseLine.split(SPACE_DELIMITER); - deserializedSubResponse.status = parseInt(tokens[1]); - deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); - } - continue; - } - if (responseLine.trim() === "") { - if (!subRespHeaderEndFound) { - subRespHeaderEndFound = true; - } - continue; - } - if (!subRespHeaderEndFound) { - if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { - throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); - } - const tokens = responseLine.split(HTTP_HEADER_DELIMITER); - deserializedSubResponse.headers.set(tokens[0], tokens[1]); - if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { - deserializedSubResponse.errorCode = tokens[1]; - subRespFailed = true; - } - } else { - if (!deserializedSubResponse.bodyAsText) { - deserializedSubResponse.bodyAsText = ""; - } - deserializedSubResponse.bodyAsText += responseLine; - } - } - if (contentId !== NOT_FOUND && Number.isInteger(contentId) && contentId >= 0 && contentId < this.subRequests.size && deserializedSubResponses[contentId] === void 0) { - deserializedSubResponse._request = this.subRequests.get(contentId); - deserializedSubResponses[contentId] = deserializedSubResponse; - } else { - logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); - } - if (subRespFailed) { - subResponsesFailedCount++; - } else { - subResponsesSucceededCount++; - } - } - return { - subResponses: deserializedSubResponses, - subResponsesSucceededCount, - subResponsesFailedCount - }; - } - }; - var MutexLockStatus; - (function(MutexLockStatus2) { - MutexLockStatus2[MutexLockStatus2["LOCKED"] = 0] = "LOCKED"; - MutexLockStatus2[MutexLockStatus2["UNLOCKED"] = 1] = "UNLOCKED"; - })(MutexLockStatus || (MutexLockStatus = {})); - var Mutex = class { - static { - __name(this, "Mutex"); - } - /** - * Lock for a specific key. If the lock has been acquired by another customer, then - * will wait until getting the lock. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch * - * @param key - lock key + * @returns A new BlobBatchClient object for this service. */ - static async lock(key) { - return new Promise((resolve) => { - if (this.keys[key] === void 0 || this.keys[key] === MutexLockStatus.UNLOCKED) { - this.keys[key] = MutexLockStatus.LOCKED; - resolve(); - } else { - this.onUnlockEvent(key, () => { - this.keys[key] = MutexLockStatus.LOCKED; - resolve(); - }); - } - }); + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); } /** - * Unlock a key. + * Only available for BlobServiceClient constructed with a shared key credential. 
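// A minimal sketch (not part of the patch) of calling the
// generateAccountSasUrl() member documented here; it throws unless the
// client was built with a StorageSharedKeyCredential. Account name, key
// source, and URL are placeholders.
const { BlobServiceClient, StorageSharedKeyCredential, AccountSASPermissions } = require("@azure/storage-blob");

function makeReadOnlySasUrl(accountName, accountKey) {
  const credential = new StorageSharedKeyCredential(accountName, accountKey);
  const serviceClient = new BlobServiceClient(`https://${accountName}.blob.core.windows.net`, credential);
  // Read permission on service/container/object resources, one-hour expiry
  // (the same default the implementation applies when expiresOn is omitted).
  return serviceClient.generateAccountSasUrl(
    new Date(Date.now() + 3600 * 1000),
    AccountSASPermissions.parse("r"),
    "sco"
  );
}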
* - * @param key - - */ - static async unlock(key) { - return new Promise((resolve) => { - if (this.keys[key] === MutexLockStatus.LOCKED) { - this.emitUnlockEvent(key); - } - delete this.keys[key]; - resolve(); - }); - } - static onUnlockEvent(key, handler) { - if (this.listeners[key] === void 0) { - this.listeners[key] = [handler]; - } else { - this.listeners[key].push(handler); - } - } - static emitUnlockEvent(key) { - if (this.listeners[key] !== void 0 && this.listeners[key].length > 0) { - const handler = this.listeners[key].shift(); - setImmediate(() => { - handler.call(this); - }); - } - } - }; - Mutex.keys = {}; - Mutex.listeners = {}; - var BlobBatch = class { - static { - __name(this, "BlobBatch"); - } - constructor() { - this.batch = "batch"; - this.batchRequest = new InnerBatchRequest(); - } - /** - * Get the value of Content-Type for a batch request. - * The value must be multipart/mixed with a batch boundary. - * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 - */ - getMultiPartContentType() { - return this.batchRequest.getMultipartContentType(); - } - /** - * Get assembled HTTP request body for sub requests. - */ - getHttpRequestBody() { - return this.batchRequest.getHttpRequestBody(); - } - /** - * Get sub requests that are added into the batch request. + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ - getSubRequests() { - return this.batchRequest.getSubRequests(); - } - async addSubRequestInternal(subRequest, assembleSubRequestFunc) { - await Mutex.lock(this.batch); - try { - this.batchRequest.preAddSubRequest(subRequest); - await assembleSubRequestFunc(); - this.batchRequest.postAddSubRequest(subRequest); - } finally { - await Mutex.unlock(this.batch); - } - } - setBatchType(batchType) { - if (!this.batchType) { - this.batchType = batchType; - } - if (this.batchType !== batchType) { - throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); - } - } - async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { - let url2; - let credential; - if (typeof urlOrBlobClient === "string" && (coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential || credentialOrOptions instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrOptions))) { - url2 = urlOrBlobClient; - credential = credentialOrOptions; - } else if (urlOrBlobClient instanceof BlobClient) { - url2 = urlOrBlobClient.url; - credential = urlOrBlobClient.credential; - options = credentialOrOptions; - } else { - throw new RangeError("Invalid arguments. 
Either url and credential, or BlobClient need be provided."); - } - if (!options) { - options = {}; - } - const { span, updatedOptions } = createSpan("BatchDeleteRequest-addSubRequest", options); - try { - this.setBatchType("delete"); - await this.addSubRequestInternal({ - url: url2, - credential - }, async () => { - await new BlobClient(url2, this.batchRequest.createPipeline(credential)).delete(updatedOptions); - }); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { - let url2; - let credential; - let tier2; - if (typeof urlOrBlobClient === "string" && (coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential || credentialOrTier instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrTier))) { - url2 = urlOrBlobClient; - credential = credentialOrTier; - tier2 = tierOrOptions; - } else if (urlOrBlobClient instanceof BlobClient) { - url2 = urlOrBlobClient.url; - credential = urlOrBlobClient.credential; - tier2 = credentialOrTier; - options = tierOrOptions; - } else { - throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); - } - if (!options) { - options = {}; - } - const { span, updatedOptions } = createSpan("BatchSetTierRequest-addSubRequest", options); - try { - this.setBatchType("setAccessTier"); - await this.addSubRequestInternal({ - url: url2, - credential - }, async () => { - await new BlobClient(url2, this.batchRequest.createPipeline(credential)).setAccessTier(tier2, updatedOptions); - }); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + generateAccountSasUrl(expiresOn2, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); } + if (expiresOn2 === void 0) { + const now = /* @__PURE__ */ new Date(); + expiresOn2 = new Date(now.getTime() + 3600 * 1e3); + } + const sas = generateAccountSASQueryParameters(Object.assign({ + permissions, + expiresOn: expiresOn2, + resourceTypes, + services: AccountSASServices.parse("b").toString() + }, options), this.credential).toString(); + return appendToURLQuery(this.url, sas); } }; - var InnerBatchRequest = class { + exports2.KnownEncryptionAlgorithmType = void 0; + (function(KnownEncryptionAlgorithmType) { + KnownEncryptionAlgorithmType["AES256"] = "AES256"; + })(exports2.KnownEncryptionAlgorithmType || (exports2.KnownEncryptionAlgorithmType = {})); + Object.defineProperty(exports2, "BaseRequestPolicy", { + enumerable: true, + get: /* @__PURE__ */ __name(function() { + return coreHttp.BaseRequestPolicy; + }, "get") + }); + Object.defineProperty(exports2, "HttpHeaders", { + enumerable: true, + get: /* @__PURE__ */ __name(function() { + return coreHttp.HttpHeaders; + }, "get") + }); + Object.defineProperty(exports2, "RequestPolicyOptions", { + enumerable: true, + get: /* @__PURE__ */ __name(function() { + return coreHttp.RequestPolicyOptions; + }, "get") + }); + Object.defineProperty(exports2, "RestError", { + enumerable: true, + get: /* @__PURE__ */ __name(function() { + return coreHttp.RestError; + }, "get") + }); + Object.defineProperty(exports2, "WebResource", { + enumerable: 
true, + get: /* @__PURE__ */ __name(function() { + return coreHttp.WebResource; + }, "get") + }); + Object.defineProperty(exports2, "deserializationPolicy", { + enumerable: true, + get: /* @__PURE__ */ __name(function() { + return coreHttp.deserializationPolicy; + }, "get") + }); + exports2.AccountSASPermissions = AccountSASPermissions; + exports2.AccountSASResourceTypes = AccountSASResourceTypes; + exports2.AccountSASServices = AccountSASServices; + exports2.AnonymousCredential = AnonymousCredential; + exports2.AnonymousCredentialPolicy = AnonymousCredentialPolicy; + exports2.AppendBlobClient = AppendBlobClient; + exports2.BlobBatch = BlobBatch; + exports2.BlobBatchClient = BlobBatchClient; + exports2.BlobClient = BlobClient; + exports2.BlobLeaseClient = BlobLeaseClient; + exports2.BlobSASPermissions = BlobSASPermissions; + exports2.BlobServiceClient = BlobServiceClient; + exports2.BlockBlobClient = BlockBlobClient; + exports2.ContainerClient = ContainerClient; + exports2.ContainerSASPermissions = ContainerSASPermissions; + exports2.Credential = Credential; + exports2.CredentialPolicy = CredentialPolicy; + exports2.PageBlobClient = PageBlobClient; + exports2.Pipeline = Pipeline; + exports2.SASQueryParameters = SASQueryParameters; + exports2.StorageBrowserPolicy = StorageBrowserPolicy; + exports2.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; + exports2.StorageOAuthScopes = StorageOAuthScopes; + exports2.StorageRetryPolicy = StorageRetryPolicy; + exports2.StorageRetryPolicyFactory = StorageRetryPolicyFactory; + exports2.StorageSharedKeyCredential = StorageSharedKeyCredential; + exports2.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; + exports2.generateAccountSASQueryParameters = generateAccountSASQueryParameters; + exports2.generateBlobSASQueryParameters = generateBlobSASQueryParameters; + exports2.getBlobServiceAccountAudience = getBlobServiceAccountAudience; + exports2.isPipelineLike = isPipelineLike; + exports2.logger = logger; + exports2.newPipeline = newPipeline; + } +}); + +// ../node_modules/@actions/cache/lib/internal/shared/errors.js +var require_errors2 = __commonJS({ + "../node_modules/@actions/cache/lib/internal/shared/errors.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.UsageError = exports2.NetworkError = exports2.GHESNotSupportedError = exports2.CacheNotFoundError = exports2.InvalidResponseError = exports2.FilesNotFoundError = void 0; + var FilesNotFoundError = class extends Error { static { - __name(this, "InnerBatchRequest"); - } - constructor() { - this.operationCount = 0; - this.body = ""; - const tempGuid = coreHttp.generateUuid(); - this.boundary = `batch_${tempGuid}`; - this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; - this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; - this.batchRequestEnding = `--${this.boundary}--`; - this.subRequests = /* @__PURE__ */ new Map(); + __name(this, "FilesNotFoundError"); } - /** - * Create pipeline to assemble sub requests. The idea here is to use existing - * credential and serialization/deserialization components, with additional policies to - * filter unnecessary headers, assemble sub requests into request's body - * and intercept request from going to wire. 
- * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. - */ - createPipeline(credential) { - const isAnonymousCreds = credential instanceof AnonymousCredential; - const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); - const factories = new Array(policyFactoryLength); - factories[0] = coreHttp.deserializationPolicy(); - factories[1] = new BatchHeaderFilterPolicyFactory(); - if (!isAnonymousCreds) { - factories[2] = coreHttp.isTokenCredential(credential) ? attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) : credential; + constructor(files = []) { + let message = "No files were found to upload"; + if (files.length > 0) { + message += `: ${files.join(", ")}`; } - factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); - return new Pipeline(factories, {}); + super(message); + this.files = files; + this.name = "FilesNotFoundError"; } - appendSubRequestToBody(request) { - this.body += [ - this.subRequestPrefix, - `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, - "", - `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}` - // sub request start line with method - ].join(HTTP_LINE_ENDING); - for (const header of request.headers.headersArray()) { - this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; - } - this.body += HTTP_LINE_ENDING; + }; + exports2.FilesNotFoundError = FilesNotFoundError; + var InvalidResponseError = class extends Error { + static { + __name(this, "InvalidResponseError"); } - preAddSubRequest(subRequest) { - if (this.operationCount >= BATCH_MAX_REQUEST) { - throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); - } - const path2 = getURLPath(subRequest.url); - if (!path2 || path2 === "") { - throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); - } + constructor(message) { + super(message); + this.name = "InvalidResponseError"; } - postAddSubRequest(subRequest) { - this.subRequests.set(this.operationCount, subRequest); - this.operationCount++; + }; + exports2.InvalidResponseError = InvalidResponseError; + var CacheNotFoundError = class extends Error { + static { + __name(this, "CacheNotFoundError"); } - // Return the http request body with assembling the ending line to the sub request body. 
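// A minimal sketch (not part of the patch) of how callers are expected to
// distinguish the typed cache errors defined in this module; `restoreCache`
// is a placeholder for any @actions/cache entry point that can surface them.
async function restoreWithFallback(restoreCache, paths, key) {
  try {
    return await restoreCache(paths, key);
  } catch (err) {
    if (err.name === "CacheNotFoundError") {
      return undefined; // benign miss: continue with a cold build
    }
    if (err.name === "NetworkError") {
      return undefined; // transient connectivity failure (see isNetworkErrorCode below)
    }
    throw err; // UsageError, InvalidResponseError, etc. stay fatal
  }
}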
- getHttpRequestBody() { - return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; + constructor(message = "Cache not found") { + super(message); + this.name = "CacheNotFoundError"; } - getMultipartContentType() { - return this.multipartContentType; + }; + exports2.CacheNotFoundError = CacheNotFoundError; + var GHESNotSupportedError = class extends Error { + static { + __name(this, "GHESNotSupportedError"); } - getSubRequests() { - return this.subRequests; + constructor(message = "@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.") { + super(message); + this.name = "GHESNotSupportedError"; } }; - var BatchRequestAssemblePolicy = class extends coreHttp.BaseRequestPolicy { + exports2.GHESNotSupportedError = GHESNotSupportedError; + var NetworkError = class extends Error { static { - __name(this, "BatchRequestAssemblePolicy"); - } - constructor(batchRequest, nextPolicy, options) { - super(nextPolicy, options); - this.dummyResponse = { - request: new coreHttp.WebResource(), - status: 200, - headers: new coreHttp.HttpHeaders() - }; - this.batchRequest = batchRequest; + __name(this, "NetworkError"); } - async sendRequest(request) { - await this.batchRequest.appendSubRequestToBody(request); - return this.dummyResponse; + constructor(code) { + const message = `Unable to make request: ${code} +If you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`; + super(message); + this.code = code; + this.name = "NetworkError"; } }; - var BatchRequestAssemblePolicyFactory = class { + exports2.NetworkError = NetworkError; + NetworkError.isNetworkErrorCode = (code) => { + if (!code) + return false; + return [ + "ECONNRESET", + "ENOTFOUND", + "ETIMEDOUT", + "ECONNREFUSED", + "EHOSTUNREACH" + ].includes(code); + }; + var UsageError = class extends Error { static { - __name(this, "BatchRequestAssemblePolicyFactory"); - } - constructor(batchRequest) { - this.batchRequest = batchRequest; + __name(this, "UsageError"); } - create(nextPolicy, options) { - return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options); + constructor() { + const message = `Cache storage quota has been hit. Unable to upload any new cache entries. Usage is recalculated every 6-12 hours. +More info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`; + super(message); + this.name = "UsageError"; } }; - var BatchHeaderFilterPolicy = class extends coreHttp.BaseRequestPolicy { - static { - __name(this, "BatchHeaderFilterPolicy"); + exports2.UsageError = UsageError; + UsageError.isUsageErrorMessage = (msg) => { + if (!msg) + return false; + return msg.includes("insufficient usage"); + }; + } +}); + +// ../node_modules/@actions/cache/lib/internal/uploadUtils.js +var require_uploadUtils = __commonJS({ + "../node_modules/@actions/cache/lib/internal/uploadUtils.js"(exports2) { + "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; } - // The base class has a protected constructor. Adding a public one to enable constructing of this class. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { - super(nextPolicy, options); + Object.defineProperty(o, k2, desc); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); } - async sendRequest(request) { - let xMsHeaderName = ""; - for (const header of request.headers.headersArray()) { - if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { - xMsHeaderName = header.name; + __setModuleDefault3(result, mod); + return result; + }; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } } - if (xMsHeaderName !== "") { - request.headers.remove(xMsHeaderName); + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } } - return this._nextPolicy.sendRequest(request); - } + __name(rejected, "rejected"); + function step(result) { + result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; - var BatchHeaderFilterPolicyFactory = class { + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; + var core2 = __importStar3(require_core()); + var storage_blob_1 = require_dist4(); + var errors_1 = require_errors2(); + var UploadProgress = class { static { - __name(this, "BatchHeaderFilterPolicyFactory"); + __name(this, "UploadProgress"); } - create(nextPolicy, options) { - return new BatchHeaderFilterPolicy(nextPolicy, options); + constructor(contentLength) { + this.contentLength = contentLength; + this.sentBytes = 0; + this.displayedComplete = false; + this.startTime = Date.now(); } - }; - var BlobBatchClient = class { - static { - __name(this, "BlobBatchClient"); + /** + * Sets the number of bytes sent + * + * @param sentBytes the number of bytes sent + */ + setSentBytes(sentBytes) { + this.sentBytes = sentBytes; } - constructor(url2, credentialOrPipeline, options) { - let pipeline; - if (isPipelineLike(credentialOrPipeline)) { - pipeline = credentialOrPipeline; - } else if (!credentialOrPipeline) { - pipeline = newPipeline(new AnonymousCredential(), options); - } else { - pipeline = newPipeline(credentialOrPipeline, options); - } - const storageClientContext = new StorageClientContext(url2, pipeline.toServiceClientOptions()); - const path2 = getURLPath(url2); - if (path2 && path2 !== "/") { - this.serviceOrContainerContext = new Container(storageClientContext); - } else { - this.serviceOrContainerContext = new Service(storageClientContext); - } + /** + * Returns the total number of bytes transferred. + */ + getTransferredBytes() { + return this.sentBytes; } /** - * Creates a {@link BlobBatch}. - * A BlobBatch represents an aggregated set of operations on blobs. + * Returns true if the upload is complete. */ - createBatch() { - return new BlobBatch(); + isDone() { + return this.getTransferredBytes() === this.contentLength; } - async deleteBlobs(urlsOrBlobClients, credentialOrOptions, options) { - const batch = new BlobBatch(); - for (const urlOrBlobClient of urlsOrBlobClients) { - if (typeof urlOrBlobClient === "string") { - await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); - } else { - await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); - } + /** + * Prints the current upload stats. Once the upload completes, this will print one + * last line and then stop. 
+ */ + display() { + if (this.displayedComplete) { + return; } - return this.submitBatch(batch); - } - async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, options) { - const batch = new BlobBatch(); - for (const urlOrBlobClient of urlsOrBlobClients) { - if (typeof urlOrBlobClient === "string") { - await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); - } else { - await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); - } + const transferredBytes = this.sentBytes; + const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); + const elapsedTime = Date.now() - this.startTime; + const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); + core2.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + if (this.isDone()) { + this.displayedComplete = true; } - return this.submitBatch(batch); } /** - * Submit batch request which consists of multiple subrequests. - * - * Get `blobBatchClient` and other details before running the snippets. - * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` - * - * Example usage: - * - * ```js - * let batchRequest = new BlobBatch(); - * await batchRequest.deleteBlob(urlInString0, credential0); - * await batchRequest.deleteBlob(urlInString1, credential1, { - * deleteSnapshots: "include" - * }); - * const batchResp = await blobBatchClient.submitBatch(batchRequest); - * console.log(batchResp.subResponsesSucceededCount); - * ``` - * - * Example using a lease: - * - * ```js - * let batchRequest = new BlobBatch(); - * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); - * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { - * conditions: { leaseId: leaseId } - * }); - * const batchResp = await blobBatchClient.submitBatch(batchRequest); - * console.log(batchResp.subResponsesSucceededCount); - * ``` - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * Returns a function used to handle TransferProgressEvents. + */ + onProgress() { + return (progress) => { + this.setSentBytes(progress.loadedBytes); + }; + } + /** + * Starts the timer that displays the stats. * - * @param batchRequest - A set of Delete or SetTier operations. - * @param options - + * @param delayInMs the delay between each write */ - async submitBatch(batchRequest, options = {}) { - if (!batchRequest || batchRequest.getSubRequests().size === 0) { - throw new RangeError("Batch request should contain one or more sub requests."); + startDisplayTimer(delayInMs = 1e3) { + const displayCallback = /* @__PURE__ */ __name(() => { + this.display(); + if (!this.isDone()) { + this.timeoutHandle = setTimeout(displayCallback, delayInMs); + } + }, "displayCallback"); + this.timeoutHandle = setTimeout(displayCallback, delayInMs); + } + /** + * Stops the timer that displays the stats. As this typically indicates the upload + * is complete, this will display one last line, unless the last line has already + * been written. 
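// A minimal sketch (not part of the patch) of how the UploadProgress pieces
// above fit together, assuming a BlockBlobClient and a local archive path are
// already in scope; all parameter names are placeholders.
async function uploadWithProgress(blockBlobClient, archivePath, archiveSizeBytes) {
  const progress = new UploadProgress(archiveSizeBytes);
  progress.startDisplayTimer(); // redraws once per second by default
  try {
    // onProgress() returns a handler that feeds setSentBytes() with the
    // { loadedBytes } reported by the storage SDK.
    return await blockBlobClient.uploadFile(archivePath, {
      onProgress: progress.onProgress()
    });
  } finally {
    progress.stopDisplayTimer(); // prints the final line exactly once
  }
}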
+ */ + stopDisplayTimer() { + if (this.timeoutHandle) { + clearTimeout(this.timeoutHandle); + this.timeoutHandle = void 0; } - const { span, updatedOptions } = createSpan("BlobBatchClient-submitBatch", options); + this.display(); + } + }; + exports2.UploadProgress = UploadProgress; + function uploadCacheArchiveSDK(signedUploadURL, archivePath, options) { + var _a; + return __awaiter3(this, void 0, void 0, function* () { + const blobClient = new storage_blob_1.BlobClient(signedUploadURL); + const blockBlobClient = blobClient.getBlockBlobClient(); + const uploadProgress = new UploadProgress((_a = options === null || options === void 0 ? void 0 : options.archiveSizeBytes) !== null && _a !== void 0 ? _a : 0); + const uploadOptions = { + blockSize: options === null || options === void 0 ? void 0 : options.uploadChunkSize, + concurrency: options === null || options === void 0 ? void 0 : options.uploadConcurrency, + maxSingleShotSize: 128 * 1024 * 1024, + onProgress: uploadProgress.onProgress() + }; try { - const batchRequestBody = batchRequest.getHttpRequestBody(); - const rawBatchResponse = await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); - const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); - const responseSummary = await batchResponseParser.parseBatchResponse(); - const res = { - _response: rawBatchResponse._response, - contentType: rawBatchResponse.contentType, - errorCode: rawBatchResponse.errorCode, - requestId: rawBatchResponse.requestId, - clientRequestId: rawBatchResponse.clientRequestId, - version: rawBatchResponse.version, - subResponses: responseSummary.subResponses, - subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, - subResponsesFailedCount: responseSummary.subResponsesFailedCount - }; - return res; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; + uploadProgress.startDisplayTimer(); + core2.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); + if (response._response.status >= 400) { + throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); + } + return response; + } catch (error) { + core2.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`); + throw error; } finally { - span.end(); + uploadProgress.stopDisplayTimer(); } + }); + } + __name(uploadCacheArchiveSDK, "uploadCacheArchiveSDK"); + exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; + } +}); + +// ../node_modules/@actions/cache/lib/internal/requestUtils.js +var require_requestUtils = __commonJS({ + "../node_modules/@actions/cache/lib/internal/requestUtils.js"(exports2) { + "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
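// A minimal sketch (not part of the patch) of invoking the
// uploadCacheArchiveSDK() helper defined above; the signed URL, path, and
// option values are placeholders, and the option names mirror the fields the
// helper reads from its options bag.
async function pushArchive() {
  return uploadCacheArchiveSDK(
    "https://account.blob.core.windows.net/cache/key.tzst?<sas>", // placeholder signed upload URL
    "/tmp/cache.tzst",
    {
      archiveSizeBytes: 123456789,       // drives the progress percentage
      uploadChunkSize: 32 * 1024 * 1024, // passed through as blockSize
      uploadConcurrency: 4
    }
  );
}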
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; + } + Object.defineProperty(o, k2, desc); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); } + __setModuleDefault3(result, mod); + return result; }; - var ContainerClient = class extends StorageClient { - static { - __name(this, "ContainerClient"); + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); } - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, options) { - let pipeline; - let url2; - options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - pipeline = credentialOrPipelineOrContainerName; - } else if (coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); - } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; - pipeline = newPipeline(new AnonymousCredential(), options); - } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string") { - const containerName = credentialOrPipelineOrContainerName; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); - if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); - if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); - } - pipeline = newPipeline(sharedKeyCredential, options); - } else { - throw new Error("Account connection string is only supported in Node.js environment"); - } - } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + "?" 
+ extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); - } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } - } else { - throw new Error("Expecting non-empty strings for containerName parameter"); } - super(url2, pipeline); - this._containerName = this.getContainerNameFromUrl(); - this.containerContext = new Container(this.storageClientContext); + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; + var core2 = __importStar3(require_core()); + var http_client_1 = require_lib(); + var constants_1 = require_constants7(); + function isSuccessStatusCode(statusCode) { + if (!statusCode) { + return false; } - /** - * The name of the container. - */ - get containerName() { - return this._containerName; + return statusCode >= 200 && statusCode < 300; + } + __name(isSuccessStatusCode, "isSuccessStatusCode"); + exports2.isSuccessStatusCode = isSuccessStatusCode; + function isServerErrorStatusCode(statusCode) { + if (!statusCode) { + return true; } - /** - * Creates a new container under the specified account. If the container with - * the same name already exists, the operation fails. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container - * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata - * - * @param options - Options to Container Create operation. - * - * - * Example usage: - * - * ```js - * const containerClient = blobServiceClient.getContainerClient(""); - * const createContainerResponse = await containerClient.create(); - * console.log("Container was created successfully", createContainerResponse.requestId); - * ``` - */ - async create(options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-create", options); - try { - return await this.containerContext.create(Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } + return statusCode >= 500; + } + __name(isServerErrorStatusCode, "isServerErrorStatusCode"); + exports2.isServerErrorStatusCode = isServerErrorStatusCode; + function isRetryableStatusCode(statusCode) { + if (!statusCode) { + return false; } - /** - * Creates a new container under the specified account. If the container with - * the same name already exists, it is not changed. 
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container - * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata - * - * @param options - - */ - async createIfNotExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); - try { - const res = await this.create(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a container only if it does not already exist." - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + const retryableStatusCodes = [ + http_client_1.HttpCodes.BadGateway, + http_client_1.HttpCodes.ServiceUnavailable, + http_client_1.HttpCodes.GatewayTimeout + ]; + return retryableStatusCodes.includes(statusCode); + } + __name(isRetryableStatusCode, "isRetryableStatusCode"); + exports2.isRetryableStatusCode = isRetryableStatusCode; + function sleep(milliseconds) { + return __awaiter3(this, void 0, void 0, function* () { + return new Promise((resolve) => setTimeout(resolve, milliseconds)); + }); + } + __name(sleep, "sleep"); + function retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = void 0) { + return __awaiter3(this, void 0, void 0, function* () { + let errorMessage = ""; + let attempt = 1; + while (attempt <= maxAttempts) { + let response = void 0; + let statusCode = void 0; + let isRetryable = false; + try { + response = yield method(); + } catch (error) { + if (onError) { + response = onError(error); + } + isRetryable = true; + errorMessage = error.message; } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + if (response) { + statusCode = getStatusCode(response); + if (!isServerErrorStatusCode(statusCode)) { + return response; + } + } + if (statusCode) { + isRetryable = isRetryableStatusCode(statusCode); + errorMessage = `Cache service responded with ${statusCode}`; + } + core2.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + if (!isRetryable) { + core2.debug(`${name} - Error is not retryable`); + break; + } + yield sleep(delay); + attempt++; } + throw Error(`${name} failed: ${errorMessage}`); + }); + } + __name(retry, "retry"); + exports2.retry = retry; + function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { + return __awaiter3(this, void 0, void 0, function* () { + return yield retry( + name, + method, + (response) => response.statusCode, + maxAttempts, + delay, + // If the error object contains the statusCode property, extract it and return + // an TypedResponse so it can be processed by the retry logic. 
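// A minimal sketch (not part of the patch) of the retry() contract
// implemented above: the method is retried while it throws or while the
// extracted status code is a 5xx, up to maxAttempts with a fixed delay.
// `httpClient.getJson` and the URL are placeholder stand-ins for any call
// returning an object with a statusCode.
async function getWithRetry(httpClient, resourceUrl) {
  return retry(
    "getCacheEntry",                        // name used in the debug logs
    () => httpClient.getJson(resourceUrl),  // operation under retry
    (response) => response.statusCode       // how retry() reads the status
  );
}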
+ (error) => { + if (error instanceof http_client_1.HttpClientError) { + return { + statusCode: error.statusCode, + result: null, + headers: {}, + error + }; + } else { + return void 0; + } + } + ); + }); + } + __name(retryTypedResponse, "retryTypedResponse"); + exports2.retryTypedResponse = retryTypedResponse; + function retryHttpClientResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { + return __awaiter3(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.message.statusCode, maxAttempts, delay); + }); + } + __name(retryHttpClientResponse, "retryHttpClientResponse"); + exports2.retryHttpClientResponse = retryHttpClientResponse; + } +}); + +// ../node_modules/@actions/cache/lib/internal/downloadUtils.js +var require_downloadUtils = __commonJS({ + "../node_modules/@actions/cache/lib/internal/downloadUtils.js"(exports2) { + "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; } - /** - * Returns true if the Azure container resource represented by this client exists; false otherwise. - * - * NOTE: use this function with care since an existing container might be deleted by other clients or - * applications. Vice versa new containers with the same name might be added by other clients or - * applications after this function completes. - * - * @param options - - */ - async exists(options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-exists", options); - try { - await this.getProperties({ - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions - }); - return true; - } catch (e) { - if (e.statusCode === 404) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when checking container existence" - }); - return false; + Object.defineProperty(o, k2, desc); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + } + __setModuleDefault3(result, mod); + return result; + }; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? 
value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; + var core2 = __importStar3(require_core()); + var http_client_1 = require_lib(); + var storage_blob_1 = require_dist4(); + var buffer = __importStar3(require("buffer")); + var fs2 = __importStar3(require("fs")); + var stream = __importStar3(require("stream")); + var util = __importStar3(require("util")); + var utils = __importStar3(require_cacheUtils()); + var constants_1 = require_constants7(); + var requestUtils_1 = require_requestUtils(); + var abort_controller_1 = require_dist(); + function pipeResponseToStream(response, output) { + return __awaiter3(this, void 0, void 0, function* () { + const pipeline = util.promisify(stream.pipeline); + yield pipeline(response.message, output); + }); + } + __name(pipeResponseToStream, "pipeResponseToStream"); + var DownloadProgress = class { + static { + __name(this, "DownloadProgress"); + } + constructor(contentLength) { + this.contentLength = contentLength; + this.segmentIndex = 0; + this.segmentSize = 0; + this.segmentOffset = 0; + this.receivedBytes = 0; + this.displayedComplete = false; + this.startTime = Date.now(); } /** - * Creates a {@link BlobClient} + * Progress to the next segment. Only call this method when the previous segment + * is complete. * - * @param blobName - A blob name - * @returns A new BlobClient object for the given blob name. + * @param segmentSize the length of the next segment */ - getBlobClient(blobName) { - return new BlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + nextSegment(segmentSize) { + this.segmentOffset = this.segmentOffset + this.segmentSize; + this.segmentIndex = this.segmentIndex + 1; + this.segmentSize = segmentSize; + this.receivedBytes = 0; + core2.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** - * Creates an {@link AppendBlobClient} + * Sets the number of bytes received for the current segment. 
* - * @param blobName - An append blob name + * @param receivedBytes the number of bytes received */ - getAppendBlobClient(blobName) { - return new AppendBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + setReceivedBytes(receivedBytes) { + this.receivedBytes = receivedBytes; } /** - * Creates a {@link BlockBlobClient} - * - * @param blobName - A block blob name - * - * - * Example usage: - * - * ```js - * const content = "Hello world!"; - * - * const blockBlobClient = containerClient.getBlockBlobClient(""); - * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); - * ``` + * Returns the total number of bytes transferred. */ - getBlockBlobClient(blobName) { - return new BlockBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + getTransferredBytes() { + return this.segmentOffset + this.receivedBytes; } /** - * Creates a {@link PageBlobClient} - * - * @param blobName - A page blob name + * Returns true if the download is complete. */ - getPageBlobClient(blobName) { - return new PageBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + isDone() { + return this.getTransferredBytes() === this.contentLength; } /** - * Returns all user-defined metadata and system properties for the specified - * container. The data returned does not include the container's list of blobs. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties - * - * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if - * they originally contained uppercase characters. This differs from the metadata keys returned by - * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which - * will retain their original casing. - * - * @param options - Options to Container Get Properties operation. + * Prints the current download stats. Once the download completes, this will print one + * last line and then stop. */ - async getProperties(options = {}) { - if (!options.conditions) { - options.conditions = {}; + display() { + if (this.displayedComplete) { + return; } - const { span, updatedOptions } = createSpan("ContainerClient-getProperties", options); - try { - return await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + const transferredBytes = this.segmentOffset + this.receivedBytes; + const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); + const elapsedTime = Date.now() - this.startTime; + const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); + core2.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + if (this.isDone()) { + this.displayedComplete = true; } } /** - * Marks the specified container for deletion. The container and any blobs - * contained within it are later deleted during garbage collection. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container - * - * @param options - Options to Container Delete operation. 
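// A minimal sketch (not part of the patch) of the segment bookkeeping above
// for a 10 MB download fetched in 4 MB segments; getTransferredBytes() is
// always segmentOffset + receivedBytes.
const MB = 1024 * 1024;
const dp = new DownloadProgress(10 * MB);
dp.nextSegment(4 * MB);       // first segment: segmentOffset stays 0
dp.setReceivedBytes(4 * MB);  // transferred: 4 MB
dp.nextSegment(4 * MB);       // segmentOffset advances to 4 MB, receivedBytes resets
dp.setReceivedBytes(1 * MB);  // transferred: 5 MB
console.log(dp.getTransferredBytes() === 5 * MB); // true
console.log(dp.isDone());                         // false until 10 MB arrive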
- */ - async delete(options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("ContainerClient-delete", options); - try { - return await this.containerContext.delete(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } + * Returns a function used to handle TransferProgressEvents. + */ + onProgress() { + return (progress) => { + this.setReceivedBytes(progress.loadedBytes); + }; } /** - * Marks the specified container for deletion if it exists. The container and any blobs - * contained within it are later deleted during garbage collection. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * Starts the timer that displays the stats. * - * @param options - Options to Container Delete operation. + * @param delayInMs the delay between each write */ - async deleteIfExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); - try { - const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a container only if it exists." - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + startDisplayTimer(delayInMs = 1e3) { + const displayCallback = /* @__PURE__ */ __name(() => { + this.display(); + if (!this.isDone()) { + this.timeoutHandle = setTimeout(displayCallback, delayInMs); } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } + }, "displayCallback"); + this.timeoutHandle = setTimeout(displayCallback, delayInMs); } /** - * Sets one or more user-defined name-value pairs for the specified container. - * - * If no option provided, or no metadata defined in the parameter, the container - * metadata will be removed. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata - * - * @param metadata - Replace existing metadata with this value. - * If no value provided the existing metadata will be removed. - * @param options - Options to Container Set Metadata operation. + * Stops the timer that displays the stats. As this typically indicates the download + * is complete, this will display one last line, unless the last line has already + * been written. 
*/ - async setMetadata(metadata2, options = {}) { - if (!options.conditions) { - options.conditions = {}; + stopDisplayTimer() { + if (this.timeoutHandle) { + clearTimeout(this.timeoutHandle); + this.timeoutHandle = void 0; } - if (options.conditions.ifUnmodifiedSince) { - throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); + this.display(); + } + }; + exports2.DownloadProgress = DownloadProgress; + function downloadCacheHttpClient(archiveLocation, archivePath) { + return __awaiter3(this, void 0, void 0, function* () { + const writeStream = fs2.createWriteStream(archivePath); + const httpClient = new http_client_1.HttpClient("actions/cache"); + const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter3(this, void 0, void 0, function* () { + return httpClient.get(archiveLocation); + })); + downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { + downloadResponse.message.destroy(); + core2.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + }); + yield pipeResponseToStream(downloadResponse, writeStream); + const contentLengthHeader = downloadResponse.message.headers["content-length"]; + if (contentLengthHeader) { + const expectedLength = parseInt(contentLengthHeader); + const actualLength = utils.getArchiveFileSizeInBytes(archivePath); + if (actualLength !== expectedLength) { + throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); + } + } else { + core2.debug("Unable to validate download, no Content-Length header"); } - const { span, updatedOptions } = createSpan("ContainerClient-setMetadata", options); + }); + } + __name(downloadCacheHttpClient, "downloadCacheHttpClient"); + exports2.downloadCacheHttpClient = downloadCacheHttpClient; + function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { + var _a; + return __awaiter3(this, void 0, void 0, function* () { + const archiveDescriptor = yield fs2.promises.open(archivePath, "w"); + const httpClient = new http_client_1.HttpClient("actions/cache", void 0, { + socketTimeout: options.timeoutInMs, + keepAlive: true + }); try { - return await this.containerContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: metadata2, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; + const res = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCacheMetadata", () => __awaiter3(this, void 0, void 0, function* () { + return yield httpClient.request("HEAD", archiveLocation, null, {}); + })); + const lengthHeader = res.message.headers["content-length"]; + if (lengthHeader === void 0 || lengthHeader === null) { + throw new Error("Content-Length not found on blob response"); + } + const length = parseInt(lengthHeader); + if (Number.isNaN(length)) { + throw new Error(`Could not interpret Content-Length: ${length}`); + } + const downloads = []; + const blockSize = 4 * 1024 * 1024; + for (let offset = 0; offset < length; offset += blockSize) { + const count = Math.min(blockSize, length - offset); + downloads.push({ + offset, + promiseGetter: /* @__PURE__ */ __name(() => __awaiter3(this, void 0, void 0, function* () { + return yield downloadSegmentRetry(httpClient, 
archiveLocation, offset, count); + }), "promiseGetter") + }); + } + downloads.reverse(); + let actives = 0; + let bytesDownloaded = 0; + const progress = new DownloadProgress(length); + progress.startDisplayTimer(); + const progressFn = progress.onProgress(); + const activeDownloads = []; + let nextDownload; + const waitAndWrite = /* @__PURE__ */ __name(() => __awaiter3(this, void 0, void 0, function* () { + const segment = yield Promise.race(Object.values(activeDownloads)); + yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); + actives--; + delete activeDownloads[segment.offset]; + bytesDownloaded += segment.count; + progressFn({ loadedBytes: bytesDownloaded }); + }), "waitAndWrite"); + while (nextDownload = downloads.pop()) { + activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); + actives++; + if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) { + yield waitAndWrite(); + } + } + while (actives > 0) { + yield waitAndWrite(); + } } finally { - span.end(); + httpClient.dispose(); + yield archiveDescriptor.close(); } - } - /** - * Gets the permissions for the specified container. The permissions indicate - * whether container data may be accessed publicly. - * - * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. - * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl - * - * @param options - Options to Container Get Access Policy operation. - */ - async getAccessPolicy(options = {}) { - if (!options.conditions) { - options.conditions = {}; + }); + } + __name(downloadCacheHttpClientConcurrent, "downloadCacheHttpClientConcurrent"); + exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; + function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { + return __awaiter3(this, void 0, void 0, function* () { + const retries = 5; + let failures = 0; + while (true) { + try { + const timeout = 3e4; + const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); + if (typeof result === "string") { + throw new Error("downloadSegmentRetry failed due to timeout"); + } + return result; + } catch (err) { + if (failures >= retries) { + throw err; + } + failures++; + } } - const { span, updatedOptions } = createSpan("ContainerClient-getAccessPolicy", options); - try { - const response = await this.containerContext.getAccessPolicy(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - const res = { - _response: response._response, - blobPublicAccess: response.blobPublicAccess, - date: response.date, - etag: response.etag, - errorCode: response.errorCode, - lastModified: response.lastModified, - requestId: response.requestId, - clientRequestId: response.clientRequestId, - signedIdentifiers: [], - version: response.version - }; - for (const identifier of response) { - let accessPolicy = void 0; - if (identifier.accessPolicy) { - accessPolicy = { - permissions: identifier.accessPolicy.permissions - }; - if (identifier.accessPolicy.expiresOn) { - accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); - } - if (identifier.accessPolicy.startsOn) { - accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); + }); + } + 
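+  // How the concurrent path above works: downloadCacheHttpClientConcurrent
+  // splits the blob into 4 MiB segments, keeps at most `downloadConcurrency`
+  // (10 if unset) requests in flight by racing the active promises, and
+  // writes every finished segment at its own offset. downloadSegmentRetry
+  // races each segment request against a 30 s timeout and retries it up to
+  // 5 times before rethrowing, which fails the whole download.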
__name(downloadSegmentRetry, "downloadSegmentRetry"); + function downloadSegment(httpClient, archiveLocation, offset, count) { + return __awaiter3(this, void 0, void 0, function* () { + const partRes = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCachePart", () => __awaiter3(this, void 0, void 0, function* () { + return yield httpClient.get(archiveLocation, { + Range: `bytes=${offset}-${offset + count - 1}` + }); + })); + if (!partRes.readBodyBuffer) { + throw new Error("Expected HttpClientResponse to implement readBodyBuffer"); + } + return { + offset, + count, + buffer: yield partRes.readBodyBuffer() + }; + }); + } + __name(downloadSegment, "downloadSegment"); + function downloadCacheStorageSDK(archiveLocation, archivePath, options) { + var _a; + return __awaiter3(this, void 0, void 0, function* () { + const client = new storage_blob_1.BlockBlobClient(archiveLocation, void 0, { + retryOptions: { + // Override the timeout used when downloading each 4 MB chunk + // The default is 2 min / MB, which is way too slow + tryTimeoutInMs: options.timeoutInMs + } + }); + const properties = yield client.getProperties(); + const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; + if (contentLength < 0) { + core2.debug("Unable to determine content length, downloading file with http-client..."); + yield downloadCacheHttpClient(archiveLocation, archivePath); + } else { + const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); + const downloadProgress = new DownloadProgress(contentLength); + const fd = fs2.openSync(archivePath, "w"); + try { + downloadProgress.startDisplayTimer(); + const controller = new abort_controller_1.AbortController(); + const abortSignal = controller.signal; + while (!downloadProgress.isDone()) { + const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; + const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); + downloadProgress.nextSegment(segmentSize); + const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 36e5, client.downloadToBuffer(segmentStart, segmentSize, { + abortSignal, + concurrency: options.downloadConcurrency, + onProgress: downloadProgress.onProgress() + })); + if (result === "timeout") { + controller.abort(); + throw new Error("Aborting cache download as the download time exceeded the timeout."); + } else if (Buffer.isBuffer(result)) { + fs2.writeFileSync(fd, result); } } - res.signedIdentifiers.push({ - accessPolicy, - id: identifier.id - }); + } finally { + downloadProgress.stopDisplayTimer(); + fs2.closeSync(fd); } - return res; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); } + }); + } + __name(downloadCacheStorageSDK, "downloadCacheStorageSDK"); + exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; + var promiseWithTimeout = /* @__PURE__ */ __name((timeoutMs, promise) => __awaiter3(void 0, void 0, void 0, function* () { + let timeoutHandle; + const timeoutPromise = new Promise((resolve) => { + timeoutHandle = setTimeout(() => resolve("timeout"), timeoutMs); + }); + return Promise.race([promise, timeoutPromise]).then((result) => { + clearTimeout(timeoutHandle); + return result; + }); + }), "promiseWithTimeout"); + } +}); + +// ../node_modules/@actions/cache/lib/options.js +var require_options = __commonJS({ + "../node_modules/@actions/cache/lib/options.js"(exports2) { + "use strict"; + var __createBinding3 = exports2 && 
exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; + } + Object.defineProperty(o, k2, desc); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + } + __setModuleDefault3(result, mod); + return result; + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getDownloadOptions = exports2.getUploadOptions = void 0; + var core2 = __importStar3(require_core()); + function getUploadOptions(copy) { + const result = { + useAzureSdk: false, + uploadConcurrency: 4, + uploadChunkSize: 32 * 1024 * 1024 + }; + if (copy) { + if (typeof copy.useAzureSdk === "boolean") { + result.useAzureSdk = copy.useAzureSdk; + } + if (typeof copy.uploadConcurrency === "number") { + result.uploadConcurrency = copy.uploadConcurrency; + } + if (typeof copy.uploadChunkSize === "number") { + result.uploadChunkSize = copy.uploadChunkSize; + } + } + result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; + result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
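+    // Both env overrides are clamped: CACHE_UPLOAD_CONCURRENCY to at most
+    // 32 parallel uploads, and CACHE_UPLOAD_CHUNK_SIZE (given in MiB and
+    // converted to bytes below) to at most 128 MiB.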
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; + core2.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core2.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core2.debug(`Upload chunk size: ${result.uploadChunkSize}`); + return result; + } + __name(getUploadOptions, "getUploadOptions"); + exports2.getUploadOptions = getUploadOptions; + function getDownloadOptions(copy) { + const result = { + useAzureSdk: false, + concurrentBlobDownloads: true, + downloadConcurrency: 8, + timeoutInMs: 3e4, + segmentTimeoutInMs: 6e5, + lookupOnly: false + }; + if (copy) { + if (typeof copy.useAzureSdk === "boolean") { + result.useAzureSdk = copy.useAzureSdk; + } + if (typeof copy.concurrentBlobDownloads === "boolean") { + result.concurrentBlobDownloads = copy.concurrentBlobDownloads; + } + if (typeof copy.downloadConcurrency === "number") { + result.downloadConcurrency = copy.downloadConcurrency; + } + if (typeof copy.timeoutInMs === "number") { + result.timeoutInMs = copy.timeoutInMs; + } + if (typeof copy.segmentTimeoutInMs === "number") { + result.segmentTimeoutInMs = copy.segmentTimeoutInMs; + } + if (typeof copy.lookupOnly === "boolean") { + result.lookupOnly = copy.lookupOnly; + } + } + const segmentDownloadTimeoutMins = process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]; + if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { + result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; + } + core2.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core2.debug(`Download concurrency: ${result.downloadConcurrency}`); + core2.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core2.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core2.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core2.debug(`Lookup only: ${result.lookupOnly}`); + return result; + } + __name(getDownloadOptions, "getDownloadOptions"); + exports2.getDownloadOptions = getDownloadOptions; + } +}); + +// ../node_modules/@actions/cache/lib/internal/config.js +var require_config = __commonJS({ + "../node_modules/@actions/cache/lib/internal/config.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getCacheServiceURL = exports2.getCacheServiceVersion = exports2.isGhes = void 0; + function isGhes() { + const ghUrl = new URL(process.env["GITHUB_SERVER_URL"] || "https://github.com"); + const hostname = ghUrl.hostname.trimEnd().toUpperCase(); + const isGitHubHost = hostname === "GITHUB.COM"; + const isGheHost = hostname.endsWith(".GHE.COM"); + const isLocalHost = hostname.endsWith(".LOCALHOST"); + return !isGitHubHost && !isGheHost && !isLocalHost; + } + __name(isGhes, "isGhes"); + exports2.isGhes = isGhes; + function getCacheServiceVersion() { + if (isGhes()) + return "v1"; + return process.env["ACTIONS_CACHE_SERVICE_V2"] ? 
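+    // The v2 cache service is opted into by setting ACTIONS_CACHE_SERVICE_V2
+    // to any non-empty value; GHES installations are pinned to v1 above.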
"v2" : "v1"; + } + __name(getCacheServiceVersion, "getCacheServiceVersion"); + exports2.getCacheServiceVersion = getCacheServiceVersion; + function getCacheServiceURL() { + const version3 = getCacheServiceVersion(); + switch (version3) { + case "v1": + return process.env["ACTIONS_CACHE_URL"] || process.env["ACTIONS_RESULTS_URL"] || ""; + case "v2": + return process.env["ACTIONS_RESULTS_URL"] || ""; + default: + throw new Error(`Unsupported cache service version: ${version3}`); + } + } + __name(getCacheServiceURL, "getCacheServiceURL"); + exports2.getCacheServiceURL = getCacheServiceURL; + } +}); + +// ../node_modules/@actions/cache/package.json +var require_package = __commonJS({ + "../node_modules/@actions/cache/package.json"(exports2, module2) { + module2.exports = { + name: "@actions/cache", + version: "4.0.0", + preview: true, + description: "Actions cache lib", + keywords: [ + "github", + "actions", + "cache" + ], + homepage: "https://github.com/actions/toolkit/tree/main/packages/cache", + license: "MIT", + main: "lib/cache.js", + types: "lib/cache.d.ts", + directories: { + lib: "lib", + test: "__tests__" + }, + files: [ + "lib", + "!.DS_Store" + ], + publishConfig: { + access: "public" + }, + repository: { + type: "git", + url: "git+https://github.com/actions/toolkit.git", + directory: "packages/cache" + }, + scripts: { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + test: 'echo "Error: run tests from root" && exit 1', + tsc: "tsc" + }, + bugs: { + url: "https://github.com/actions/toolkit/issues" + }, + dependencies: { + "@actions/core": "^1.11.1", + "@actions/exec": "^1.0.1", + "@actions/glob": "^0.1.0", + "@actions/http-client": "^2.1.1", + "@actions/io": "^1.0.1", + "@azure/abort-controller": "^1.1.0", + "@azure/ms-rest-js": "^2.6.0", + "@azure/storage-blob": "^12.13.0", + "@protobuf-ts/plugin": "^2.9.4", + semver: "^6.3.1", + "twirp-ts": "^2.5.0" + }, + devDependencies: { + "@types/semver": "^6.0.0", + typescript: "^5.2.2" } - /** - * Sets the permissions for the specified container. The permissions indicate - * whether blobs in a container may be accessed publicly. - * - * When you set permissions for a container, the existing permissions are replaced. - * If no access or containerAcl provided, the existing container ACL will be - * removed. - * - * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. - * During this interval, a shared access signature that is associated with the stored access policy will - * fail with status code 403 (Forbidden), until the access policy becomes active. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl - * - * @param access - The level of public access to data in the container. - * @param containerAcl - Array of elements each having a unique Id and details of the access policy. - * @param options - Options to Container Set Access Policy operation. - */ - async setAccessPolicy(access2, containerAcl2, options = {}) { - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("ContainerClient-setAccessPolicy", options); - try { - const acl = []; - for (const identifier of containerAcl2 || []) { - acl.push({ - accessPolicy: { - expiresOn: identifier.accessPolicy.expiresOn ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) : "", - permissions: identifier.accessPolicy.permissions, - startsOn: identifier.accessPolicy.startsOn ? 
truncatedISO8061Date(identifier.accessPolicy.startsOn) : "" - }, - id: identifier.id - }); - } - return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access: access2, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } + }; + } +}); + +// ../node_modules/@actions/cache/lib/internal/shared/user-agent.js +var require_user_agent = __commonJS({ + "../node_modules/@actions/cache/lib/internal/shared/user-agent.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getUserAgentString = void 0; + var packageJson = require_package(); + function getUserAgentString() { + return `@actions/cache-${packageJson.version}`; + } + __name(getUserAgentString, "getUserAgentString"); + exports2.getUserAgentString = getUserAgentString; + } +}); + +// ../node_modules/@actions/cache/lib/internal/cacheHttpClient.js +var require_cacheHttpClient = __commonJS({ + "../node_modules/@actions/cache/lib/internal/cacheHttpClient.js"(exports2) { + "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; } - /** - * Get a {@link BlobLeaseClient} that manages leases on the container. - * - * @param proposeLeaseId - Initial proposed lease Id. - * @returns A new BlobLeaseClient object for managing leases on the container. - */ - getBlobLeaseClient(proposeLeaseId) { - return new BlobLeaseClient(this, proposeLeaseId); + Object.defineProperty(o, k2, desc); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); } - /** - * Creates a new block blob, or updates the content of an existing block blob. - * - * Updating an existing block blob overwrites any existing metadata on the blob. - * Partial updates are not supported; the content of the existing blob is - * overwritten with the new content. To perform a partial update of a block blob's, - * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. - * - * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, - * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better - * performance with concurrency uploading. - * - * @see https://docs.microsoft.com/rest/api/storageservices/put-blob - * - * @param blobName - Name of the block blob to create or update. 
- * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function - * which returns a new Readable stream whose offset is from data source beginning. - * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a - * string including non non-Base64/Hex-encoded characters. - * @param options - Options to configure the Block Blob Upload operation. - * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. - */ - async uploadBlockBlob(blobName, body2, contentLength2, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-uploadBlockBlob", options); - try { - const blockBlobClient = this.getBlockBlobClient(blobName); - const response = await blockBlobClient.upload(body2, contentLength2, updatedOptions); - return { - blockBlobClient, - response - }; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } + __setModuleDefault3(result, mod); + return result; + }; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); } - /** - * Marks the specified blob or snapshot for deletion. The blob is later deleted - * during garbage collection. Note that in order to delete a blob, you must delete - * all of its snapshots. You can delete both at the same time with the Delete - * Blob operation. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob - * - * @param blobName - - * @param options - Options to Blob Delete operation. - * @returns Block blob deletion response data. - */ - async deleteBlob(blobName, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-deleteBlob", options); - try { - let blobClient = this.getBlobClient(blobName); - if (options.versionId) { - blobClient = blobClient.withVersion(options.versionId); + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } - return await blobClient.delete(updatedOptions); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); } - } - /** - * listBlobFlatSegment returns a single segment of blobs starting from the - * specified Marker. Use an empty Marker to start enumeration from the beginning. - * After getting a segment, process it, and then call listBlobsFlatSegment again - * (passing the the previously-returned Marker) to get the next segment. - * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs - * - * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. - * @param options - Options to Container List Blob Flat Segment operation. 
- */ - async listBlobFlatSegment(marker2, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); - try { - const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker: marker2 }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); - return blobItem; - }) }) }); - return wrappedResponse; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } } - } - /** - * listBlobHierarchySegment returns a single segment of blobs starting from - * the specified Marker. Use an empty Marker to start enumeration from the - * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment - * again (passing the the previously-returned Marker) to get the next segment. - * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs - * - * @param delimiter - The character or string used to define the virtual hierarchy - * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. - * @param options - Options to Container List Blob Hierarchy Segment operation. - */ - async listBlobHierarchySegment(delimiter2, marker2, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); - try { - const response = await this.containerContext.listBlobHierarchySegment(delimiter2, Object.assign(Object.assign({ marker: marker2 }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); - return blobItem; - }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { - const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); - return blobPrefix; - }) }) }); - return wrappedResponse; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; + var core2 = __importStar3(require_core()); + var http_client_1 = require_lib(); + var auth_1 = require_auth(); + var fs2 = __importStar3(require("fs")); + var url_1 = require("url"); + var utils = __importStar3(require_cacheUtils()); + var uploadUtils_1 = require_uploadUtils(); + var downloadUtils_1 = require_downloadUtils(); + var options_1 = require_options(); + var requestUtils_1 = require_requestUtils(); + var config_1 = require_config(); + var user_agent_1 = require_user_agent(); + function getCacheApiUrl(resource) { + const baseUrl = (0, config_1.getCacheServiceURL)(); + if (!baseUrl) { + throw new Error("Cache Service Url not found, unable to restore cache."); } - /** - * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse - * - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the ContinuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The ContinuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to list blobs operation. - */ - listSegments(marker2, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listSegments_1() { - let listBlobsFlatSegmentResponse; - if (!!marker2 || marker2 === void 0) { - do { - listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker2, options)); - marker2 = listBlobsFlatSegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse)); - } while (marker2); - } - }, "listSegments_1")); - } - /** - * Returns an AsyncIterableIterator of {@link BlobItem} objects - * - * @param options - Options to list blobs operation. 
- */ - listItems(options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listItems_1() { - var e_1, _a; - let marker2; - try { - for (var _b = tslib.__asyncValues(this.listSegments(marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { - const listBlobsFlatSegmentResponse = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); - } - } catch (e_1_1) { - e_1 = { error: e_1_1 }; - } finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); - } finally { - if (e_1) throw e_1.error; - } + const url = `${baseUrl}_apis/artifactcache/${resource}`; + core2.debug(`Resource Url: ${url}`); + return url; + } + __name(getCacheApiUrl, "getCacheApiUrl"); + function createAcceptHeader(type, apiVersion) { + return `${type};api-version=${apiVersion}`; + } + __name(createAcceptHeader, "createAcceptHeader"); + function getRequestOptions() { + const requestOptions = { + headers: { + Accept: createAcceptHeader("application/json", "6.0-preview.1") + } + }; + return requestOptions; + } + __name(getRequestOptions, "getRequestOptions"); + function createHttpClient() { + const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; + const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); + return new http_client_1.HttpClient((0, user_agent_1.getUserAgentString)(), [bearerCredentialHandler], getRequestOptions()); + } + __name(createHttpClient, "createHttpClient"); + function getCacheEntry(keys, paths, options) { + return __awaiter3(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version3 = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); + const resource = `cache?keys=${encodeURIComponent(keys.join(","))}&version=${version3}`; + const response = yield (0, requestUtils_1.retryTypedResponse)("getCacheEntry", () => __awaiter3(this, void 0, void 0, function* () { + return httpClient.getJson(getCacheApiUrl(resource)); + })); + if (response.statusCode === 204) { + if (core2.isDebug()) { + yield printCachesListForDiagnostics(keys[0], httpClient, version3); } - }, "listItems_1")); - } - /** - * Returns an async iterable iterator to list all the blobs - * under the specified account. - * - * .byPage() returns an async iterable iterator to list the blobs in pages. 
- * - * Example using `for await` syntax: - * - * ```js - * // Get the containerClient before you run these snippets, - * // Can be obtained from `blobServiceClient.getContainerClient("");` - * let i = 1; - * for await (const blob of containerClient.listBlobsFlat()) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * ``` - * - * Example using `iter.next()`: - * - * ```js - * let i = 1; - * let iter = containerClient.listBlobsFlat(); - * let blobItem = await iter.next(); - * while (!blobItem.done) { - * console.log(`Blob ${i++}: ${blobItem.value.name}`); - * blobItem = await iter.next(); - * } - * ``` - * - * Example using `byPage()`: - * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * ``` - * - * Example using paging with a marker: - * - * ```js - * let i = 1; - * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * - * // Prints 2 blob names - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * - * // Gets next marker - * let marker = response.continuationToken; - * - * // Passing next marker as continuationToken - * - * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; - * - * // Prints 10 blob names - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * ``` - * - * @param options - Options to list blobs. - * @returns An asyncIterableIterator that supports paging. - */ - listBlobsFlat(options = {}) { - const include2 = []; - if (options.includeCopy) { - include2.push("copy"); + return null; } - if (options.includeDeleted) { - include2.push("deleted"); + if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) { + throw new Error(`Cache service responded with ${response.statusCode}`); } - if (options.includeMetadata) { - include2.push("metadata"); + const cacheResult = response.result; + const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; + if (!cacheDownloadUrl) { + throw new Error("Cache not found."); } - if (options.includeSnapshots) { - include2.push("snapshots"); + core2.setSecret(cacheDownloadUrl); + core2.debug(`Cache Result:`); + core2.debug(JSON.stringify(cacheResult)); + return cacheResult; + }); + } + __name(getCacheEntry, "getCacheEntry"); + exports2.getCacheEntry = getCacheEntry; + function printCachesListForDiagnostics(key, httpClient, version3) { + return __awaiter3(this, void 0, void 0, function* () { + const resource = `caches?key=${encodeURIComponent(key)}`; + const response = yield (0, requestUtils_1.retryTypedResponse)("listCache", () => __awaiter3(this, void 0, void 0, function* () { + return httpClient.getJson(getCacheApiUrl(resource)); + })); + if (response.statusCode === 200) { + const cacheListResult = response.result; + const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; + if (totalCount && totalCount > 0) { + core2.debug(`No matching cache found for cache key '${key}', version '${version3} and scope ${process.env["GITHUB_REF"]}. 
There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key +Other caches with similar key:`); + for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { + core2.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + } + } } - if (options.includeVersions) { - include2.push("versions"); + }); + } + __name(printCachesListForDiagnostics, "printCachesListForDiagnostics"); + function downloadCache(archiveLocation, archivePath, options) { + return __awaiter3(this, void 0, void 0, function* () { + const archiveUrl = new url_1.URL(archiveLocation); + const downloadOptions = (0, options_1.getDownloadOptions)(options); + if (archiveUrl.hostname.endsWith(".blob.core.windows.net")) { + if (downloadOptions.useAzureSdk) { + yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); + } else if (downloadOptions.concurrentBlobDownloads) { + yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); + } else { + yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); + } + } else { + yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); } - if (options.includeUncommitedBlobs) { - include2.push("uncommittedblobs"); + }); + } + __name(downloadCache, "downloadCache"); + exports2.downloadCache = downloadCache; + function reserveCache(key, paths, options) { + return __awaiter3(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version3 = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); + const reserveCacheRequest = { + key, + version: version3, + cacheSize: options === null || options === void 0 ? 
void 0 : options.cacheSize + }; + const response = yield (0, requestUtils_1.retryTypedResponse)("reserveCache", () => __awaiter3(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest); + })); + return response; + }); + } + __name(reserveCache, "reserveCache"); + exports2.reserveCache = reserveCache; + function getContentRange(start, end) { + return `bytes ${start}-${end}/*`; + } + __name(getContentRange, "getContentRange"); + function uploadChunk(httpClient, resourceUrl, openStream, start, end) { + return __awaiter3(this, void 0, void 0, function* () { + core2.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + const additionalHeaders = { + "Content-Type": "application/octet-stream", + "Content-Range": getContentRange(start, end) + }; + const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter3(this, void 0, void 0, function* () { + return httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders); + })); + if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) { + throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); } - if (options.includeTags) { - include2.push("tags"); + }); + } + __name(uploadChunk, "uploadChunk"); + function uploadFile(httpClient, cacheId, archivePath, options) { + return __awaiter3(this, void 0, void 0, function* () { + const fileSize = utils.getArchiveFileSizeInBytes(archivePath); + const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); + const fd = fs2.openSync(archivePath, "r"); + const uploadOptions = (0, options_1.getUploadOptions)(options); + const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); + const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); + const parallelUploads = [...new Array(concurrency).keys()]; + core2.debug("Awaiting all uploads"); + let offset = 0; + try { + yield Promise.all(parallelUploads.map(() => __awaiter3(this, void 0, void 0, function* () { + while (offset < fileSize) { + const chunkSize = Math.min(fileSize - offset, maxChunkSize); + const start = offset; + const end = offset + chunkSize - 1; + offset += maxChunkSize; + yield uploadChunk(httpClient, resourceUrl, () => fs2.createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }).on("error", (error) => { + throw new Error(`Cache upload failed because file read failed with ${error.message}`); + }), start, end); + } + }))); + } finally { + fs2.closeSync(fd); } - if (options.includeDeletedWithVersions) { - include2.push("deletedwithversions"); + return; + }); + } + __name(uploadFile, "uploadFile"); + function commitCache(httpClient, cacheId, filesize) { + return __awaiter3(this, void 0, void 0, function* () { + const commitCacheRequest = { size: filesize }; + return yield (0, requestUtils_1.retryTypedResponse)("commitCache", () => __awaiter3(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); + })); + }); + } + __name(commitCache, "commitCache"); + function saveCache(cacheId, archivePath, signedUploadURL, options) { + return __awaiter3(this, void 0, void 0, function* () { + const uploadOptions = (0, options_1.getUploadOptions)(options); + if (uploadOptions.useAzureSdk) { + if 
(!signedUploadURL) { + throw new Error("Azure Storage SDK can only be used when a signed URL is provided."); + } + yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); + } else { + const httpClient = createHttpClient(); + core2.debug("Upload cache"); + yield uploadFile(httpClient, cacheId, archivePath, options); + core2.debug("Commiting cache"); + const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); + core2.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); + if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { + throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); + } + core2.info("Cache saved successfully"); } - if (options.includeImmutabilityPolicy) { - include2.push("immutabilitypolicy"); + }); + } + __name(saveCache, "saveCache"); + exports2.saveCache = saveCache; + } +}); + +// ../node_modules/twirp-ts/build/twirp/context.js +var require_context = __commonJS({ + "../node_modules/twirp-ts/build/twirp/context.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// ../node_modules/twirp-ts/build/twirp/hooks.js +var require_hooks = __commonJS({ + "../node_modules/twirp-ts/build/twirp/hooks.js"(exports2) { + "use strict"; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } } - if (options.includeLegalHold) { - include2.push("legalhold"); + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } } - if (options.prefix === "") { - options.prefix = void 0; + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - const updatedOptions = Object.assign(Object.assign({}, options), include2.length > 0 ? { include: include2 } : {}); - const iter = this.listItems(updatedOptions); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: /* @__PURE__ */ __name((settings = {}) => { - return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); - }, "byPage") - }; + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.isHook = exports2.chainHooks = void 0; + function chainHooks(...hooks) { + if (hooks.length === 0) { + return null; } - /** - * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse - * - * @param delimiter - The character or string used to define the virtual hierarchy - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. 
The - * operation returns the ContinuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The ContinuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to list blobs operation. - */ - listHierarchySegments(delimiter2, marker2, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listHierarchySegments_1() { - let listBlobsHierarchySegmentResponse; - if (!!marker2 || marker2 === void 0) { - do { - listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter2, marker2, options)); - marker2 = listBlobsHierarchySegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse)); - } while (marker2); - } - }, "listHierarchySegments_1")); + if (hooks.length === 1) { + return hooks[0]; } - /** - * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. - * - * @param delimiter - The character or string used to define the virtual hierarchy - * @param options - Options to list blobs operation. - */ - listItemsByHierarchy(delimiter2, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listItemsByHierarchy_1() { - var e_2, _a; - let marker2; - try { - for (var _b = tslib.__asyncValues(this.listHierarchySegments(delimiter2, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { - const listBlobsHierarchySegmentResponse = _c.value; - const segment = listBlobsHierarchySegmentResponse.segment; - if (segment.blobPrefixes) { - for (const prefix2 of segment.blobPrefixes) { - yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix2)); - } + const serverHook = { + requestReceived(ctx) { + return __awaiter3(this, void 0, void 0, function* () { + for (const hook of hooks) { + if (!hook.requestReceived) { + continue; } - for (const blob of segment.blobItems) { - yield yield tslib.__await(Object.assign({ kind: "blob" }, blob)); + yield hook.requestReceived(ctx); + } + }); + }, + requestPrepared(ctx) { + return __awaiter3(this, void 0, void 0, function* () { + for (const hook of hooks) { + if (!hook.requestPrepared) { + continue; + } + console.warn("hook requestPrepared is deprecated and will be removed in the next release. Please use responsePrepared instead."); + yield hook.requestPrepared(ctx); + } + }); + }, + responsePrepared(ctx) { + return __awaiter3(this, void 0, void 0, function* () { + for (const hook of hooks) { + if (!hook.responsePrepared) { + continue; } + yield hook.responsePrepared(ctx); } - } catch (e_2_1) { - e_2 = { error: e_2_1 }; - } finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); - } finally { - if (e_2) throw e_2.error; + }); + }, + requestSent(ctx) { + return __awaiter3(this, void 0, void 0, function* () { + for (const hook of hooks) { + if (!hook.requestSent) { + continue; + } + console.warn("hook requestSent is deprecated and will be removed in the next release. 
Please use responseSent instead."); + yield hook.requestSent(ctx); } - } - }, "listItemsByHierarchy_1")); + }); + }, + responseSent(ctx) { + return __awaiter3(this, void 0, void 0, function* () { + for (const hook of hooks) { + if (!hook.responseSent) { + continue; + } + yield hook.responseSent(ctx); + } + }); + }, + requestRouted(ctx) { + return __awaiter3(this, void 0, void 0, function* () { + for (const hook of hooks) { + if (!hook.requestRouted) { + continue; + } + yield hook.requestRouted(ctx); + } + }); + }, + error(ctx, err) { + return __awaiter3(this, void 0, void 0, function* () { + for (const hook of hooks) { + if (!hook.error) { + continue; + } + yield hook.error(ctx, err); + } + }); + } + }; + return serverHook; + } + __name(chainHooks, "chainHooks"); + exports2.chainHooks = chainHooks; + function isHook(object) { + return "requestReceived" in object || "requestPrepared" in object || "requestSent" in object || "requestRouted" in object || "responsePrepared" in object || "responseSent" in object || "error" in object; + } + __name(isHook, "isHook"); + exports2.isHook = isHook; + } +}); + +// ../node_modules/twirp-ts/build/twirp/errors.js +var require_errors3 = __commonJS({ + "../node_modules/twirp-ts/build/twirp/errors.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.isValidErrorCode = exports2.httpStatusFromErrorCode = exports2.TwirpErrorCode = exports2.BadRouteError = exports2.InternalServerErrorWith = exports2.InternalServerError = exports2.RequiredArgumentError = exports2.InvalidArgumentError = exports2.NotFoundError = exports2.TwirpError = void 0; + var TwirpError = class _TwirpError extends Error { + static { + __name(this, "TwirpError"); + } + constructor(code, msg) { + super(msg); + this.code = TwirpErrorCode.Internal; + this.meta = {}; + this.code = code; + this.msg = msg; + Object.setPrototypeOf(this, _TwirpError.prototype); } /** - * Returns an async iterable iterator to list all the blobs by hierarchy. - * under the specified account. - * - * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. 
- * - * Example using `for await` syntax: - * - * ```js - * for await (const item of containerClient.listBlobsByHierarchy("/")) { - * if (item.kind === "prefix") { - * console.log(`\tBlobPrefix: ${item.name}`); - * } else { - * console.log(`\tBlobItem: name - ${item.name}`); - * } - * } - * ``` - * - * Example using `iter.next()`: - * - * ```js - * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); - * let entity = await iter.next(); - * while (!entity.done) { - * let item = entity.value; - * if (item.kind === "prefix") { - * console.log(`\tBlobPrefix: ${item.name}`); - * } else { - * console.log(`\tBlobItem: name - ${item.name}`); - * } - * entity = await iter.next(); - * } - * ``` - * - * Example using `byPage()`: - * - * ```js - * console.log("Listing blobs by hierarchy by page"); - * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { - * const segment = response.segment; - * if (segment.blobPrefixes) { - * for (const prefix of segment.blobPrefixes) { - * console.log(`\tBlobPrefix: ${prefix.name}`); - * } - * } - * for (const blob of response.segment.blobItems) { - * console.log(`\tBlobItem: name - ${blob.name}`); - * } - * } - * ``` - * - * Example using paging with a max page size: - * - * ```js - * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); - * - * let i = 1; - * for await (const response of containerClient - * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) - * .byPage({ maxPageSize: 2 })) { - * console.log(`Page ${i++}`); - * const segment = response.segment; - * - * if (segment.blobPrefixes) { - * for (const prefix of segment.blobPrefixes) { - * console.log(`\tBlobPrefix: ${prefix.name}`); - * } - * } - * - * for (const blob of response.segment.blobItems) { - * console.log(`\tBlobItem: name - ${blob.name}`); - * } - * } - * ``` - * - * @param delimiter - The character or string used to define the virtual hierarchy - * @param options - Options to list blobs operation. + * Adds a metadata kv to the error + * @param key + * @param value */ - listBlobsByHierarchy(delimiter2, options = {}) { - if (delimiter2 === "") { - throw new RangeError("delimiter should contain one or more characters"); - } - const include2 = []; - if (options.includeCopy) { - include2.push("copy"); - } - if (options.includeDeleted) { - include2.push("deleted"); - } - if (options.includeMetadata) { - include2.push("metadata"); - } - if (options.includeSnapshots) { - include2.push("snapshots"); - } - if (options.includeVersions) { - include2.push("versions"); - } - if (options.includeUncommitedBlobs) { - include2.push("uncommittedblobs"); - } - if (options.includeTags) { - include2.push("tags"); - } - if (options.includeDeletedWithVersions) { - include2.push("deletedwithversions"); - } - if (options.includeImmutabilityPolicy) { - include2.push("immutabilitypolicy"); - } - if (options.includeLegalHold) { - include2.push("legalhold"); - } - if (options.prefix === "") { - options.prefix = void 0; - } - const updatedOptions = Object.assign(Object.assign({}, options), include2.length > 0 ? 
{ include: include2 } : {}); - const iter = this.listItemsByHierarchy(delimiter2, updatedOptions); - return { - /** - * The next method, part of the iteration protocol - */ - async next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: /* @__PURE__ */ __name((settings = {}) => { - return this.listHierarchySegments(delimiter2, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); - }, "byPage") - }; + withMeta(key, value) { + this.meta[key] = value; + return this; } /** - * The Filter Blobs operation enables callers to list blobs in the container whose tags - * match a given search expression. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. + * Returns a single metadata value + * return "" if not found + * @param key */ - async findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); - try { - const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker: marker2, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { - var _a; - let tagValue = ""; - if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { - tagValue = blob.tags.blobTagSet[0].value; - } - return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); - }) }); - return wrappedResponse; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + getMeta(key) { + return this.meta[key] || ""; + } + /** + * Add the original error cause + * @param err + * @param addMeta + */ + withCause(err, addMeta = false) { + this._originalCause = err; + if (addMeta) { + this.withMeta("cause", err.message); } + return this; + } + cause() { + return this._originalCause; } /** - * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. 
- * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. + * Returns the error representation to JSON */ - findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* findBlobsByTagsSegments_1() { - let response; - if (!!marker2 || marker2 === void 0) { - do { - response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options)); - response.blobs = response.blobs || []; - marker2 = response.continuationToken; - yield yield tslib.__await(response); - } while (marker2); - } - }, "findBlobsByTagsSegments_1")); + toJSON() { + try { + return JSON.stringify({ + code: this.code, + msg: this.msg, + meta: this.meta + }); + } catch (e) { + return `{"code": "internal", "msg": "There was an error but it could not be serialized into JSON"}`; + } } /** - * Returns an AsyncIterableIterator for blobs. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to findBlobsByTagsItems. 
+ * Create a twirp error from an object + * @param obj */ - findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* findBlobsByTagsItems_1() { - var e_3, _a; - let marker2; + static fromObject(obj) { + const code = obj["code"] || TwirpErrorCode.Unknown; + const msg = obj["msg"] || "unknown"; + const error = new _TwirpError(code, msg); + if (obj["meta"]) { + Object.keys(obj["meta"]).forEach((key) => { + error.withMeta(key, obj["meta"][key]); + }); + } + return error; + } + }; + exports2.TwirpError = TwirpError; + var NotFoundError = class extends TwirpError { + static { + __name(this, "NotFoundError"); + } + constructor(msg) { + super(TwirpErrorCode.NotFound, msg); + } + }; + exports2.NotFoundError = NotFoundError; + var InvalidArgumentError = class extends TwirpError { + static { + __name(this, "InvalidArgumentError"); + } + constructor(argument, validationMsg) { + super(TwirpErrorCode.InvalidArgument, argument + " " + validationMsg); + this.withMeta("argument", argument); + } + }; + exports2.InvalidArgumentError = InvalidArgumentError; + var RequiredArgumentError = class extends InvalidArgumentError { + static { + __name(this, "RequiredArgumentError"); + } + constructor(argument) { + super(argument, "is required"); + } + }; + exports2.RequiredArgumentError = RequiredArgumentError; + var InternalServerError = class extends TwirpError { + static { + __name(this, "InternalServerError"); + } + constructor(msg) { + super(TwirpErrorCode.Internal, msg); + } + }; + exports2.InternalServerError = InternalServerError; + var InternalServerErrorWith = class extends InternalServerError { + static { + __name(this, "InternalServerErrorWith"); + } + constructor(err) { + super(err.message); + this.withMeta("cause", err.name); + this.withCause(err); + } + }; + exports2.InternalServerErrorWith = InternalServerErrorWith; + var BadRouteError = class extends TwirpError { + static { + __name(this, "BadRouteError"); + } + constructor(msg, method, url) { + super(TwirpErrorCode.BadRoute, msg); + this.withMeta("twirp_invalid_route", method + " " + url); + } + }; + exports2.BadRouteError = BadRouteError; + var TwirpErrorCode; + (function(TwirpErrorCode2) { + TwirpErrorCode2["Canceled"] = "canceled"; + TwirpErrorCode2["Unknown"] = "unknown"; + TwirpErrorCode2["InvalidArgument"] = "invalid_argument"; + TwirpErrorCode2["Malformed"] = "malformed"; + TwirpErrorCode2["DeadlineExceeded"] = "deadline_exceeded"; + TwirpErrorCode2["NotFound"] = "not_found"; + TwirpErrorCode2["BadRoute"] = "bad_route"; + TwirpErrorCode2["AlreadyExists"] = "already_exists"; + TwirpErrorCode2["PermissionDenied"] = "permission_denied"; + TwirpErrorCode2["Unauthenticated"] = "unauthenticated"; + TwirpErrorCode2["ResourceExhausted"] = "resource_exhausted"; + TwirpErrorCode2["FailedPrecondition"] = "failed_precondition"; + TwirpErrorCode2["Aborted"] = "aborted"; + TwirpErrorCode2["OutOfRange"] = "out_of_range"; + TwirpErrorCode2["Unimplemented"] = "unimplemented"; + TwirpErrorCode2["Internal"] = "internal"; + TwirpErrorCode2["Unavailable"] = "unavailable"; + TwirpErrorCode2["DataLoss"] = "data_loss"; + })(TwirpErrorCode = exports2.TwirpErrorCode || (exports2.TwirpErrorCode = {})); + function httpStatusFromErrorCode(code) { + switch (code) { + case TwirpErrorCode.Canceled: + return 408; + // RequestTimeout + case TwirpErrorCode.Unknown: + return 500; + // Internal Server Error + case TwirpErrorCode.InvalidArgument: + return 400; + // BadRequest + case 
TwirpErrorCode.Malformed: + return 400; + // BadRequest + case TwirpErrorCode.DeadlineExceeded: + return 408; + // RequestTimeout + case TwirpErrorCode.NotFound: + return 404; + // Not Found + case TwirpErrorCode.BadRoute: + return 404; + // Not Found + case TwirpErrorCode.AlreadyExists: + return 409; + // Conflict + case TwirpErrorCode.PermissionDenied: + return 403; + // Forbidden + case TwirpErrorCode.Unauthenticated: + return 401; + // Unauthorized + case TwirpErrorCode.ResourceExhausted: + return 429; + // Too Many Requests + case TwirpErrorCode.FailedPrecondition: + return 412; + // Precondition Failed + case TwirpErrorCode.Aborted: + return 409; + // Conflict + case TwirpErrorCode.OutOfRange: + return 400; + // Bad Request + case TwirpErrorCode.Unimplemented: + return 501; + // Not Implemented + case TwirpErrorCode.Internal: + return 500; + // Internal Server Error + case TwirpErrorCode.Unavailable: + return 503; + // Service Unavailable + case TwirpErrorCode.DataLoss: + return 500; + // Internal Server Error + default: + return 0; + } + } + __name(httpStatusFromErrorCode, "httpStatusFromErrorCode"); + exports2.httpStatusFromErrorCode = httpStatusFromErrorCode; + function isValidErrorCode(code) { + return httpStatusFromErrorCode(code) != 0; + } + __name(isValidErrorCode, "isValidErrorCode"); + exports2.isValidErrorCode = isValidErrorCode; + } +}); + +// ../node_modules/twirp-ts/build/twirp/request.js +var require_request3 = __commonJS({ + "../node_modules/twirp-ts/build/twirp/request.js"(exports2) { + "use strict"; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { try { - for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { - const segment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); - } - } catch (e_3_1) { - e_3 = { error: e_3_1 }; - } finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); - } finally { - if (e_3) throw e_3.error; - } + step(generator.next(value)); + } catch (e) { + reject(e); } - }, "findBlobsByTagsItems_1")); + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.parseTwirpPath = exports2.getRequestData = exports2.validateRequest = exports2.getContentType = exports2.TwirpContentType = void 0; + var errors_1 = require_errors3(); + var TwirpContentType; + (function(TwirpContentType2) { + TwirpContentType2[TwirpContentType2["Protobuf"] = 0] = "Protobuf"; + TwirpContentType2[TwirpContentType2["JSON"] = 1] = "JSON"; + TwirpContentType2[TwirpContentType2["Unknown"] = 2] = "Unknown"; + })(TwirpContentType = exports2.TwirpContentType || (exports2.TwirpContentType = {})); + function getContentType(mimeType) { + switch (mimeType) { + case "application/protobuf": + return TwirpContentType.Protobuf; + case "application/json": + return TwirpContentType.JSON; + default: + return TwirpContentType.Unknown; } - /** - * Returns an async iterable iterator to find all blobs with specified tag - * under the specified container. - * - * .byPage() returns an async iterable iterator to list the blobs in pages. - * - * Example using `for await` syntax: - * - * ```js - * let i = 1; - * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * ``` - * - * Example using `iter.next()`: - * - * ```js - * let i = 1; - * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); - * let blobItem = await iter.next(); - * while (!blobItem.done) { - * console.log(`Blob ${i++}: ${blobItem.value.name}`); - * blobItem = await iter.next(); - * } - * ``` - * - * Example using `byPage()`: - * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * } - * ``` - * - * Example using paging with a marker: - * - * ```js - * let i = 1; - * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * - * // Prints 2 blob names - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * - * // Gets next marker - * let marker = response.continuationToken; - * // Passing next marker as continuationToken - * iterator = containerClient - * .findBlobsByTags("tagkey='tagvalue'") - * .byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; - * - * // Prints blob names - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * ``` - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to find blobs by tags. 
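`getContentType` is the whole content-negotiation story for Twirp: exactly two MIME types are recognized, and everything else collapses to `Unknown`, which `validateRequest` later rejects as a bad route. For example (treating the bundled request helpers as importable; the require path is illustrative):

```js
const { getContentType, TwirpContentType } = require("twirp-ts");

console.log(getContentType("application/protobuf") === TwirpContentType.Protobuf); // true
console.log(getContentType("application/json") === TwirpContentType.JSON);         // true
console.log(getContentType("text/plain") === TwirpContentType.Unknown);            // true
console.log(getContentType(undefined) === TwirpContentType.Unknown);               // true
```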
- */ - findBlobsByTags(tagFilterSqlExpression, options = {}) { - const listSegmentOptions = Object.assign({}, options); - const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + } + __name(getContentType, "getContentType"); + exports2.getContentType = getContentType; + function validateRequest(ctx, request, pathPrefix) { + if (request.method !== "POST") { + const msg = `unsupported method ${request.method} (only POST is allowed)`; + throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); + } + const path2 = parseTwirpPath(request.url || ""); + if (path2.pkgService !== (ctx.packageName ? ctx.packageName + "." : "") + ctx.serviceName) { + const msg = `no handler for path ${request.url}`; + throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); + } + if (path2.prefix !== pathPrefix) { + const msg = `invalid path prefix ${path2.prefix}, expected ${pathPrefix}, on path ${request.url}`; + throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); + } + const mimeContentType = request.headers["content-type"] || ""; + if (ctx.contentType === TwirpContentType.Unknown) { + const msg = `unexpected Content-Type: ${request.headers["content-type"]}`; + throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); + } + return Object.assign(Object.assign({}, path2), { mimeContentType, contentType: ctx.contentType }); + } + __name(validateRequest, "validateRequest"); + exports2.validateRequest = validateRequest; + function getRequestData(req) { + return new Promise((resolve, reject) => { + const reqWithRawBody = req; + if (reqWithRawBody.rawBody instanceof Buffer) { + resolve(reqWithRawBody.rawBody); + return; + } + const chunks = []; + req.on("data", (chunk) => chunks.push(chunk)); + req.on("end", () => __awaiter3(this, void 0, void 0, function* () { + const data = Buffer.concat(chunks); + resolve(data); + })); + req.on("error", (err) => { + if (req.aborted) { + reject(new errors_1.TwirpError(errors_1.TwirpErrorCode.DeadlineExceeded, "failed to read request: deadline exceeded")); + } else { + reject(new errors_1.TwirpError(errors_1.TwirpErrorCode.Malformed, err.message).withCause(err)); + } + }); + req.on("close", () => { + reject(new errors_1.TwirpError(errors_1.TwirpErrorCode.Canceled, "failed to read request: context canceled")); + }); + }); + } + __name(getRequestData, "getRequestData"); + exports2.getRequestData = getRequestData; + function parseTwirpPath(path2) { + const parts = path2.split("/"); + if (parts.length < 2) { return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: /* @__PURE__ */ __name((settings = {}) => { - return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - }, "byPage") + pkgService: "", + method: "", + prefix: "" }; } - getContainerNameFromUrl() { - let containerName; - try { - const parsedUrl = coreHttp.URLBuilder.parse(this.url); - if (parsedUrl.getHost().split(".")[1] === "blob") { - containerName = parsedUrl.getPath().split("/")[1]; - } else if (isIpEndpointStyle(parsedUrl)) { - containerName = parsedUrl.getPath().split("/")[2]; - } else { - containerName = 
parsedUrl.getPath().split("/")[1]; + return { + method: parts[parts.length - 1], + pkgService: parts[parts.length - 2], + prefix: parts.slice(0, parts.length - 2).join("/") + }; + } + __name(parseTwirpPath, "parseTwirpPath"); + exports2.parseTwirpPath = parseTwirpPath; + } +}); + +// ../node_modules/twirp-ts/build/twirp/server.js +var require_server = __commonJS({ + "../node_modules/twirp-ts/build/twirp/server.js"(exports2) { + "use strict"; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } - containerName = decodeURIComponent(containerName); - if (!containerName) { - throw new Error("Provided containerName is invalid."); + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); } - return containerName; - } catch (error) { - throw new Error("Unable to extract containerName with provided information."); } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.writeError = exports2.TwirpServer = void 0; + var hooks_1 = require_hooks(); + var request_1 = require_request3(); + var errors_1 = require_errors3(); + var TwirpServer = class { + static { + __name(this, "TwirpServer"); + } + constructor(options) { + this.pathPrefix = "/twirp"; + this.hooks = []; + this.interceptors = []; + this.packageName = options.packageName; + this.serviceName = options.serviceName; + this.methodList = options.methodList; + this.matchRoute = options.matchRoute; + this.service = options.service; + } + /** + * Returns the prefix for this server + */ + get prefix() { + return this.pathPrefix; } /** - * Only available for ContainerClient constructed with a shared key credential. - * - * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the shared key credential of the client. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas - * - * @param options - Optional parameters. - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + * The http handler for twirp complaint endpoints + * @param options */ - generateSasUrl(options) { - return new Promise((resolve) => { - if (!(this.credential instanceof StorageSharedKeyCredential)) { - throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + httpHandler(options) { + return (req, resp) => { + if ((options === null || options === void 0 ? 
void 0 : options.prefix) !== void 0) { + this.withPrefix(options.prefix); } - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); - resolve(appendToURLQuery(this.url, sas)); - }); + return this._httpHandler(req, resp); + }; } /** - * Creates a BlobBatchClient object to conduct batch operations. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch - * - * @returns A new BlobBatchClient object for this container. + * Adds interceptors or hooks to the request stack + * @param middlewares */ - getBlobBatchClient() { - return new BlobBatchClient(this.url, this.pipeline); - } - }; - var AccountSASPermissions = class _AccountSASPermissions { - static { - __name(this, "AccountSASPermissions"); - } - constructor() { - this.read = false; - this.write = false; - this.delete = false; - this.deleteVersion = false; - this.list = false; - this.add = false; - this.create = false; - this.update = false; - this.process = false; - this.tag = false; - this.filter = false; - this.setImmutabilityPolicy = false; - this.permanentDelete = false; + use(...middlewares) { + middlewares.forEach((middleware) => { + if (hooks_1.isHook(middleware)) { + this.hooks.push(middleware); + return this; + } + this.interceptors.push(middleware); + }); + return this; } /** - * Parse initializes the AccountSASPermissions fields from a string. - * - * @param permissions - + * Adds a prefix to the service url path + * @param prefix */ - static parse(permissions) { - const accountSASPermissions = new _AccountSASPermissions(); - for (const c of permissions) { - switch (c) { - case "r": - accountSASPermissions.read = true; - break; - case "w": - accountSASPermissions.write = true; - break; - case "d": - accountSASPermissions.delete = true; - break; - case "x": - accountSASPermissions.deleteVersion = true; - break; - case "l": - accountSASPermissions.list = true; - break; - case "a": - accountSASPermissions.add = true; - break; - case "c": - accountSASPermissions.create = true; - break; - case "u": - accountSASPermissions.update = true; - break; - case "p": - accountSASPermissions.process = true; - break; - case "t": - accountSASPermissions.tag = true; - break; - case "f": - accountSASPermissions.filter = true; - break; - case "i": - accountSASPermissions.setImmutabilityPolicy = true; - break; - case "y": - accountSASPermissions.permanentDelete = true; - break; - default: - throw new RangeError(`Invalid permission character: ${c}`); - } + withPrefix(prefix) { + if (prefix === false) { + this.pathPrefix = ""; + } else { + this.pathPrefix = prefix; } - return accountSASPermissions; + return this; } /** - * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it - * and boolean values for them. 
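`use` accepts both middleware shapes on the same call: objects recognized by `isHook` go into the hook list, plain functions are treated as interceptors, and either way the server is returned for chaining; `withPrefix(false)` drops the default `/twirp` prefix entirely. A sketch, where `createServer(serviceImpl)` stands in for whatever twirp-ts code generation produced for the actual service (hypothetical names):

```js
const server = createServer(serviceImpl) // hypothetical generated factory
  .use(async (ctx, req, next) => {
    // Interceptor: wraps the handler and can observe request and response.
    const started = Date.now();
    const res = await next(ctx, req);
    console.log(`${ctx.serviceName} handled in ${Date.now() - started}ms`);
    return res;
  })
  .use({
    // Hook: lifecycle callback object, matched by isHook().
    requestReceived(ctx) { console.log(`received ${ctx.req.url}`); },
  })
  .withPrefix(false); // serve from "/" instead of "/twirp"
```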
- * - * @param permissionLike - + * Returns the regex matching path for this twirp server */ - static from(permissionLike) { - const accountSASPermissions = new _AccountSASPermissions(); - if (permissionLike.read) { - accountSASPermissions.read = true; - } - if (permissionLike.write) { - accountSASPermissions.write = true; - } - if (permissionLike.delete) { - accountSASPermissions.delete = true; - } - if (permissionLike.deleteVersion) { - accountSASPermissions.deleteVersion = true; - } - if (permissionLike.filter) { - accountSASPermissions.filter = true; - } - if (permissionLike.tag) { - accountSASPermissions.tag = true; - } - if (permissionLike.list) { - accountSASPermissions.list = true; - } - if (permissionLike.add) { - accountSASPermissions.add = true; - } - if (permissionLike.create) { - accountSASPermissions.create = true; - } - if (permissionLike.update) { - accountSASPermissions.update = true; - } - if (permissionLike.process) { - accountSASPermissions.process = true; - } - if (permissionLike.setImmutabilityPolicy) { - accountSASPermissions.setImmutabilityPolicy = true; - } - if (permissionLike.permanentDelete) { - accountSASPermissions.permanentDelete = true; - } - return accountSASPermissions; + matchingPath() { + const baseRegex = this.baseURI().replace(/\./g, "\\."); + return new RegExp(`${baseRegex}/(${this.methodList.join("|")})`); } /** - * Produces the SAS permissions string for an Azure Storage account. - * Call this method to set AccountSASSignatureValues Permissions field. - * - * Using this method will guarantee the resource types are in - * an order accepted by the service. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas - * + * Returns the base URI for this twirp server */ - toString() { - const permissions = []; - if (this.read) { - permissions.push("r"); - } - if (this.write) { - permissions.push("w"); - } - if (this.delete) { - permissions.push("d"); - } - if (this.deleteVersion) { - permissions.push("x"); - } - if (this.filter) { - permissions.push("f"); - } - if (this.tag) { - permissions.push("t"); - } - if (this.list) { - permissions.push("l"); - } - if (this.add) { - permissions.push("a"); - } - if (this.create) { - permissions.push("c"); - } - if (this.update) { - permissions.push("u"); - } - if (this.process) { - permissions.push("p"); - } - if (this.setImmutabilityPolicy) { - permissions.push("i"); - } - if (this.permanentDelete) { - permissions.push("y"); - } - return permissions.join(""); - } - }; - var AccountSASResourceTypes = class _AccountSASResourceTypes { - static { - __name(this, "AccountSASResourceTypes"); + baseURI() { + return `${this.pathPrefix}/${this.packageName ? this.packageName + "." : ""}${this.serviceName}`; } - constructor() { - this.service = false; - this.container = false; - this.object = false; + /** + * Create a twirp context + * @param req + * @param res + * @private + */ + createContext(req, res) { + return { + packageName: this.packageName, + serviceName: this.serviceName, + methodName: "", + contentType: request_1.getContentType(req.headers["content-type"]), + req, + res + }; } /** - * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an - * Error if it encounters a character that does not correspond to a valid resource type. 
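`baseURI` and `matchingPath` together define the only URLs the server answers: `<prefix>/<package>.<Service>/<Method>`, with dots escaped so the regex cannot be satisfied by look-alike paths. With illustrative values `packageName = "org.example"`, `serviceName = "Reports"` and `methodList = ["Get", "Publish"]`, the generated matcher behaves like this:

```js
// matchingPath() escapes "." in baseURI() and alternates the method names:
const re = new RegExp("/twirp/org\\.example\\.Reports/(Get|Publish)");

console.log(re.test("/twirp/org.example.Reports/Publish")); // true
console.log(re.test("/twirp/org-example-Reports/Publish")); // false: dots match literally
console.log(re.test("/twirp/org.example.Reports/Delete"));  // false: unknown method
```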
- * - * @param resourceTypes - + * Twrip server http handler implementation + * @param req + * @param resp + * @private */ - static parse(resourceTypes) { - const accountSASResourceTypes = new _AccountSASResourceTypes(); - for (const c of resourceTypes) { - switch (c) { - case "s": - accountSASResourceTypes.service = true; - break; - case "c": - accountSASResourceTypes.container = true; - break; - case "o": - accountSASResourceTypes.object = true; - break; - default: - throw new RangeError(`Invalid resource type: ${c}`); + _httpHandler(req, resp) { + return __awaiter3(this, void 0, void 0, function* () { + const ctx = this.createContext(req, resp); + try { + yield this.invokeHook("requestReceived", ctx); + const { method, mimeContentType } = request_1.validateRequest(ctx, req, this.pathPrefix || ""); + const handler = this.matchRoute(method, { + onMatch: /* @__PURE__ */ __name((ctx2) => { + return this.invokeHook("requestRouted", ctx2); + }, "onMatch"), + onNotFound: /* @__PURE__ */ __name(() => { + const msg = `no handler for path ${req.url}`; + throw new errors_1.BadRouteError(msg, req.method || "", req.url || ""); + }, "onNotFound") + }); + const body = yield request_1.getRequestData(req); + const response = yield handler(ctx, this.service, body, this.interceptors); + yield Promise.all([ + this.invokeHook("responsePrepared", ctx), + // keep backwards compatibility till next release + this.invokeHook("requestPrepared", ctx) + ]); + resp.statusCode = 200; + resp.setHeader("Content-Type", mimeContentType); + resp.end(response); + } catch (e) { + yield this.invokeHook("error", ctx, mustBeTwirpError(e)); + if (!resp.headersSent) { + writeError(resp, e); + } + } finally { + yield Promise.all([ + this.invokeHook("responseSent", ctx), + // keep backwards compatibility till next release + this.invokeHook("requestSent", ctx) + ]); } - } - return accountSASResourceTypes; + }); } /** - * Converts the given resource types to a string. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas - * + * Invoke a hook + * @param hookName + * @param ctx + * @param err + * @protected */ - toString() { - const resourceTypes = []; - if (this.service) { - resourceTypes.push("s"); - } - if (this.container) { - resourceTypes.push("c"); - } - if (this.object) { - resourceTypes.push("o"); - } - return resourceTypes.join(""); + invokeHook(hookName, ctx, err) { + return __awaiter3(this, void 0, void 0, function* () { + if (this.hooks.length === 0) { + return; + } + const chainedHooks = hooks_1.chainHooks(...this.hooks); + const hook = chainedHooks === null || chainedHooks === void 0 ? 
void 0 : chainedHooks[hookName]; + if (hook) { + yield hook(ctx, err || new errors_1.InternalServerError("internal server error")); + } + }); } }; - var AccountSASServices = class _AccountSASServices { - static { - __name(this, "AccountSASServices"); + exports2.TwirpServer = TwirpServer; + function writeError(res, error) { + const twirpError = mustBeTwirpError(error); + res.setHeader("Content-Type", "application/json"); + res.statusCode = errors_1.httpStatusFromErrorCode(twirpError.code); + res.end(twirpError.toJSON()); + } + __name(writeError, "writeError"); + exports2.writeError = writeError; + function mustBeTwirpError(err) { + if (err instanceof errors_1.TwirpError) { + return err; } - constructor() { - this.blob = false; - this.file = false; - this.queue = false; - this.table = false; + return new errors_1.InternalServerErrorWith(err); + } + __name(mustBeTwirpError, "mustBeTwirpError"); + } +}); + +// ../node_modules/twirp-ts/build/twirp/interceptors.js +var require_interceptors = __commonJS({ + "../node_modules/twirp-ts/build/twirp/interceptors.js"(exports2) { + "use strict"; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); } - /** - * Creates an {@link AccountSASServices} from the specified services string. This method will throw an - * Error if it encounters a character that does not correspond to a valid service. - * - * @param services - - */ - static parse(services) { - const accountSASServices = new _AccountSASServices(); - for (const c of services) { - switch (c) { - case "b": - accountSASServices.blob = true; - break; - case "f": - accountSASServices.file = true; - break; - case "q": - accountSASServices.queue = true; - break; - case "t": - accountSASServices.table = true; - break; - default: - throw new RangeError(`Invalid service character: ${c}`); + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } } - return accountSASServices; - } - /** - * Converts the given services to a string. - * - */ - toString() { - const services = []; - if (this.blob) { - services.push("b"); - } - if (this.table) { - services.push("t"); - } - if (this.queue) { - services.push("q"); + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } } - if (this.file) { - services.push("f"); + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - return services.join(""); - } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; - function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { - const version4 = accountSASSignatureValues.version ? 
accountSASSignatureValues.version : SERVICE_VERSION; - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.setImmutabilityPolicy && version4 < "2020-08-04") { - throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.chainInterceptors = void 0; + function chainInterceptors(...interceptors) { + if (interceptors.length === 0) { + return; } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.deleteVersion && version4 < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); + if (interceptors.length === 1) { + return interceptors[0]; } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.permanentDelete && version4 < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + const first = interceptors[0]; + return (ctx, request, handler) => __awaiter3(this, void 0, void 0, function* () { + let next = handler; + for (let i = interceptors.length - 1; i > 0; i--) { + next = /* @__PURE__ */ ((next2) => (ctx2, typedRequest) => { + return interceptors[i](ctx2, typedRequest, next2); + })(next); + } + return first(ctx, request, next); + }); + } + __name(chainInterceptors, "chainInterceptors"); + exports2.chainInterceptors = chainInterceptors; + } +}); + +// ../node_modules/dot-object/index.js +var require_dot_object = __commonJS({ + "../node_modules/dot-object/index.js"(exports2, module2) { + "use strict"; + function _process(v, mod) { + var i; + var r; + if (typeof mod === "function") { + r = mod(v); + if (r !== void 0) { + v = r; + } + } else if (Array.isArray(mod)) { + for (i = 0; i < mod.length; i++) { + r = mod[i](v); + if (r !== void 0) { + v = r; + } + } } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.tag && version4 < "2019-12-12") { - throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); + return v; + } + __name(_process, "_process"); + function parseKey(key, val) { + if (key[0] === "-" && Array.isArray(val) && /^-\d+$/.test(key)) { + return val.length + parseInt(key, 10); } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.filter && version4 < "2019-12-12") { - throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); + return key; + } + __name(parseKey, "parseKey"); + function isIndex(k) { + return /^\d+$/.test(k); + } + __name(isIndex, "isIndex"); + function isObject(val) { + return Object.prototype.toString.call(val) === "[object Object]"; + } + __name(isObject, "isObject"); + function isArrayOrObject(val) { + return Object(val) === val; + } + __name(isArrayOrObject, "isArrayOrObject"); + function isEmptyObject(val) { + return Object.keys(val).length === 0; + } + __name(isEmptyObject, "isEmptyObject"); + var blacklist = ["__proto__", "prototype", "constructor"]; + var blacklistFilter = /* @__PURE__ */ __name(function(part) { + return blacklist.indexOf(part) === -1; + }, "blacklistFilter"); + function parsePath(path2, sep) { + if (path2.indexOf("[") >= 0) { + path2 = path2.replace(/\[/g, sep).replace(/]/g, ""); } - if (accountSASSignatureValues.encryptionScope && version4 < "2020-12-06") { - throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + var parts = path2.split(sep); + var check = parts.filter(blacklistFilter); + if 
(check.length !== parts.length) { + throw Error("Refusing to update blacklisted property " + path2); } - const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); - const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); - const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); - let stringToSign; - if (version4 >= "2020-12-06") { - stringToSign = [ - sharedKeyCredential.accountName, - parsedPermissions, - parsedServices, - parsedResourceTypes, - accountSASSignatureValues.startsOn ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) : "", - truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), - accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", - accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", - version4, - accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", - "" - // Account SAS requires an additional newline character - ].join("\n"); - } else { - stringToSign = [ - sharedKeyCredential.accountName, - parsedPermissions, - parsedServices, - parsedResourceTypes, - accountSASSignatureValues.startsOn ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) : "", - truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), - accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", - accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", - version4, - "" - // Account SAS requires an additional newline character - ].join("\n"); + return parts; + } + __name(parsePath, "parsePath"); + var hasOwnProperty = Object.prototype.hasOwnProperty; + function DotObject(separator, override, useArray, useBrackets) { + if (!(this instanceof DotObject)) { + return new DotObject(separator, override, useArray, useBrackets); } - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(version4, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, accountSASSignatureValues.encryptionScope); + if (typeof override === "undefined") override = false; + if (typeof useArray === "undefined") useArray = true; + if (typeof useBrackets === "undefined") useBrackets = true; + this.separator = separator || "."; + this.override = override; + this.useArray = useArray; + this.useBrackets = useBrackets; + this.keepArray = false; + this.cleanup = []; } - __name(generateAccountSASQueryParameters, "generateAccountSASQueryParameters"); - var BlobServiceClient = class _BlobServiceClient extends StorageClient { - static { - __name(this, "BlobServiceClient"); + __name(DotObject, "DotObject"); + var dotDefault = new DotObject(".", false, true, true); + function wrap(method) { + return function() { + return dotDefault[method].apply(dotDefault, arguments); + }; + } + __name(wrap, "wrap"); + DotObject.prototype._fill = function(a, obj, v, mod) { + var k = a.shift(); + if (a.length > 0) { + obj[k] = obj[k] || (this.useArray && isIndex(a[0]) ? 
[] : {}); + if (!isArrayOrObject(obj[k])) { + if (this.override) { + obj[k] = {}; + } else { + if (!(isArrayOrObject(v) && isEmptyObject(v))) { + throw new Error( + "Trying to redefine `" + k + "` which is a " + typeof obj[k] + ); + } + return; + } + } + this._fill(a, obj[k], v, mod); + } else { + if (!this.override && isArrayOrObject(obj[k]) && !isEmptyObject(obj[k])) { + if (!(isArrayOrObject(v) && isEmptyObject(v))) { + throw new Error("Trying to redefine non-empty obj['" + k + "']"); + } + return; + } + obj[k] = _process(v, mod); } - constructor(url2, credentialOrPipeline, options) { - let pipeline; - if (isPipelineLike(credentialOrPipeline)) { - pipeline = credentialOrPipeline; - } else if (coreHttp.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential || credentialOrPipeline instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipeline)) { - pipeline = newPipeline(credentialOrPipeline, options); + }; + DotObject.prototype.object = function(obj, mods) { + var self2 = this; + Object.keys(obj).forEach(function(k) { + var mod = mods === void 0 ? null : mods[k]; + var ok = parsePath(k, self2.separator).join(self2.separator); + if (ok.indexOf(self2.separator) !== -1) { + self2._fill(ok.split(self2.separator), obj, obj[k], mod); + delete obj[k]; } else { - pipeline = newPipeline(new AnonymousCredential(), options); + obj[k] = _process(obj[k], mod); } - super(url2, pipeline); - this.serviceContext = new Service(this.storageClientContext); + }); + return obj; + }; + DotObject.prototype.str = function(path2, v, obj, mod) { + var ok = parsePath(path2, this.separator).join(this.separator); + if (path2.indexOf(this.separator) !== -1) { + this._fill(ok.split(this.separator), obj, v, mod); + } else { + obj[path2] = _process(v, mod); } - /** - * - * Creates an instance of BlobServiceClient from connection string. - * - * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. - * [ Note - Account connection string can only be used in NODE.JS runtime. ] - * Account connection string example - - * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` - * SAS connection string example - - * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` - * @param options - Optional. Options to configure the HTTP pipeline. 
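`object` and `str` are the write half of dot-object: dotted (or bracketed) keys are parsed through the prototype-pollution blacklist above and expanded into nested structures, with numeric segments becoming array indices while `useArray` is on (the default). For instance:

```js
const Dot = require("dot-object");

const flat = { "tool.driver.rules[0].id": "RULE-1", "tool.driver.name": "linter" };
Dot.object(flat); // expands in place and deletes the flat keys
console.log(JSON.stringify(flat));
// {"tool":{"driver":{"rules":[{"id":"RULE-1"}],"name":"linter"}}}

const tgt = {};
Dot.str("results.summary.total", 42, tgt); // sets one dotted path on a target
console.log(tgt.results.summary.total); // 42
```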
- */ - static fromConnectionString(connectionString, options) { - options = options || {}; - const extractedCreds = extractConnectionStringParts(connectionString); - if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + return obj; + }; + DotObject.prototype.pick = function(path2, obj, remove, reindexArray) { + var i; + var keys; + var val; + var key; + var cp; + keys = parsePath(path2, this.separator); + for (i = 0; i < keys.length; i++) { + key = parseKey(keys[i], obj); + if (obj && typeof obj === "object" && key in obj) { + if (i === keys.length - 1) { + if (remove) { + val = obj[key]; + if (reindexArray && Array.isArray(obj)) { + obj.splice(key, 1); + } else { + delete obj[key]; + } + if (Array.isArray(obj)) { + cp = keys.slice(0, -1).join("."); + if (this.cleanup.indexOf(cp) === -1) { + this.cleanup.push(cp); + } + } + return val; + } else { + return obj[key]; } - const pipeline = newPipeline(sharedKeyCredential, options); - return new _BlobServiceClient(extractedCreds.url, pipeline); } else { - throw new Error("Account connection string is only supported in Node.js environment"); + obj = obj[key]; } - } else if (extractedCreds.kind === "SASConnString") { - const pipeline = newPipeline(new AnonymousCredential(), options); - return new _BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + return void 0; } } - /** - * Creates a {@link ContainerClient} object - * - * @param containerName - A container name - * @returns A new ContainerClient object for the given container name. - * - * Example usage: - * - * ```js - * const containerClient = blobServiceClient.getContainerClient(""); - * ``` - */ - getContainerClient(containerName) { - return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); + if (remove && Array.isArray(obj)) { + obj = obj.filter(function(n) { + return n !== void 0; + }); } - /** - * Create a Blob container. @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container - * - * @param containerName - Name of the container to create. - * @param options - Options to configure Container Create operation. - * @returns Container creation response and the corresponding container client. - */ - async createContainer(containerName, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options); - try { - const containerClient = this.getContainerClient(containerName); - const containerCreateResponse = await containerClient.create(updatedOptions); - return { - containerClient, - containerCreateResponse - }; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + return obj; + }; + DotObject.prototype.delete = function(path2, obj) { + return this.remove(path2, obj, true); + }; + DotObject.prototype.remove = function(path2, obj, reindexArray) { + var i; + this.cleanup = []; + if (Array.isArray(path2)) { + for (i = 0; i < path2.length; i++) { + this.pick(path2[i], obj, true, reindexArray); } - } - /** - * Deletes a Blob container. 
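`pick`, `remove` and `delete` are the read/cleanup half: `pick` walks one dotted path and can optionally delete the leaf, `remove` also accepts an array of paths, and `parseKey` resolves negative indices against the array length. For example:

```js
const Dot = require("dot-object");

const data = { runs: [{ tool: "first" }, { tool: "second" }] };

console.log(Dot.pick("runs[1].tool", data)); // "second"
console.log(Dot.pick("runs.-1.tool", data)); // "second": -1 counts from the end

Dot.remove("runs[0].tool", data); // deletes the leaf in place
console.log(JSON.stringify(data.runs[0])); // {}
```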
- * - * @param containerName - Name of the container to delete. - * @param options - Options to configure Container Delete operation. - * @returns Container deletion response. - */ - async deleteContainer(containerName, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options); - try { - const containerClient = this.getContainerClient(containerName); - return await containerClient.delete(updatedOptions); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + if (!reindexArray) { + this._cleanup(obj); } + return obj; + } else { + return this.pick(path2, obj, true, reindexArray); } - /** - * Restore a previously deleted Blob container. - * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. - * - * @param deletedContainerName - Name of the previously deleted container. - * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. - * @param options - Options to configure Container Restore operation. - * @returns Container deletion response. - */ - async undeleteContainer(deletedContainerName2, deletedContainerVersion2, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options); - try { - const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName2); - const containerContext = new Container(containerClient["storageClientContext"]); - const containerUndeleteResponse = await containerContext.restore(Object.assign({ - deletedContainerName: deletedContainerName2, - deletedContainerVersion: deletedContainerVersion2 - }, updatedOptions)); - return { containerClient, containerUndeleteResponse }; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + }; + DotObject.prototype._cleanup = function(obj) { + var ret; + var i; + var keys; + var root; + if (this.cleanup.length) { + for (i = 0; i < this.cleanup.length; i++) { + keys = this.cleanup[i].split("."); + root = keys.splice(0, -1).join("."); + ret = root ? this.pick(root, obj) : obj; + ret = ret[keys[0]].filter(function(v) { + return v !== void 0; }); - throw e; - } finally { - span.end(); + this.set(this.cleanup[i], ret, obj); } + this.cleanup = []; } - /** - * Rename an existing Blob Container. - * - * @param sourceContainerName - The name of the source container. - * @param destinationContainerName - The new name of the container. - * @param options - Options to configure Container Rename operation. - */ - /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ - // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. - async renameContainer(sourceContainerName2, destinationContainerName, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options); - try { - const containerClient = this.getContainerClient(destinationContainerName); - const containerContext = new Container(containerClient["storageClientContext"]); - const containerRenameResponse = await containerContext.rename(sourceContainerName2, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? 
void 0 : _a.leaseId })); - return { containerClient, containerRenameResponse }; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + }; + DotObject.prototype.del = DotObject.prototype.remove; + DotObject.prototype.move = function(source, target, obj, mods, merge) { + if (typeof mods === "function" || Array.isArray(mods)) { + this.set(target, _process(this.pick(source, obj, true), mods), obj, merge); + } else { + merge = mods; + this.set(target, this.pick(source, obj, true), obj, merge); + } + return obj; + }; + DotObject.prototype.transfer = function(source, target, obj1, obj2, mods, merge) { + if (typeof mods === "function" || Array.isArray(mods)) { + this.set( + target, + _process(this.pick(source, obj1, true), mods), + obj2, + merge + ); + } else { + merge = mods; + this.set(target, this.pick(source, obj1, true), obj2, merge); + } + return obj2; + }; + DotObject.prototype.copy = function(source, target, obj1, obj2, mods, merge) { + if (typeof mods === "function" || Array.isArray(mods)) { + this.set( + target, + _process( + // clone what is picked + JSON.parse(JSON.stringify(this.pick(source, obj1, false))), + mods + ), + obj2, + merge + ); + } else { + merge = mods; + this.set(target, this.pick(source, obj1, false), obj2, merge); + } + return obj2; + }; + DotObject.prototype.set = function(path2, val, obj, merge) { + var i; + var k; + var keys; + var key; + if (typeof val === "undefined") { + return obj; + } + keys = parsePath(path2, this.separator); + for (i = 0; i < keys.length; i++) { + key = keys[i]; + if (i === keys.length - 1) { + if (merge && isObject(val) && isObject(obj[key])) { + for (k in val) { + if (hasOwnProperty.call(val, k)) { + obj[key][k] = val[k]; + } + } + } else if (merge && Array.isArray(obj[key]) && Array.isArray(val)) { + for (var j = 0; j < val.length; j++) { + obj[keys[i]].push(val[j]); + } + } else { + obj[key] = val; + } + } else if ( + // force the value to be an object + !hasOwnProperty.call(obj, key) || !isObject(obj[key]) && !Array.isArray(obj[key]) + ) { + if (/^\d+$/.test(keys[i + 1])) { + obj[key] = []; + } else { + obj[key] = {}; + } } + obj = obj[key]; + } + return obj; + }; + DotObject.prototype.transform = function(recipe, obj, tgt) { + obj = obj || {}; + tgt = tgt || {}; + Object.keys(recipe).forEach( + function(key) { + this.set(recipe[key], this.pick(key, obj), tgt); + }.bind(this) + ); + return tgt; + }; + DotObject.prototype.dot = function(obj, tgt, path2) { + tgt = tgt || {}; + path2 = path2 || []; + var isArray = Array.isArray(obj); + Object.keys(obj).forEach( + function(key) { + var index = isArray && this.useBrackets ? 
"[" + key + "]" : key; + if (isArrayOrObject(obj[key]) && (isObject(obj[key]) && !isEmptyObject(obj[key]) || Array.isArray(obj[key]) && !this.keepArray && obj[key].length !== 0)) { + if (isArray && this.useBrackets) { + var previousKey = path2[path2.length - 1] || ""; + return this.dot( + obj[key], + tgt, + path2.slice(0, -1).concat(previousKey + index) + ); + } else { + return this.dot(obj[key], tgt, path2.concat(index)); + } + } else { + if (isArray && this.useBrackets) { + tgt[path2.join(this.separator).concat("[" + key + "]")] = obj[key]; + } else { + tgt[path2.concat(index).join(this.separator)] = obj[key]; + } + } + }.bind(this) + ); + return tgt; + }; + DotObject.pick = wrap("pick"); + DotObject.move = wrap("move"); + DotObject.transfer = wrap("transfer"); + DotObject.transform = wrap("transform"); + DotObject.copy = wrap("copy"); + DotObject.object = wrap("object"); + DotObject.str = wrap("str"); + DotObject.set = wrap("set"); + DotObject.delete = wrap("delete"); + DotObject.del = DotObject.remove = wrap("remove"); + DotObject.dot = wrap("dot"); + ["override", "overwrite"].forEach(function(prop) { + Object.defineProperty(DotObject, prop, { + get: /* @__PURE__ */ __name(function() { + return dotDefault.override; + }, "get"), + set: /* @__PURE__ */ __name(function(val) { + dotDefault.override = !!val; + }, "set") + }); + }); + ["useArray", "keepArray", "useBrackets"].forEach(function(prop) { + Object.defineProperty(DotObject, prop, { + get: /* @__PURE__ */ __name(function() { + return dotDefault[prop]; + }, "get"), + set: /* @__PURE__ */ __name(function(val) { + dotDefault[prop] = val; + }, "set") + }); + }); + DotObject._process = _process; + module2.exports = DotObject; + } +}); + +// ../node_modules/twirp-ts/build/twirp/http.client.js +var require_http_client = __commonJS({ + "../node_modules/twirp-ts/build/twirp/http.client.js"(exports2) { + "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); } - /** - * Gets the properties of a storage account’s Blob service, including properties - * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties - * - * @param options - Options to the Service Get Properties operation. - * @returns Response data for the Service Get Properties operation. 
- */ - async getProperties(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options); - try { - return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } + __setModuleDefault3(result, mod); + return result; + }; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); } - /** - * Sets properties for a storage account’s Blob service endpoint, including properties - * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties - * - * @param properties - - * @param options - Options to the Service Set Properties operation. - * @returns Response data for the Service Set Properties operation. - */ - async setProperties(properties, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options); - try { - return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } } - } - /** - * Retrieves statistics related to replication for the Blob service. It is only - * available on the secondary location endpoint when read-access geo-redundant - * replication is enabled for the storage account. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats - * - * @param options - Options to the Service Get Statistics operation. - * @returns Response data for the Service Get Statistics operation. - */ - async getStatistics(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options); - try { - return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } } - } - /** - * The Get Account Information operation returns the sku name and account kind - * for the specified account. - * The Get Account Information operation is available on service versions beginning - * with version 2018-03-28. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information - * - * @param options - Options to the Service Get Account Info operation. - * @returns Response data for the Service Get Account Info operation. 
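`NodeHttpRPC` (defined just below) adapts Node's `http`/`https` modules to the `Rpc` interface that generated twirp-ts clients expect: it POSTs to `<baseUrl>/<service>/<method>`, resolves parsed JSON or a protobuf `Buffer`, and rejects non-200 responses as `TwirpError`s via `wrapErrorResponseToTwirpError`. A usage sketch with illustrative service and URL values:

```js
const { NodeHttpRPC } = require("twirp-ts"); // illustrative require path

const rpc = NodeHttpRPC({
  baseUrl: "https://example.org/twirp", // "/<service>/<method>" is appended per call
  headers: { Authorization: "Bearer <token>" },
});

rpc.request("org.example.ReportService", "GetReport", "application/json", { id: "1" })
  .then((resp) => console.log(resp))                 // parsed JSON for "application/json"
  .catch((err) => console.error(err.code, err.msg)); // TwirpError on non-200
```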
- */ - async getAccountInfo(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options); - try { - return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - } - /** - * Returns a list of the containers under the specified account. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 - * - * @param marker - A string value that identifies the portion of - * the list of containers to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all containers remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to the Service List Container Segment operation. - * @returns Response data for the Service List Container Segment operation. - */ - async listContainersSegment(marker2, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options); - try { - return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker: marker2 }, options), { include: typeof options.include === "string" ? [options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.FetchRPC = exports2.wrapErrorResponseToTwirpError = exports2.NodeHttpRPC = void 0; + var http = __importStar3(require("http")); + var https = __importStar3(require("https")); + var url_1 = require("url"); + var errors_1 = require_errors3(); + var NodeHttpRPC = /* @__PURE__ */ __name((options) => ({ + request(service, method, contentType, data) { + let client; + return new Promise((resolve, rejected) => { + const responseChunks = []; + const requestData = contentType === "application/protobuf" ? Buffer.from(data) : JSON.stringify(data); + const url = new url_1.URL(options.baseUrl); + const isHttps = url.protocol === "https:"; + if (isHttps) { + client = https; + } else { + client = http; + } + const prefix = url.pathname !== "/" ? url.pathname : ""; + const req = client.request(Object.assign(Object.assign({}, options ? options : {}), { method: "POST", protocol: url.protocol, host: url.hostname, port: url.port ? url.port : isHttps ? 443 : 80, path: `${prefix}/${service}/${method}`, headers: Object.assign(Object.assign({}, options.headers ? options.headers : {}), { "Content-Type": contentType, "Content-Length": contentType === "application/protobuf" ? 
Buffer.byteLength(requestData) : Buffer.from(requestData).byteLength }) }), (res) => { + res.on("data", (chunk) => responseChunks.push(chunk)); + res.on("end", () => { + const data2 = Buffer.concat(responseChunks); + if (res.statusCode != 200) { + rejected(wrapErrorResponseToTwirpError(data2.toString())); + } else { + if (contentType === "application/json") { + resolve(JSON.parse(data2.toString())); + } else { + resolve(data2); + } + } + }); + res.on("error", (err) => { + rejected(err); + }); + }).on("error", (err) => { + rejected(err); }); - throw e; - } finally { - span.end(); - } + req.end(requestData); + }); } - /** - * The Filter Blobs operation enables callers to list blobs across all containers whose tags - * match a given search expression. Filter blobs searches across all containers within a - * storage account but can be scoped within the expression to a single container. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. - */ - async findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options); - try { - const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker: marker2, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { - var _a; - let tagValue = ""; - if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { - tagValue = blob.tags.blobTagSet[0].value; + }), "NodeHttpRPC"); + exports2.NodeHttpRPC = NodeHttpRPC; + function wrapErrorResponseToTwirpError(errorResponse) { + return errors_1.TwirpError.fromObject(JSON.parse(errorResponse)); + } + __name(wrapErrorResponseToTwirpError, "wrapErrorResponseToTwirpError"); + exports2.wrapErrorResponseToTwirpError = wrapErrorResponseToTwirpError; + var FetchRPC = /* @__PURE__ */ __name((options) => ({ + request(service, method, contentType, data) { + return __awaiter3(this, void 0, void 0, function* () { + const headers = new Headers(options.headers); + headers.set("content-type", contentType); + const response = yield fetch(`${options.baseUrl}/${service}/${method}`, Object.assign(Object.assign({}, options), { method: "POST", headers, body: data instanceof Uint8Array ? 
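        /* editor's note: protobuf payloads arrive as raw bytes and are sent as-is,
           while JSON payloads are serialized just before the request goes out */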
data : JSON.stringify(data) })); + if (response.status === 200) { + if (contentType === "application/json") { + return yield response.json(); } - return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); - }) }); - return wrappedResponse; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - /** - * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. - */ - findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* findBlobsByTagsSegments_1() { - let response; - if (!!marker2 || marker2 === void 0) { - do { - response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options)); - response.blobs = response.blobs || []; - marker2 = response.continuationToken; - yield yield tslib.__await(response); - } while (marker2); + return new Uint8Array(yield response.arrayBuffer()); } - }, "findBlobsByTagsSegments_1")); + throw errors_1.TwirpError.fromObject(yield response.json()); + }); } - /** - * Returns an AsyncIterableIterator for blobs. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to findBlobsByTagsItems. - */ - findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* findBlobsByTagsItems_1() { - var e_1, _a; - let marker2; + }), "FetchRPC"); + exports2.FetchRPC = FetchRPC; + } +}); + +// ../node_modules/twirp-ts/build/twirp/gateway.js +var require_gateway = __commonJS({ + "../node_modules/twirp-ts/build/twirp/gateway.js"(exports2) { + "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? 
function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + } + __setModuleDefault3(result, mod); + return result; + }; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { try { - for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { - const segment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); - } - } catch (e_1_1) { - e_1 = { error: e_1_1 }; - } finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); - } finally { - if (e_1) throw e_1.error; - } + step(generator.next(value)); + } catch (e) { + reject(e); } - }, "findBlobsByTagsItems_1")); + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + var __rest2 = exports2 && exports2.__rest || function(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Gateway = exports2.Pattern = void 0; + var querystring_1 = require("querystring"); + var dotObject = __importStar3(require_dot_object()); + var request_1 = require_request3(); + var errors_1 = require_errors3(); + var http_client_1 = require_http_client(); + var server_1 = require_server(); + var Pattern; + (function(Pattern2) { + Pattern2["POST"] = "post"; + Pattern2["GET"] = "get"; + Pattern2["PATCH"] = "patch"; + Pattern2["PUT"] = "put"; + Pattern2["DELETE"] = "delete"; + })(Pattern = exports2.Pattern || (exports2.Pattern = {})); + var Gateway = class { + static { + __name(this, "Gateway"); + } + constructor(routes) { + this.routes = routes; } /** - * Returns an async iterable iterator to find all blobs with specified tag - * under the specified account. - * - * .byPage() returns an async iterable iterator to list the blobs in pages. 
- * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties - * - * Example using `for await` syntax: - * - * ```js - * let i = 1; - * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { - * console.log(`Blob ${i++}: ${container.name}`); - * } - * ``` - * - * Example using `iter.next()`: - * - * ```js - * let i = 1; - * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); - * let blobItem = await iter.next(); - * while (!blobItem.done) { - * console.log(`Blob ${i++}: ${blobItem.value.name}`); - * blobItem = await iter.next(); - * } - * ``` - * - * Example using `byPage()`: - * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * } - * ``` - * - * Example using paging with a marker: - * - * ```js - * let i = 1; - * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * - * // Prints 2 blob names - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * - * // Gets next marker - * let marker = response.continuationToken; - * // Passing next marker as continuationToken - * iterator = blobServiceClient - * .findBlobsByTags("tagkey='tagvalue'") - * .byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; - * - * // Prints blob names - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * ``` - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to find blobs by tags. 
+ * Middleware that rewrite the current request + * to a Twirp compliant request */ - findBlobsByTags(tagFilterSqlExpression, options = {}) { - const listSegmentOptions = Object.assign({}, options); - const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: /* @__PURE__ */ __name((settings = {}) => { - return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - }, "byPage") + twirpRewrite(prefix = "/twirp") { + return (req, resp, next) => { + this.rewrite(req, resp, prefix).then(() => next()).catch((e) => { + if (e instanceof errors_1.TwirpError) { + if (e.code !== errors_1.TwirpErrorCode.NotFound) { + server_1.writeError(resp, e); + } else { + next(); + } + } + }); }; } /** - * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses - * - * @param marker - A string value that identifies the portion of - * the list of containers to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all containers remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to list containers operation. + * Rewrite an incoming request to a Twirp compliant request + * @param req + * @param resp + * @param prefix */ - listSegments(marker2, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listSegments_1() { - let listContainersSegmentResponse; - if (!!marker2 || marker2 === void 0) { - do { - listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker2, options)); - listContainersSegmentResponse.containerItems = listContainersSegmentResponse.containerItems || []; - marker2 = listContainersSegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); - } while (marker2); + rewrite(req, resp, prefix = "/twirp") { + return __awaiter3(this, void 0, void 0, function* () { + const [match, route] = this.matchRoute(req); + const body = yield this.prepareTwirpBody(req, match, route); + const twirpUrl = `${prefix}/${route.packageName}.${route.serviceName}/${route.methodName}`; + req.url = twirpUrl; + req.originalUrl = twirpUrl; + req.method = "POST"; + req.headers["content-type"] = "application/json"; + req.rawBody = Buffer.from(JSON.stringify(body)); + if (route.responseBodyKey) { + const endFn = resp.end.bind(resp); + resp.end = function(chunk) { + if (resp.statusCode === 200) { + endFn(`{ "${route.responseBodyKey}": ${chunk} }`); + } else { + endFn(chunk); + } + }; } - }, "listSegments_1")); + }); } /** - * Returns an AsyncIterableIterator for Container Items - * - * @param options - Options to list containers operation. 
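+     * Usage sketch (editorial; `app` is an assumed Express-style server and
+     * `gateway` an instance built from generated twirp-ts routes):
+     *
+     * ```js
+     * // turns matching REST calls into POST /twirp/<pkg>.<Service>/<method>;
+     * // unmatched requests fall through to the next handler
+     * app.use(gateway.twirpRewrite());
+     * ```
+     *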
+ * Create a reverse proxy handler to + * proxy http requests to Twirp Compliant handlers + * @param httpClientOption */ - listItems(options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listItems_1() { - var e_2, _a; - let marker2; + reverseProxy(httpClientOption) { + const client = http_client_1.NodeHttpRPC(httpClientOption); + return (req, res) => __awaiter3(this, void 0, void 0, function* () { try { - for (var _b = tslib.__asyncValues(this.listSegments(marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { - const segment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); + const [match, route] = this.matchRoute(req); + const body = yield this.prepareTwirpBody(req, match, route); + const response = yield client.request(`${route.packageName}.${route.serviceName}`, route.methodName, "application/json", body); + res.statusCode = 200; + res.setHeader("content-type", "application/json"); + let jsonResponse; + if (route.responseBodyKey) { + jsonResponse = JSON.stringify({ [route.responseBodyKey]: response }); + } else { + jsonResponse = JSON.stringify(response); } - } catch (e_2_1) { - e_2 = { error: e_2_1 }; - } finally { + res.end(jsonResponse); + } catch (e) { + server_1.writeError(res, e); + } + }); + } + /** + * Prepares twirp body requests using http.google.annotions + * compliant spec + * + * @param req + * @param match + * @param route + * @protected + */ + prepareTwirpBody(req, match, route) { + return __awaiter3(this, void 0, void 0, function* () { + const _a = match.params, { query_string } = _a, params = __rest2(_a, ["query_string"]); + let requestBody = Object.assign({}, params); + if (query_string && route.bodyKey !== "*") { + const queryParams = this.parseQueryString(query_string); + requestBody = Object.assign(Object.assign({}, queryParams), requestBody); + } + let body = {}; + if (route.bodyKey) { + const data = yield request_1.getRequestData(req); try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); - } finally { - if (e_2) throw e_2.error; + const jsonBody = JSON.parse(data.toString() || "{}"); + if (route.bodyKey === "*") { + body = jsonBody; + } else { + body[route.bodyKey] = jsonBody; + } + } catch (e) { + const msg = "the json request could not be decoded"; + throw new errors_1.TwirpError(errors_1.TwirpErrorCode.Malformed, msg).withCause(e, true); } } - }, "listItems_1")); + return Object.assign(Object.assign({}, body), requestBody); + }); } /** - * Returns an async iterable iterator to list all the containers - * under the specified account. - * - * .byPage() returns an async iterable iterator to list the containers in pages. 
- * - * Example using `for await` syntax: - * - * ```js - * let i = 1; - * for await (const container of blobServiceClient.listContainers()) { - * console.log(`Container ${i++}: ${container.name}`); - * } - * ``` - * - * Example using `iter.next()`: - * - * ```js - * let i = 1; - * const iter = blobServiceClient.listContainers(); - * let containerItem = await iter.next(); - * while (!containerItem.done) { - * console.log(`Container ${i++}: ${containerItem.value.name}`); - * containerItem = await iter.next(); - * } - * ``` - * - * Example using `byPage()`: - * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { - * if (response.containerItems) { - * for (const container of response.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); - * } - * } - * } - * ``` - * - * Example using paging with a marker: - * - * ```js - * let i = 1; - * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * - * // Prints 2 container names - * if (response.containerItems) { - * for (const container of response.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); - * } - * } - * - * // Gets next marker - * let marker = response.continuationToken; - * // Passing next marker as continuationToken - * iterator = blobServiceClient - * .listContainers() - * .byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; - * - * // Prints 10 container names - * if (response.containerItems) { - * for (const container of response.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); - * } - * } - * ``` - * - * @param options - Options to list containers. - * @returns An asyncIterableIterator that supports paging. + * Matches a route + * @param req */ - listContainers(options = {}) { - if (options.prefix === "") { - options.prefix = void 0; - } - const include2 = []; - if (options.includeDeleted) { - include2.push("deleted"); - } - if (options.includeMetadata) { - include2.push("metadata"); + matchRoute(req) { + var _a; + const httpMethod = (_a = req.method) === null || _a === void 0 ? void 0 : _a.toLowerCase(); + if (!httpMethod) { + throw new errors_1.BadRouteError(`method not allowed`, req.method || "", req.url || ""); } - if (options.includeSystem) { - include2.push("system"); + const routes = this.routes[httpMethod]; + for (const route of routes) { + const match = route.matcher(req.url || "/"); + if (match) { + return [match, route]; + } } - const listSegmentOptions = Object.assign(Object.assign({}, options), include2.length > 0 ? { include: include2 } : {}); - const iter = this.listItems(listSegmentOptions); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: /* @__PURE__ */ __name((settings = {}) => { - return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - }, "byPage") - }; + throw new errors_1.NotFoundError(`url ${req.url} not found`); } /** - * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). 
- * - * Retrieves a user delegation key for the Blob service. This is only a valid operation when using - * bearer token authentication. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key - * - * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time - * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time + * Parse query string + * @param queryString */ - async getUserDelegationKey(startsOn, expiresOn2, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options); - try { - const response = await this.serviceContext.getUserDelegationKey({ - startsOn: truncatedISO8061Date(startsOn, false), - expiresOn: truncatedISO8061Date(expiresOn2, false) - }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - const userDelegationKey = { - signedObjectId: response.signedObjectId, - signedTenantId: response.signedTenantId, - signedStartsOn: new Date(response.signedStartsOn), - signedExpiresOn: new Date(response.signedExpiresOn), - signedService: response.signedService, - signedVersion: response.signedVersion, - value: response.value - }; - const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); - return res; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + parseQueryString(queryString) { + const queryParams = querystring_1.parse(queryString.replace("?", "")); + return dotObject.object(queryParams); + } + }; + exports2.Gateway = Gateway; + } +}); + +// ../node_modules/twirp-ts/build/twirp/index.js +var require_twirp = __commonJS({ + "../node_modules/twirp-ts/build/twirp/index.js"(exports2) { + "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? 
function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __exportStar2 = exports2 && exports2.__exportStar || function(m, exports3) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports3, p)) __createBinding3(exports3, m, p); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.TwirpContentType = void 0; + __exportStar2(require_context(), exports2); + __exportStar2(require_server(), exports2); + __exportStar2(require_interceptors(), exports2); + __exportStar2(require_hooks(), exports2); + __exportStar2(require_errors3(), exports2); + __exportStar2(require_gateway(), exports2); + __exportStar2(require_http_client(), exports2); + var request_1 = require_request3(); + Object.defineProperty(exports2, "TwirpContentType", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return request_1.TwirpContentType; + }, "get") }); + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/json-typings.js +var require_json_typings = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/json-typings.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.isJsonObject = exports2.typeofJsonValue = void 0; + function typeofJsonValue(value) { + let t = typeof value; + if (t == "object") { + if (Array.isArray(value)) + return "array"; + if (value === null) + return "null"; + } + return t; + } + __name(typeofJsonValue, "typeofJsonValue"); + exports2.typeofJsonValue = typeofJsonValue; + function isJsonObject(value) { + return value !== null && typeof value == "object" && !Array.isArray(value); + } + __name(isJsonObject, "isJsonObject"); + exports2.isJsonObject = isJsonObject; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/base64.js +var require_base642 = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/base64.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.base64encode = exports2.base64decode = void 0; + var encTable = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split(""); + var decTable = []; + for (let i = 0; i < encTable.length; i++) + decTable[encTable[i].charCodeAt(0)] = i; + decTable["-".charCodeAt(0)] = encTable.indexOf("+"); + decTable["_".charCodeAt(0)] = encTable.indexOf("/"); + function base64decode(base64Str) { + let es = base64Str.length * 3 / 4; + if (base64Str[base64Str.length - 2] == "=") + es -= 2; + else if (base64Str[base64Str.length - 1] == "=") + es -= 1; + let bytes = new Uint8Array(es), bytePos = 0, groupPos = 0, b, p = 0; + for (let i = 0; i < base64Str.length; i++) { + b = decTable[base64Str.charCodeAt(i)]; + if (b === void 0) { + switch (base64Str[i]) { + case "=": + groupPos = 0; + // reset state when padding found + case "\n": + case "\r": + case " ": + case " ": + continue; + // skip white-space, and padding + default: + throw Error(`invalid base64 string.`); + } + } + switch (groupPos) { + case 0: + p = b; + groupPos = 1; + break; + case 1: + bytes[bytePos++] = p << 2 | (b & 48) >> 4; + p = b; + groupPos = 2; + break; + case 2: + bytes[bytePos++] = (p & 15) << 4 | (b & 60) >> 2; + p = b; + groupPos = 3; + break; + case 3: + bytes[bytePos++] = (p & 3) << 6 | b; + groupPos = 0; + break; } } - /** - * Creates a 
BlobBatchClient object to conduct batch operations. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch - * - * @returns A new BlobBatchClient object for this service. - */ - getBlobBatchClient() { - return new BlobBatchClient(this.url, this.pipeline); + if (groupPos == 1) + throw Error(`invalid base64 string.`); + return bytes.subarray(0, bytePos); + } + __name(base64decode, "base64decode"); + exports2.base64decode = base64decode; + function base64encode(bytes) { + let base64 = "", groupPos = 0, b, p = 0; + for (let i = 0; i < bytes.length; i++) { + b = bytes[i]; + switch (groupPos) { + case 0: + base64 += encTable[b >> 2]; + p = (b & 3) << 4; + groupPos = 1; + break; + case 1: + base64 += encTable[p | b >> 4]; + p = (b & 15) << 2; + groupPos = 2; + break; + case 2: + base64 += encTable[p | b >> 6]; + base64 += encTable[b & 63]; + groupPos = 0; + break; + } } - /** - * Only available for BlobServiceClient constructed with a shared key credential. - * - * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the shared key credential of the client. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas - * - * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. - * @param permissions - Specifies the list of permissions to be associated with the SAS. - * @param resourceTypes - Specifies the resource types associated with the shared access signature. - * @param options - Optional parameters. - * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
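-   * Example (editor's sketch, assuming the client was constructed with a
-   * StorageSharedKeyCredential):
-   *
-   * ```js
-   * const oneHourFromNow = new Date(Date.now() + 3600 * 1000);
-   * const sasUrl = blobServiceClient.generateAccountSasUrl(
-   *   oneHourFromNow,
-   *   AccountSASPermissions.parse("r"),
-   *   "sco"
-   * );
-   * ```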
- */ - generateAccountSasUrl(expiresOn2, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { - if (!(this.credential instanceof StorageSharedKeyCredential)) { - throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); + if (groupPos) { + base64 += encTable[p]; + base64 += "="; + if (groupPos == 1) + base64 += "="; + } + return base64; + } + __name(base64encode, "base64encode"); + exports2.base64encode = base64encode; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/protobufjs-utf8.js +var require_protobufjs_utf8 = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/protobufjs-utf8.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.utf8read = void 0; + var fromCharCodes = /* @__PURE__ */ __name((chunk) => String.fromCharCode.apply(String, chunk), "fromCharCodes"); + function utf8read(bytes) { + if (bytes.length < 1) + return ""; + let pos = 0, parts = [], chunk = [], i = 0, t; + let len = bytes.length; + while (pos < len) { + t = bytes[pos++]; + if (t < 128) + chunk[i++] = t; + else if (t > 191 && t < 224) + chunk[i++] = (t & 31) << 6 | bytes[pos++] & 63; + else if (t > 239 && t < 365) { + t = ((t & 7) << 18 | (bytes[pos++] & 63) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63) - 65536; + chunk[i++] = 55296 + (t >> 10); + chunk[i++] = 56320 + (t & 1023); + } else + chunk[i++] = (t & 15) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63; + if (i > 8191) { + parts.push(fromCharCodes(chunk)); + i = 0; } - if (expiresOn2 === void 0) { - const now = /* @__PURE__ */ new Date(); - expiresOn2 = new Date(now.getTime() + 3600 * 1e3); + } + if (parts.length) { + if (i) + parts.push(fromCharCodes(chunk.slice(0, i))); + return parts.join(""); + } + return fromCharCodes(chunk.slice(0, i)); + } + __name(utf8read, "utf8read"); + exports2.utf8read = utf8read; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/binary-format-contract.js +var require_binary_format_contract = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/binary-format-contract.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.WireType = exports2.mergeBinaryOptions = exports2.UnknownFieldHandler = void 0; + var UnknownFieldHandler; + (function(UnknownFieldHandler2) { + UnknownFieldHandler2.symbol = Symbol.for("protobuf-ts/unknown"); + UnknownFieldHandler2.onRead = (typeName, message, fieldNo, wireType, data) => { + let container = is(message) ? message[UnknownFieldHandler2.symbol] : message[UnknownFieldHandler2.symbol] = []; + container.push({ no: fieldNo, wireType, data }); + }; + UnknownFieldHandler2.onWrite = (typeName, message, writer) => { + for (let { no, wireType, data } of UnknownFieldHandler2.list(message)) + writer.tag(no, wireType).raw(data); + }; + UnknownFieldHandler2.list = (message, fieldNo) => { + if (is(message)) { + let all = message[UnknownFieldHandler2.symbol]; + return fieldNo ? 
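        /* editor's note: with a field number, only that field's unknown entries
           are returned; without one, the whole recorded list is */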
all.filter((uf) => uf.no == fieldNo) : all; } - const sas = generateAccountSASQueryParameters(Object.assign({ - permissions, - expiresOn: expiresOn2, - resourceTypes, - services: AccountSASServices.parse("b").toString() - }, options), this.credential).toString(); - return appendToURLQuery(this.url, sas); - } - }; - exports2.KnownEncryptionAlgorithmType = void 0; - (function(KnownEncryptionAlgorithmType) { - KnownEncryptionAlgorithmType["AES256"] = "AES256"; - })(exports2.KnownEncryptionAlgorithmType || (exports2.KnownEncryptionAlgorithmType = {})); - Object.defineProperty(exports2, "BaseRequestPolicy", { - enumerable: true, - get: /* @__PURE__ */ __name(function() { - return coreHttp.BaseRequestPolicy; - }, "get") - }); - Object.defineProperty(exports2, "HttpHeaders", { - enumerable: true, - get: /* @__PURE__ */ __name(function() { - return coreHttp.HttpHeaders; - }, "get") - }); - Object.defineProperty(exports2, "RequestPolicyOptions", { - enumerable: true, - get: /* @__PURE__ */ __name(function() { - return coreHttp.RequestPolicyOptions; - }, "get") - }); - Object.defineProperty(exports2, "RestError", { - enumerable: true, - get: /* @__PURE__ */ __name(function() { - return coreHttp.RestError; - }, "get") - }); - Object.defineProperty(exports2, "WebResource", { - enumerable: true, - get: /* @__PURE__ */ __name(function() { - return coreHttp.WebResource; - }, "get") - }); - Object.defineProperty(exports2, "deserializationPolicy", { - enumerable: true, - get: /* @__PURE__ */ __name(function() { - return coreHttp.deserializationPolicy; - }, "get") - }); - exports2.AccountSASPermissions = AccountSASPermissions; - exports2.AccountSASResourceTypes = AccountSASResourceTypes; - exports2.AccountSASServices = AccountSASServices; - exports2.AnonymousCredential = AnonymousCredential; - exports2.AnonymousCredentialPolicy = AnonymousCredentialPolicy; - exports2.AppendBlobClient = AppendBlobClient; - exports2.BlobBatch = BlobBatch; - exports2.BlobBatchClient = BlobBatchClient; - exports2.BlobClient = BlobClient; - exports2.BlobLeaseClient = BlobLeaseClient; - exports2.BlobSASPermissions = BlobSASPermissions; - exports2.BlobServiceClient = BlobServiceClient; - exports2.BlockBlobClient = BlockBlobClient; - exports2.ContainerClient = ContainerClient; - exports2.ContainerSASPermissions = ContainerSASPermissions; - exports2.Credential = Credential; - exports2.CredentialPolicy = CredentialPolicy; - exports2.PageBlobClient = PageBlobClient; - exports2.Pipeline = Pipeline; - exports2.SASQueryParameters = SASQueryParameters; - exports2.StorageBrowserPolicy = StorageBrowserPolicy; - exports2.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; - exports2.StorageOAuthScopes = StorageOAuthScopes; - exports2.StorageRetryPolicy = StorageRetryPolicy; - exports2.StorageRetryPolicyFactory = StorageRetryPolicyFactory; - exports2.StorageSharedKeyCredential = StorageSharedKeyCredential; - exports2.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; - exports2.generateAccountSASQueryParameters = generateAccountSASQueryParameters; - exports2.generateBlobSASQueryParameters = generateBlobSASQueryParameters; - exports2.getBlobServiceAccountAudience = getBlobServiceAccountAudience; - exports2.isPipelineLike = isPipelineLike; - exports2.logger = logger; - exports2.newPipeline = newPipeline; + return []; + }; + UnknownFieldHandler2.last = (message, fieldNo) => UnknownFieldHandler2.list(message, fieldNo).slice(-1)[0]; + const is = /* @__PURE__ */ __name((message) => message && 
Array.isArray(message[UnknownFieldHandler2.symbol]), "is"); + })(UnknownFieldHandler = exports2.UnknownFieldHandler || (exports2.UnknownFieldHandler = {})); + function mergeBinaryOptions(a, b) { + return Object.assign(Object.assign({}, a), b); + } + __name(mergeBinaryOptions, "mergeBinaryOptions"); + exports2.mergeBinaryOptions = mergeBinaryOptions; + var WireType; + (function(WireType2) { + WireType2[WireType2["Varint"] = 0] = "Varint"; + WireType2[WireType2["Bit64"] = 1] = "Bit64"; + WireType2[WireType2["LengthDelimited"] = 2] = "LengthDelimited"; + WireType2[WireType2["StartGroup"] = 3] = "StartGroup"; + WireType2[WireType2["EndGroup"] = 4] = "EndGroup"; + WireType2[WireType2["Bit32"] = 5] = "Bit32"; + })(WireType = exports2.WireType || (exports2.WireType = {})); } }); -// ../node_modules/@actions/cache/lib/internal/shared/errors.js -var require_errors2 = __commonJS({ - "../node_modules/@actions/cache/lib/internal/shared/errors.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/goog-varint.js +var require_goog_varint = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/goog-varint.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.UsageError = exports2.NetworkError = exports2.GHESNotSupportedError = exports2.CacheNotFoundError = exports2.InvalidResponseError = exports2.FilesNotFoundError = void 0; - var FilesNotFoundError = class extends Error { - static { - __name(this, "FilesNotFoundError"); + exports2.varint32read = exports2.varint32write = exports2.int64toString = exports2.int64fromString = exports2.varint64write = exports2.varint64read = void 0; + function varint64read() { + let lowBits = 0; + let highBits = 0; + for (let shift = 0; shift < 28; shift += 7) { + let b = this.buf[this.pos++]; + lowBits |= (b & 127) << shift; + if ((b & 128) == 0) { + this.assertBounds(); + return [lowBits, highBits]; + } } - constructor(files = []) { - let message = "No files were found to upload"; - if (files.length > 0) { - message += `: ${files.join(", ")}`; + let middleByte = this.buf[this.pos++]; + lowBits |= (middleByte & 15) << 28; + highBits = (middleByte & 112) >> 4; + if ((middleByte & 128) == 0) { + this.assertBounds(); + return [lowBits, highBits]; + } + for (let shift = 3; shift <= 31; shift += 7) { + let b = this.buf[this.pos++]; + highBits |= (b & 127) << shift; + if ((b & 128) == 0) { + this.assertBounds(); + return [lowBits, highBits]; } - super(message); - this.files = files; - this.name = "FilesNotFoundError"; } - }; - exports2.FilesNotFoundError = FilesNotFoundError; - var InvalidResponseError = class extends Error { - static { - __name(this, "InvalidResponseError"); + throw new Error("invalid varint"); + } + __name(varint64read, "varint64read"); + exports2.varint64read = varint64read; + function varint64write(lo, hi, bytes) { + for (let i = 0; i < 28; i = i + 7) { + const shift = lo >>> i; + const hasNext = !(shift >>> 7 == 0 && hi == 0); + const byte = (hasNext ? shift | 128 : shift) & 255; + bytes.push(byte); + if (!hasNext) { + return; + } } - constructor(message) { - super(message); - this.name = "InvalidResponseError"; + const splitBits = lo >>> 28 & 15 | (hi & 7) << 4; + const hasMoreBits = !(hi >> 3 == 0); + bytes.push((hasMoreBits ? 
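      /* editor's note: this middle byte packs the low word's top 4 bits with the
         high word's low 3 bits; 0x80 is set while more bytes follow */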
splitBits | 128 : splitBits) & 255); + if (!hasMoreBits) { + return; } - }; - exports2.InvalidResponseError = InvalidResponseError; - var CacheNotFoundError = class extends Error { - static { - __name(this, "CacheNotFoundError"); + for (let i = 3; i < 31; i = i + 7) { + const shift = hi >>> i; + const hasNext = !(shift >>> 7 == 0); + const byte = (hasNext ? shift | 128 : shift) & 255; + bytes.push(byte); + if (!hasNext) { + return; + } } - constructor(message = "Cache not found") { - super(message); - this.name = "CacheNotFoundError"; + bytes.push(hi >>> 31 & 1); + } + __name(varint64write, "varint64write"); + exports2.varint64write = varint64write; + var TWO_PWR_32_DBL = (1 << 16) * (1 << 16); + function int64fromString(dec) { + let minus = dec[0] == "-"; + if (minus) + dec = dec.slice(1); + const base = 1e6; + let lowBits = 0; + let highBits = 0; + function add1e6digit(begin, end) { + const digit1e6 = Number(dec.slice(begin, end)); + highBits *= base; + lowBits = lowBits * base + digit1e6; + if (lowBits >= TWO_PWR_32_DBL) { + highBits = highBits + (lowBits / TWO_PWR_32_DBL | 0); + lowBits = lowBits % TWO_PWR_32_DBL; + } } - }; - exports2.CacheNotFoundError = CacheNotFoundError; - var GHESNotSupportedError = class extends Error { - static { - __name(this, "GHESNotSupportedError"); + __name(add1e6digit, "add1e6digit"); + add1e6digit(-24, -18); + add1e6digit(-18, -12); + add1e6digit(-12, -6); + add1e6digit(-6); + return [minus, lowBits, highBits]; + } + __name(int64fromString, "int64fromString"); + exports2.int64fromString = int64fromString; + function int64toString(bitsLow, bitsHigh) { + if (bitsHigh >>> 0 <= 2097151) { + return "" + (TWO_PWR_32_DBL * bitsHigh + (bitsLow >>> 0)); } - constructor(message = "@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.") { - super(message); - this.name = "GHESNotSupportedError"; + let low = bitsLow & 16777215; + let mid = (bitsLow >>> 24 | bitsHigh << 8) >>> 0 & 16777215; + let high = bitsHigh >> 16 & 65535; + let digitA = low + mid * 6777216 + high * 6710656; + let digitB = mid + high * 8147497; + let digitC = high * 2; + let base = 1e7; + if (digitA >= base) { + digitB += Math.floor(digitA / base); + digitA %= base; } - }; - exports2.GHESNotSupportedError = GHESNotSupportedError; - var NetworkError = class extends Error { - static { - __name(this, "NetworkError"); + if (digitB >= base) { + digitC += Math.floor(digitB / base); + digitB %= base; } - constructor(code) { - const message = `Unable to make request: ${code} -If you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`; - super(message); - this.code = code; - this.name = "NetworkError"; + function decimalFrom1e7(digit1e7, needLeadingZeros) { + let partial = digit1e7 ? 
String(digit1e7) : ""; + if (needLeadingZeros) { + return "0000000".slice(partial.length) + partial; + } + return partial; } - }; - exports2.NetworkError = NetworkError; - NetworkError.isNetworkErrorCode = (code) => { - if (!code) - return false; - return [ - "ECONNRESET", - "ENOTFOUND", - "ETIMEDOUT", - "ECONNREFUSED", - "EHOSTUNREACH" - ].includes(code); - }; - var UsageError = class extends Error { - static { - __name(this, "UsageError"); + __name(decimalFrom1e7, "decimalFrom1e7"); + return decimalFrom1e7( + digitC, + /*needLeadingZeros=*/ + 0 + ) + decimalFrom1e7( + digitB, + /*needLeadingZeros=*/ + digitC + ) + // If the final 1e7 digit didn't need leading zeros, we would have + // returned via the trivial code path at the top. + decimalFrom1e7( + digitA, + /*needLeadingZeros=*/ + 1 + ); + } + __name(int64toString, "int64toString"); + exports2.int64toString = int64toString; + function varint32write(value, bytes) { + if (value >= 0) { + while (value > 127) { + bytes.push(value & 127 | 128); + value = value >>> 7; + } + bytes.push(value); + } else { + for (let i = 0; i < 9; i++) { + bytes.push(value & 127 | 128); + value = value >> 7; + } + bytes.push(1); } - constructor() { - const message = `Cache storage quota has been hit. Unable to upload any new cache entries. Usage is recalculated every 6-12 hours. -More info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`; - super(message); - this.name = "UsageError"; + } + __name(varint32write, "varint32write"); + exports2.varint32write = varint32write; + function varint32read() { + let b = this.buf[this.pos++]; + let result = b & 127; + if ((b & 128) == 0) { + this.assertBounds(); + return result; } - }; - exports2.UsageError = UsageError; - UsageError.isUsageErrorMessage = (msg) => { - if (!msg) - return false; - return msg.includes("insufficient usage"); - }; + b = this.buf[this.pos++]; + result |= (b & 127) << 7; + if ((b & 128) == 0) { + this.assertBounds(); + return result; + } + b = this.buf[this.pos++]; + result |= (b & 127) << 14; + if ((b & 128) == 0) { + this.assertBounds(); + return result; + } + b = this.buf[this.pos++]; + result |= (b & 127) << 21; + if ((b & 128) == 0) { + this.assertBounds(); + return result; + } + b = this.buf[this.pos++]; + result |= (b & 15) << 28; + for (let readBytes = 5; (b & 128) !== 0 && readBytes < 10; readBytes++) + b = this.buf[this.pos++]; + if ((b & 128) != 0) + throw new Error("invalid varint"); + this.assertBounds(); + return result >>> 0; + } + __name(varint32read, "varint32read"); + exports2.varint32read = varint32read; } }); -// ../node_modules/@actions/cache/lib/internal/uploadUtils.js -var require_uploadUtils = __commonJS({ - "../node_modules/@actions/cache/lib/internal/uploadUtils.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/pb-long.js +var require_pb_long = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/pb-long.js"(exports2) { "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.PbLong = exports2.PbULong = exports2.detectBi = void 0; + var goog_varint_1 = require_goog_varint(); + var BI; + function detectBi() { + const dv = new DataView(new ArrayBuffer(8)); + const ok = globalThis.BigInt !== void 0 && typeof dv.getBigInt64 === "function" && typeof dv.getBigUint64 === "function" && typeof dv.setBigInt64 === "function" && typeof dv.setBigUint64 === "function"; + BI = ok ? { + MIN: BigInt("-9223372036854775808"), + MAX: BigInt("9223372036854775807"), + UMIN: BigInt("0"), + UMAX: BigInt("18446744073709551615"), + C: BigInt, + V: dv + } : void 0; + } + __name(detectBi, "detectBi"); + exports2.detectBi = detectBi; + detectBi(); + function assertBi(bi) { + if (!bi) + throw new Error("BigInt unavailable, see https://github.com/timostamm/protobuf-ts/blob/v1.0.8/MANUAL.md#bigint-support"); + } + __name(assertBi, "assertBi"); + var RE_DECIMAL_STR = /^-?[0-9]+$/; + var TWO_PWR_32_DBL = 4294967296; + var HALF_2_PWR_32 = 2147483648; + var SharedPbLong = class { + static { + __name(this, "SharedPbLong"); } - Object.defineProperty(o, k2, desc); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + /** + * Create a new instance with the given bits. + */ + constructor(lo, hi) { + this.lo = lo | 0; + this.hi = hi | 0; } - __setModuleDefault3(result, mod); - return result; - }; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); + /** + * Is this instance equal to 0? + */ + isZero() { + return this.lo == 0 && this.hi == 0; + } + /** + * Convert to a native number. + */ + toNumber() { + let result = this.hi * TWO_PWR_32_DBL + (this.lo >>> 0); + if (!Number.isSafeInteger(result)) + throw new Error("cannot convert to safe number"); + return result; } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; - var core2 = __importStar3(require_core()); - var storage_blob_1 = require_dist4(); - var errors_1 = require_errors2(); - var UploadProgress = class { + var PbULong = class _PbULong extends SharedPbLong { static { - __name(this, "UploadProgress"); + __name(this, "PbULong"); } - constructor(contentLength) { - this.contentLength = contentLength; - this.sentBytes = 0; - this.displayedComplete = false; - this.startTime = Date.now(); + /** + * Create instance from a `string`, `number` or `bigint`. + */ + static from(value) { + if (BI) + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + if (value == "") + throw new Error("string is no integer"); + value = BI.C(value); + case "number": + if (value === 0) + return this.ZERO; + value = BI.C(value); + case "bigint": + if (!value) + return this.ZERO; + if (value < BI.UMIN) + throw new Error("signed value for ulong"); + if (value > BI.UMAX) + throw new Error("ulong too large"); + BI.V.setBigUint64(0, value, true); + return new _PbULong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); + } + else + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + value = value.trim(); + if (!RE_DECIMAL_STR.test(value)) + throw new Error("string is no integer"); + let [minus, lo, hi] = goog_varint_1.int64fromString(value); + if (minus) + throw new Error("signed value for ulong"); + return new _PbULong(lo, hi); + case "number": + if (value == 0) + return this.ZERO; + if (!Number.isSafeInteger(value)) + throw new Error("number is no integer"); + if (value < 0) + throw new Error("signed value for ulong"); + return new _PbULong(value, value / TWO_PWR_32_DBL); + } + throw new Error("unknown value " + typeof value); } /** - * Sets the number of bytes sent - * - * @param sentBytes the number of bytes sent + * Convert to decimal string. */ - setSentBytes(sentBytes) { - this.sentBytes = sentBytes; + toString() { + return BI ? this.toBigInt().toString() : goog_varint_1.int64toString(this.lo, this.hi); } /** - * Returns the total number of bytes transferred. + * Convert to native bigint. */ - getTransferredBytes() { - return this.sentBytes; + toBigInt() { + assertBi(BI); + BI.V.setInt32(0, this.lo, true); + BI.V.setInt32(4, this.hi, true); + return BI.V.getBigUint64(0, true); + } + }; + exports2.PbULong = PbULong; + PbULong.ZERO = new PbULong(0, 0); + var PbLong = class _PbLong extends SharedPbLong { + static { + __name(this, "PbLong"); } /** - * Returns true if the upload is complete. + * Create instance from a `string`, `number` or `bigint`. 
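     * Example (editor's sketch):
     *
     * ```js
     * const n = PbLong.from("-1");
     * n.toString(); // "-1"
     * ```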
*/ - isDone() { - return this.getTransferredBytes() === this.contentLength; + static from(value) { + if (BI) + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + if (value == "") + throw new Error("string is no integer"); + value = BI.C(value); + case "number": + if (value === 0) + return this.ZERO; + value = BI.C(value); + case "bigint": + if (!value) + return this.ZERO; + if (value < BI.MIN) + throw new Error("signed long too small"); + if (value > BI.MAX) + throw new Error("signed long too large"); + BI.V.setBigInt64(0, value, true); + return new _PbLong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); + } + else + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + value = value.trim(); + if (!RE_DECIMAL_STR.test(value)) + throw new Error("string is no integer"); + let [minus, lo, hi] = goog_varint_1.int64fromString(value); + if (minus) { + if (hi > HALF_2_PWR_32 || hi == HALF_2_PWR_32 && lo != 0) + throw new Error("signed long too small"); + } else if (hi >= HALF_2_PWR_32) + throw new Error("signed long too large"); + let pbl = new _PbLong(lo, hi); + return minus ? pbl.negate() : pbl; + case "number": + if (value == 0) + return this.ZERO; + if (!Number.isSafeInteger(value)) + throw new Error("number is no integer"); + return value > 0 ? new _PbLong(value, value / TWO_PWR_32_DBL) : new _PbLong(-value, -value / TWO_PWR_32_DBL).negate(); + } + throw new Error("unknown value " + typeof value); } /** - * Prints the current upload stats. Once the upload completes, this will print one - * last line and then stop. + * Do we have a minus sign? */ - display() { - if (this.displayedComplete) { - return; - } - const transferredBytes = this.sentBytes; - const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); - const elapsedTime = Date.now() - this.startTime; - const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core2.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); - if (this.isDone()) { - this.displayedComplete = true; - } + isNegative() { + return (this.hi & HALF_2_PWR_32) !== 0; } /** - * Returns a function used to handle TransferProgressEvents. + * Negate two's complement. + * Invert all the bits and add one to the result. */ - onProgress() { - return (progress) => { - this.setSentBytes(progress.loadedBytes); - }; + negate() { + let hi = ~this.hi, lo = this.lo; + if (lo) + lo = ~lo + 1; + else + hi += 1; + return new _PbLong(lo, hi); } /** - * Starts the timer that displays the stats. - * - * @param delayInMs the delay between each write + * Convert to decimal string. */ - startDisplayTimer(delayInMs = 1e3) { - const displayCallback = /* @__PURE__ */ __name(() => { - this.display(); - if (!this.isDone()) { - this.timeoutHandle = setTimeout(displayCallback, delayInMs); - } - }, "displayCallback"); - this.timeoutHandle = setTimeout(displayCallback, delayInMs); + toString() { + if (BI) + return this.toBigInt().toString(); + if (this.isNegative()) { + let n = this.negate(); + return "-" + goog_varint_1.int64toString(n.lo, n.hi); + } + return goog_varint_1.int64toString(this.lo, this.hi); } /** - * Stops the timer that displays the stats. As this typically indicates the upload - * is complete, this will display one last line, unless the last line has already - * been written. + * Convert to native bigint. 
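     * Example (editor's sketch; requires runtime BigInt support):
     *
     * ```js
     * PbLong.from(-5).toBigInt(); // -5n where BigInt is available
     * ```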
*/ - stopDisplayTimer() { - if (this.timeoutHandle) { - clearTimeout(this.timeoutHandle); - this.timeoutHandle = void 0; - } - this.display(); + toBigInt() { + assertBi(BI); + BI.V.setInt32(0, this.lo, true); + BI.V.setInt32(4, this.hi, true); + return BI.V.getBigInt64(0, true); } }; - exports2.UploadProgress = UploadProgress; - function uploadCacheArchiveSDK(signedUploadURL, archivePath, options) { - var _a; - return __awaiter3(this, void 0, void 0, function* () { - const blobClient = new storage_blob_1.BlobClient(signedUploadURL); - const blockBlobClient = blobClient.getBlockBlobClient(); - const uploadProgress = new UploadProgress((_a = options === null || options === void 0 ? void 0 : options.archiveSizeBytes) !== null && _a !== void 0 ? _a : 0); - const uploadOptions = { - blockSize: options === null || options === void 0 ? void 0 : options.uploadChunkSize, - concurrency: options === null || options === void 0 ? void 0 : options.uploadConcurrency, - maxSingleShotSize: 128 * 1024 * 1024, - onProgress: uploadProgress.onProgress() - }; - try { - uploadProgress.startDisplayTimer(); - core2.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); - const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); - if (response._response.status >= 400) { - throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); - } - return response; - } catch (error) { - core2.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`); - throw error; - } finally { - uploadProgress.stopDisplayTimer(); - } - }); - } - __name(uploadCacheArchiveSDK, "uploadCacheArchiveSDK"); - exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; + exports2.PbLong = PbLong; + PbLong.ZERO = new PbLong(0, 0); } }); -// ../node_modules/@actions/cache/lib/internal/requestUtils.js -var require_requestUtils = __commonJS({ - "../node_modules/@actions/cache/lib/internal/requestUtils.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/binary-reader.js +var require_binary_reader = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/binary-reader.js"(exports2) { "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }; - } - Object.defineProperty(o, k2, desc); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? 
value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core2 = __importStar3(require_core()); - var http_client_1 = require_lib(); - var constants_1 = require_constants7(); - function isSuccessStatusCode(statusCode) { - if (!statusCode) { - return false; - } - return statusCode >= 200 && statusCode < 300; - } - __name(isSuccessStatusCode, "isSuccessStatusCode"); - exports2.isSuccessStatusCode = isSuccessStatusCode; - function isServerErrorStatusCode(statusCode) { - if (!statusCode) { - return true; - } - return statusCode >= 500; - } - __name(isServerErrorStatusCode, "isServerErrorStatusCode"); - exports2.isServerErrorStatusCode = isServerErrorStatusCode; - function isRetryableStatusCode(statusCode) { - if (!statusCode) { - return false; - } - const retryableStatusCodes = [ - http_client_1.HttpCodes.BadGateway, - http_client_1.HttpCodes.ServiceUnavailable, - http_client_1.HttpCodes.GatewayTimeout - ]; - return retryableStatusCodes.includes(statusCode); - } - __name(isRetryableStatusCode, "isRetryableStatusCode"); - exports2.isRetryableStatusCode = isRetryableStatusCode; - function sleep(milliseconds) { - return __awaiter3(this, void 0, void 0, function* () { - return new Promise((resolve) => setTimeout(resolve, milliseconds)); - }); - } - __name(sleep, "sleep"); - function retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = void 0) { - return __awaiter3(this, void 0, void 0, function* () { - let errorMessage = ""; - let attempt = 1; - while (attempt <= maxAttempts) { - let response = void 0; - let statusCode = void 0; - let isRetryable = false; - try { - response = yield method(); - } catch (error) { - if (onError) { - response = onError(error); - } - isRetryable = true; - errorMessage = error.message; - } - if (response) { - statusCode = getStatusCode(response); - if (!isServerErrorStatusCode(statusCode)) { - return response; - } - } - if (statusCode) { - isRetryable = isRetryableStatusCode(statusCode); - errorMessage = `Cache service responded with ${statusCode}`; - } - core2.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); - if (!isRetryable) { - core2.debug(`${name} - Error is not retryable`); - break; - } - yield sleep(delay); - attempt++; - } - throw Error(`${name} failed: ${errorMessage}`); - }); - } - __name(retry, "retry"); - exports2.retry = retry; - function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { - return __awaiter3(this, void 0, void 0, function* () { - return yield retry( - name, - method, - (response) => response.statusCode, - 
maxAttempts, - delay, - // If the error object contains the statusCode property, extract it and return - // an TypedResponse so it can be processed by the retry logic. - (error) => { - if (error instanceof http_client_1.HttpClientError) { - return { - statusCode: error.statusCode, - result: null, - headers: {}, - error - }; - } else { - return void 0; - } - } - ); - }); - } - __name(retryTypedResponse, "retryTypedResponse"); - exports2.retryTypedResponse = retryTypedResponse; - function retryHttpClientResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { - return __awaiter3(this, void 0, void 0, function* () { - return yield retry(name, method, (response) => response.message.statusCode, maxAttempts, delay); - }); + exports2.BinaryReader = exports2.binaryReadOptions = void 0; + var binary_format_contract_1 = require_binary_format_contract(); + var pb_long_1 = require_pb_long(); + var goog_varint_1 = require_goog_varint(); + var defaultsRead = { + readUnknownField: true, + readerFactory: /* @__PURE__ */ __name((bytes) => new BinaryReader(bytes), "readerFactory") + }; + function binaryReadOptions(options) { + return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; } - __name(retryHttpClientResponse, "retryHttpClientResponse"); - exports2.retryHttpClientResponse = retryHttpClientResponse; - } -}); - -// ../node_modules/@actions/cache/lib/internal/downloadUtils.js -var require_downloadUtils = __commonJS({ - "../node_modules/@actions/cache/lib/internal/downloadUtils.js"(exports2) { - "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }; - } - Object.defineProperty(o, k2, desc); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + __name(binaryReadOptions, "binaryReadOptions"); + exports2.binaryReadOptions = binaryReadOptions; + var BinaryReader = class { + static { + __name(this, "BinaryReader"); } - __setModuleDefault3(result, mod); - return result; - }; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); + constructor(buf, textDecoder) { + this.varint64 = goog_varint_1.varint64read; + this.uint32 = goog_varint_1.varint32read; + this.buf = buf; + this.len = buf.length; + this.pos = 0; + this.view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + this.textDecoder = textDecoder !== null && textDecoder !== void 0 ? 
textDecoder : new TextDecoder("utf-8", { + fatal: true, + ignoreBOM: true }); } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core2 = __importStar3(require_core()); - var http_client_1 = require_lib(); - var storage_blob_1 = require_dist4(); - var buffer = __importStar3(require("buffer")); - var fs2 = __importStar3(require("fs")); - var stream = __importStar3(require("stream")); - var util = __importStar3(require("util")); - var utils = __importStar3(require_cacheUtils()); - var constants_1 = require_constants7(); - var requestUtils_1 = require_requestUtils(); - var abort_controller_1 = require_dist(); - function pipeResponseToStream(response, output) { - return __awaiter3(this, void 0, void 0, function* () { - const pipeline = util.promisify(stream.pipeline); - yield pipeline(response.message, output); - }); - } - __name(pipeResponseToStream, "pipeResponseToStream"); - var DownloadProgress = class { - static { - __name(this, "DownloadProgress"); + /** + * Reads a tag - field number and wire type. + */ + tag() { + let tag = this.uint32(), fieldNo = tag >>> 3, wireType = tag & 7; + if (fieldNo <= 0 || wireType < 0 || wireType > 5) + throw new Error("illegal tag: field no " + fieldNo + " wire type " + wireType); + return [fieldNo, wireType]; } - constructor(contentLength) { - this.contentLength = contentLength; - this.segmentIndex = 0; - this.segmentSize = 0; - this.segmentOffset = 0; - this.receivedBytes = 0; - this.displayedComplete = false; - this.startTime = Date.now(); + /** + * Skip one element on the wire and return the skipped data. + * Supports WireType.StartGroup since v2.0.0-alpha.23. + */ + skip(wireType) { + let start = this.pos; + switch (wireType) { + case binary_format_contract_1.WireType.Varint: + while (this.buf[this.pos++] & 128) { + } + break; + case binary_format_contract_1.WireType.Bit64: + this.pos += 4; + case binary_format_contract_1.WireType.Bit32: + this.pos += 4; + break; + case binary_format_contract_1.WireType.LengthDelimited: + let len = this.uint32(); + this.pos += len; + break; + case binary_format_contract_1.WireType.StartGroup: + let t; + while ((t = this.tag()[1]) !== binary_format_contract_1.WireType.EndGroup) { + this.skip(t); + } + break; + default: + throw new Error("cant skip wire type " + wireType); + } + this.assertBounds(); + return this.buf.subarray(start, this.pos); } /** - * Progress to the next segment. Only call this method when the previous segment - * is complete. - * - * @param segmentSize the length of the next segment + * Throws error if position in byte array is out of range. 
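+ * A read that would overrun the buffer raises RangeError("premature EOF").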
*/ - nextSegment(segmentSize) { - this.segmentOffset = this.segmentOffset + this.segmentSize; - this.segmentIndex = this.segmentIndex + 1; - this.segmentSize = segmentSize; - this.receivedBytes = 0; - core2.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + assertBounds() { + if (this.pos > this.len) + throw new RangeError("premature EOF"); } /** - * Sets the number of bytes received for the current segment. - * - * @param receivedBytes the number of bytes received + * Read a `int32` field, a signed 32 bit varint. */ - setReceivedBytes(receivedBytes) { - this.receivedBytes = receivedBytes; + int32() { + return this.uint32() | 0; } /** - * Returns the total number of bytes transferred. + * Read a `sint32` field, a signed, zigzag-encoded 32-bit varint. */ - getTransferredBytes() { - return this.segmentOffset + this.receivedBytes; + sint32() { + let zze = this.uint32(); + return zze >>> 1 ^ -(zze & 1); } /** - * Returns true if the download is complete. + * Read a `int64` field, a signed 64-bit varint. */ - isDone() { - return this.getTransferredBytes() === this.contentLength; + int64() { + return new pb_long_1.PbLong(...this.varint64()); } /** - * Prints the current download stats. Once the download completes, this will print one - * last line and then stop. + * Read a `uint64` field, an unsigned 64-bit varint. */ - display() { - if (this.displayedComplete) { - return; - } - const transferredBytes = this.segmentOffset + this.receivedBytes; - const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); - const elapsedTime = Date.now() - this.startTime; - const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core2.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); - if (this.isDone()) { - this.displayedComplete = true; - } + uint64() { + return new pb_long_1.PbULong(...this.varint64()); } /** - * Returns a function used to handle TransferProgressEvents. + * Read a `sint64` field, a signed, zig-zag-encoded 64-bit varint. */ - onProgress() { - return (progress) => { - this.setReceivedBytes(progress.loadedBytes); - }; + sint64() { + let [lo, hi] = this.varint64(); + let s = -(lo & 1); + lo = (lo >>> 1 | (hi & 1) << 31) ^ s; + hi = hi >>> 1 ^ s; + return new pb_long_1.PbLong(lo, hi); } /** - * Starts the timer that displays the stats. - * - * @param delayInMs the delay between each write + * Read a `bool` field, a variant. */ - startDisplayTimer(delayInMs = 1e3) { - const displayCallback = /* @__PURE__ */ __name(() => { - this.display(); - if (!this.isDone()) { - this.timeoutHandle = setTimeout(displayCallback, delayInMs); - } - }, "displayCallback"); - this.timeoutHandle = setTimeout(displayCallback, delayInMs); + bool() { + let [lo, hi] = this.varint64(); + return lo !== 0 || hi !== 0; } /** - * Stops the timer that displays the stats. As this typically indicates the download - * is complete, this will display one last line, unless the last line has already - * been written. + * Read a `fixed32` field, an unsigned, fixed-length 32-bit integer. 
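+ * All fixed-width values are decoded little-endian from the backing DataView.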
*/ - stopDisplayTimer() { - if (this.timeoutHandle) { - clearTimeout(this.timeoutHandle); - this.timeoutHandle = void 0; - } - this.display(); - } - }; - exports2.DownloadProgress = DownloadProgress; - function downloadCacheHttpClient(archiveLocation, archivePath) { - return __awaiter3(this, void 0, void 0, function* () { - const writeStream = fs2.createWriteStream(archivePath); - const httpClient = new http_client_1.HttpClient("actions/cache"); - const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter3(this, void 0, void 0, function* () { - return httpClient.get(archiveLocation); - })); - downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { - downloadResponse.message.destroy(); - core2.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); - }); - yield pipeResponseToStream(downloadResponse, writeStream); - const contentLengthHeader = downloadResponse.message.headers["content-length"]; - if (contentLengthHeader) { - const expectedLength = parseInt(contentLengthHeader); - const actualLength = utils.getArchiveFileSizeInBytes(archivePath); - if (actualLength !== expectedLength) { - throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); - } - } else { - core2.debug("Unable to validate download, no Content-Length header"); - } - }); - } - __name(downloadCacheHttpClient, "downloadCacheHttpClient"); - exports2.downloadCacheHttpClient = downloadCacheHttpClient; - function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { - var _a; - return __awaiter3(this, void 0, void 0, function* () { - const archiveDescriptor = yield fs2.promises.open(archivePath, "w"); - const httpClient = new http_client_1.HttpClient("actions/cache", void 0, { - socketTimeout: options.timeoutInMs, - keepAlive: true - }); - try { - const res = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCacheMetadata", () => __awaiter3(this, void 0, void 0, function* () { - return yield httpClient.request("HEAD", archiveLocation, null, {}); - })); - const lengthHeader = res.message.headers["content-length"]; - if (lengthHeader === void 0 || lengthHeader === null) { - throw new Error("Content-Length not found on blob response"); - } - const length = parseInt(lengthHeader); - if (Number.isNaN(length)) { - throw new Error(`Could not interpret Content-Length: ${length}`); - } - const downloads = []; - const blockSize = 4 * 1024 * 1024; - for (let offset = 0; offset < length; offset += blockSize) { - const count = Math.min(blockSize, length - offset); - downloads.push({ - offset, - promiseGetter: /* @__PURE__ */ __name(() => __awaiter3(this, void 0, void 0, function* () { - return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count); - }), "promiseGetter") - }); - } - downloads.reverse(); - let actives = 0; - let bytesDownloaded = 0; - const progress = new DownloadProgress(length); - progress.startDisplayTimer(); - const progressFn = progress.onProgress(); - const activeDownloads = []; - let nextDownload; - const waitAndWrite = /* @__PURE__ */ __name(() => __awaiter3(this, void 0, void 0, function* () { - const segment = yield Promise.race(Object.values(activeDownloads)); - yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); - actives--; - delete activeDownloads[segment.offset]; - bytesDownloaded += segment.count; - progressFn({ loadedBytes: bytesDownloaded }); - }), "waitAndWrite"); - while 
(nextDownload = downloads.pop()) { - activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); - actives++; - if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) { - yield waitAndWrite(); - } - } - while (actives > 0) { - yield waitAndWrite(); - } - } finally { - httpClient.dispose(); - yield archiveDescriptor.close(); - } - }); - } - __name(downloadCacheHttpClientConcurrent, "downloadCacheHttpClientConcurrent"); - exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; - function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { - return __awaiter3(this, void 0, void 0, function* () { - const retries = 5; - let failures = 0; - while (true) { - try { - const timeout = 3e4; - const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); - if (typeof result === "string") { - throw new Error("downloadSegmentRetry failed due to timeout"); - } - return result; - } catch (err) { - if (failures >= retries) { - throw err; - } - failures++; - } - } - }); - } - __name(downloadSegmentRetry, "downloadSegmentRetry"); - function downloadSegment(httpClient, archiveLocation, offset, count) { - return __awaiter3(this, void 0, void 0, function* () { - const partRes = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCachePart", () => __awaiter3(this, void 0, void 0, function* () { - return yield httpClient.get(archiveLocation, { - Range: `bytes=${offset}-${offset + count - 1}` - }); - })); - if (!partRes.readBodyBuffer) { - throw new Error("Expected HttpClientResponse to implement readBodyBuffer"); - } - return { - offset, - count, - buffer: yield partRes.readBodyBuffer() - }; - }); - } - __name(downloadSegment, "downloadSegment"); - function downloadCacheStorageSDK(archiveLocation, archivePath, options) { - var _a; - return __awaiter3(this, void 0, void 0, function* () { - const client = new storage_blob_1.BlockBlobClient(archiveLocation, void 0, { - retryOptions: { - // Override the timeout used when downloading each 4 MB chunk - // The default is 2 min / MB, which is way too slow - tryTimeoutInMs: options.timeoutInMs - } - }); - const properties = yield client.getProperties(); - const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; - if (contentLength < 0) { - core2.debug("Unable to determine content length, downloading file with http-client..."); - yield downloadCacheHttpClient(archiveLocation, archivePath); - } else { - const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); - const downloadProgress = new DownloadProgress(contentLength); - const fd = fs2.openSync(archivePath, "w"); - try { - downloadProgress.startDisplayTimer(); - const controller = new abort_controller_1.AbortController(); - const abortSignal = controller.signal; - while (!downloadProgress.isDone()) { - const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; - const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); - downloadProgress.nextSegment(segmentSize); - const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 36e5, client.downloadToBuffer(segmentStart, segmentSize, { - abortSignal, - concurrency: options.downloadConcurrency, - onProgress: downloadProgress.onProgress() - })); - if (result === "timeout") { - controller.abort(); - throw new Error("Aborting cache download as the download time exceeded the timeout."); - } else if (Buffer.isBuffer(result)) { - fs2.writeFileSync(fd, result); - } - } - } finally { - downloadProgress.stopDisplayTimer(); - fs2.closeSync(fd); - } - } - }); - } - __name(downloadCacheStorageSDK, "downloadCacheStorageSDK"); - exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; - var promiseWithTimeout = /* @__PURE__ */ __name((timeoutMs, promise) => __awaiter3(void 0, void 0, void 0, function* () { - let timeoutHandle; - const timeoutPromise = new Promise((resolve) => { - timeoutHandle = setTimeout(() => resolve("timeout"), timeoutMs); - }); - return Promise.race([promise, timeoutPromise]).then((result) => { - clearTimeout(timeoutHandle); - return result; - }); - }), "promiseWithTimeout"); - } -}); - -// ../node_modules/@actions/cache/lib/options.js -var require_options = __commonJS({ - "../node_modules/@actions/cache/lib/options.js"(exports2) { - "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }; - } - Object.defineProperty(o, k2, desc); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? 
function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core2 = __importStar3(require_core()); - function getUploadOptions(copy) { - const result = { - useAzureSdk: false, - uploadConcurrency: 4, - uploadChunkSize: 32 * 1024 * 1024 - }; - if (copy) { - if (typeof copy.useAzureSdk === "boolean") { - result.useAzureSdk = copy.useAzureSdk; - } - if (typeof copy.uploadConcurrency === "number") { - result.uploadConcurrency = copy.uploadConcurrency; - } - if (typeof copy.uploadChunkSize === "number") { - result.uploadChunkSize = copy.uploadChunkSize; - } - } - result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; - result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core2.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core2.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core2.debug(`Upload chunk size: ${result.uploadChunkSize}`); - return result; - } - __name(getUploadOptions, "getUploadOptions"); - exports2.getUploadOptions = getUploadOptions; - function getDownloadOptions(copy) { - const result = { - useAzureSdk: false, - concurrentBlobDownloads: true, - downloadConcurrency: 8, - timeoutInMs: 3e4, - segmentTimeoutInMs: 6e5, - lookupOnly: false - }; - if (copy) { - if (typeof copy.useAzureSdk === "boolean") { - result.useAzureSdk = copy.useAzureSdk; - } - if (typeof copy.concurrentBlobDownloads === "boolean") { - result.concurrentBlobDownloads = copy.concurrentBlobDownloads; - } - if (typeof copy.downloadConcurrency === "number") { - result.downloadConcurrency = copy.downloadConcurrency; - } - if (typeof copy.timeoutInMs === "number") { - result.timeoutInMs = copy.timeoutInMs; - } - if (typeof copy.segmentTimeoutInMs === "number") { - result.segmentTimeoutInMs = copy.segmentTimeoutInMs; - } - if (typeof copy.lookupOnly === "boolean") { - result.lookupOnly = copy.lookupOnly; - } - } - const segmentDownloadTimeoutMins = process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]; - if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { - result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; + fixed32() { + return this.view.getUint32((this.pos += 4) - 4, true); } - core2.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core2.debug(`Download concurrency: ${result.downloadConcurrency}`); - core2.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core2.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core2.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core2.debug(`Lookup only: ${result.lookupOnly}`); - return result; - } - __name(getDownloadOptions, "getDownloadOptions"); - exports2.getDownloadOptions = 
getDownloadOptions; - } -}); - -// ../node_modules/@actions/cache/lib/internal/config.js -var require_config = __commonJS({ - "../node_modules/@actions/cache/lib/internal/config.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getCacheServiceURL = exports2.getCacheServiceVersion = exports2.isGhes = void 0; - function isGhes() { - const ghUrl = new URL(process.env["GITHUB_SERVER_URL"] || "https://github.com"); - const hostname = ghUrl.hostname.trimEnd().toUpperCase(); - const isGitHubHost = hostname === "GITHUB.COM"; - const isGheHost = hostname.endsWith(".GHE.COM"); - const isLocalHost = hostname.endsWith(".LOCALHOST"); - return !isGitHubHost && !isGheHost && !isLocalHost; - } - __name(isGhes, "isGhes"); - exports2.isGhes = isGhes; - function getCacheServiceVersion() { - if (isGhes()) - return "v1"; - return process.env["ACTIONS_CACHE_SERVICE_V2"] ? "v2" : "v1"; - } - __name(getCacheServiceVersion, "getCacheServiceVersion"); - exports2.getCacheServiceVersion = getCacheServiceVersion; - function getCacheServiceURL() { - const version3 = getCacheServiceVersion(); - switch (version3) { - case "v1": - return process.env["ACTIONS_CACHE_URL"] || process.env["ACTIONS_RESULTS_URL"] || ""; - case "v2": - return process.env["ACTIONS_RESULTS_URL"] || ""; - default: - throw new Error(`Unsupported cache service version: ${version3}`); + /** + * Read a `sfixed32` field, a signed, fixed-length 32-bit integer. + */ + sfixed32() { + return this.view.getInt32((this.pos += 4) - 4, true); } - } - __name(getCacheServiceURL, "getCacheServiceURL"); - exports2.getCacheServiceURL = getCacheServiceURL; - } -}); - -// ../node_modules/@actions/cache/package.json -var require_package = __commonJS({ - "../node_modules/@actions/cache/package.json"(exports2, module2) { - module2.exports = { - name: "@actions/cache", - version: "4.0.0", - preview: true, - description: "Actions cache lib", - keywords: [ - "github", - "actions", - "cache" - ], - homepage: "https://github.com/actions/toolkit/tree/main/packages/cache", - license: "MIT", - main: "lib/cache.js", - types: "lib/cache.d.ts", - directories: { - lib: "lib", - test: "__tests__" - }, - files: [ - "lib", - "!.DS_Store" - ], - publishConfig: { - access: "public" - }, - repository: { - type: "git", - url: "git+https://github.com/actions/toolkit.git", - directory: "packages/cache" - }, - scripts: { - "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", - test: 'echo "Error: run tests from root" && exit 1', - tsc: "tsc" - }, - bugs: { - url: "https://github.com/actions/toolkit/issues" - }, - dependencies: { - "@actions/core": "^1.11.1", - "@actions/exec": "^1.0.1", - "@actions/glob": "^0.1.0", - "@actions/http-client": "^2.1.1", - "@actions/io": "^1.0.1", - "@azure/abort-controller": "^1.1.0", - "@azure/ms-rest-js": "^2.6.0", - "@azure/storage-blob": "^12.13.0", - "@protobuf-ts/plugin": "^2.9.4", - semver: "^6.3.1", - "twirp-ts": "^2.5.0" - }, - devDependencies: { - "@types/semver": "^6.0.0", - typescript: "^5.2.2" + /** + * Read a `fixed64` field, an unsigned, fixed-length 64 bit integer. + */ + fixed64() { + return new pb_long_1.PbULong(this.sfixed32(), this.sfixed32()); + } + /** + * Read a `fixed64` field, a signed, fixed-length 64-bit integer. + */ + sfixed64() { + return new pb_long_1.PbLong(this.sfixed32(), this.sfixed32()); + } + /** + * Read a `float` field, 32-bit floating point number. 
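+ * Decoded little-endian as an IEEE-754 single-precision value.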
+ */ + float() { + return this.view.getFloat32((this.pos += 4) - 4, true); + } + /** + * Read a `double` field, a 64-bit floating point number. + */ + double() { + return this.view.getFloat64((this.pos += 8) - 8, true); + } + /** + * Read a `bytes` field, length-delimited arbitrary data. + */ + bytes() { + let len = this.uint32(); + let start = this.pos; + this.pos += len; + this.assertBounds(); + return this.buf.subarray(start, start + len); + } + /** + * Read a `string` field, length-delimited data converted to UTF-8 text. + */ + string() { + return this.textDecoder.decode(this.bytes()); } }; + exports2.BinaryReader = BinaryReader; } }); -// ../node_modules/@actions/cache/lib/internal/shared/user-agent.js -var require_user_agent = __commonJS({ - "../node_modules/@actions/cache/lib/internal/shared/user-agent.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/assert.js +var require_assert = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/assert.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getUserAgentString = void 0; - var packageJson = require_package(); - function getUserAgentString() { - return `@actions/cache-${packageJson.version}`; + exports2.assertFloat32 = exports2.assertUInt32 = exports2.assertInt32 = exports2.assertNever = exports2.assert = void 0; + function assert(condition, msg) { + if (!condition) { + throw new Error(msg); + } } - __name(getUserAgentString, "getUserAgentString"); - exports2.getUserAgentString = getUserAgentString; + __name(assert, "assert"); + exports2.assert = assert; + function assertNever(value, msg) { + throw new Error(msg !== null && msg !== void 0 ? msg : "Unexpected object: " + value); + } + __name(assertNever, "assertNever"); + exports2.assertNever = assertNever; + var FLOAT32_MAX = 34028234663852886e22; + var FLOAT32_MIN = -34028234663852886e22; + var UINT32_MAX = 4294967295; + var INT32_MAX = 2147483647; + var INT32_MIN = -2147483648; + function assertInt32(arg) { + if (typeof arg !== "number") + throw new Error("invalid int 32: " + typeof arg); + if (!Number.isInteger(arg) || arg > INT32_MAX || arg < INT32_MIN) + throw new Error("invalid int 32: " + arg); + } + __name(assertInt32, "assertInt32"); + exports2.assertInt32 = assertInt32; + function assertUInt32(arg) { + if (typeof arg !== "number") + throw new Error("invalid uint 32: " + typeof arg); + if (!Number.isInteger(arg) || arg > UINT32_MAX || arg < 0) + throw new Error("invalid uint 32: " + arg); + } + __name(assertUInt32, "assertUInt32"); + exports2.assertUInt32 = assertUInt32; + function assertFloat32(arg) { + if (typeof arg !== "number") + throw new Error("invalid float 32: " + typeof arg); + if (!Number.isFinite(arg)) + return; + if (arg > FLOAT32_MAX || arg < FLOAT32_MIN) + throw new Error("invalid float 32: " + arg); + } + __name(assertFloat32, "assertFloat32"); + exports2.assertFloat32 = assertFloat32; } }); -// ../node_modules/@actions/cache/lib/internal/cacheHttpClient.js -var require_cacheHttpClient = __commonJS({ - "../node_modules/@actions/cache/lib/internal/cacheHttpClient.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/binary-writer.js +var require_binary_writer = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/binary-writer.js"(exports2) { "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? 
function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }; - } - Object.defineProperty(o, k2, desc); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BinaryWriter = exports2.binaryWriteOptions = void 0; + var pb_long_1 = require_pb_long(); + var goog_varint_1 = require_goog_varint(); + var assert_1 = require_assert(); + var defaultsWrite = { + writeUnknownFields: true, + writerFactory: /* @__PURE__ */ __name(() => new BinaryWriter(), "writerFactory") }; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); + function binaryWriteOptions(options) { + return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; + } + __name(binaryWriteOptions, "binaryWriteOptions"); + exports2.binaryWriteOptions = binaryWriteOptions; + var BinaryWriter = class { + static { + __name(this, "BinaryWriter"); } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } + constructor(textEncoder) { + this.stack = []; + this.textEncoder = textEncoder !== null && textEncoder !== void 0 ? textEncoder : new TextEncoder(); + this.chunks = []; + this.buf = []; + } + /** + * Return all bytes written and reset this writer. + */ + finish() { + this.chunks.push(new Uint8Array(this.buf)); + let len = 0; + for (let i = 0; i < this.chunks.length; i++) + len += this.chunks[i].length; + let bytes = new Uint8Array(len); + let offset = 0; + for (let i = 0; i < this.chunks.length; i++) { + bytes.set(this.chunks[i], offset); + offset += this.chunks[i].length; } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } + this.chunks = []; + return bytes; + } + /** + * Start a new fork for length-delimited data like a message + * or a packed repeated field. + * + * Must be joined later with `join()`. + */ + fork() { + this.stack.push({ chunks: this.chunks, buf: this.buf }); + this.chunks = []; + this.buf = []; + return this; + } + /** + * Join the last fork. Write its length and bytes, then + * return to the previous state. + */ + join() { + let chunk = this.finish(); + let prev = this.stack.pop(); + if (!prev) + throw new Error("invalid state, fork stack empty"); + this.chunks = prev.chunks; + this.buf = prev.buf; + this.uint32(chunk.byteLength); + return this.raw(chunk); + } + /** + * Writes a tag (field number and wire type). 
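+ * e.g. field number 1 with wire type Varint (0) encodes as the single byte 0x08.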
+ * + * Equivalent to `uint32( (fieldNo << 3 | type) >>> 0 )`. + * + * Generated code should compute the tag ahead of time and call `uint32()`. + */ + tag(fieldNo, type) { + return this.uint32((fieldNo << 3 | type) >>> 0); + } + /** + * Write a chunk of raw bytes. + */ + raw(chunk) { + if (this.buf.length) { + this.chunks.push(new Uint8Array(this.buf)); + this.buf = []; } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + this.chunks.push(chunk); + return this; + } + /** + * Write a `uint32` value, an unsigned 32 bit varint. + */ + uint32(value) { + assert_1.assertUInt32(value); + while (value > 127) { + this.buf.push(value & 127 | 128); + value = value >>> 7; } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); + this.buf.push(value); + return this; + } + /** + * Write a `int32` value, a signed 32 bit varint. + */ + int32(value) { + assert_1.assertInt32(value); + goog_varint_1.varint32write(value, this.buf); + return this; + } + /** + * Write a `bool` value, a variant. + */ + bool(value) { + this.buf.push(value ? 1 : 0); + return this; + } + /** + * Write a `bytes` value, length-delimited arbitrary data. + */ + bytes(value) { + this.uint32(value.byteLength); + return this.raw(value); + } + /** + * Write a `string` value, length-delimited data converted to UTF-8 text. + */ + string(value) { + let chunk = this.textEncoder.encode(value); + this.uint32(chunk.byteLength); + return this.raw(chunk); + } + /** + * Write a `float` value, 32-bit floating point number. + */ + float(value) { + assert_1.assertFloat32(value); + let chunk = new Uint8Array(4); + new DataView(chunk.buffer).setFloat32(0, value, true); + return this.raw(chunk); + } + /** + * Write a `double` value, a 64-bit floating point number. + */ + double(value) { + let chunk = new Uint8Array(8); + new DataView(chunk.buffer).setFloat64(0, value, true); + return this.raw(chunk); + } + /** + * Write a `fixed32` value, an unsigned, fixed-length 32-bit integer. + */ + fixed32(value) { + assert_1.assertUInt32(value); + let chunk = new Uint8Array(4); + new DataView(chunk.buffer).setUint32(0, value, true); + return this.raw(chunk); + } + /** + * Write a `sfixed32` value, a signed, fixed-length 32-bit integer. + */ + sfixed32(value) { + assert_1.assertInt32(value); + let chunk = new Uint8Array(4); + new DataView(chunk.buffer).setInt32(0, value, true); + return this.raw(chunk); + } + /** + * Write a `sint32` value, a signed, zigzag-encoded 32-bit varint. + */ + sint32(value) { + assert_1.assertInt32(value); + value = (value << 1 ^ value >> 31) >>> 0; + goog_varint_1.varint32write(value, this.buf); + return this; + } + /** + * Write a `fixed64` value, a signed, fixed-length 64-bit integer. + */ + sfixed64(value) { + let chunk = new Uint8Array(8); + let view = new DataView(chunk.buffer); + let long = pb_long_1.PbLong.from(value); + view.setInt32(0, long.lo, true); + view.setInt32(4, long.hi, true); + return this.raw(chunk); + } + /** + * Write a `fixed64` value, an unsigned, fixed-length 64 bit integer. + */ + fixed64(value) { + let chunk = new Uint8Array(8); + let view = new DataView(chunk.buffer); + let long = pb_long_1.PbULong.from(value); + view.setInt32(0, long.lo, true); + view.setInt32(4, long.hi, true); + return this.raw(chunk); + } + /** + * Write a `int64` value, a signed 64-bit varint. 
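+ * Accepts a number, string, or bigint; the value is normalized via PbLong.from().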
+ */ + int64(value) { + let long = pb_long_1.PbLong.from(value); + goog_varint_1.varint64write(long.lo, long.hi, this.buf); + return this; + } + /** + * Write a `sint64` value, a signed, zig-zag-encoded 64-bit varint. + */ + sint64(value) { + let long = pb_long_1.PbLong.from(value), sign = long.hi >> 31, lo = long.lo << 1 ^ sign, hi = (long.hi << 1 | long.lo >>> 31) ^ sign; + goog_varint_1.varint64write(lo, hi, this.buf); + return this; + } + /** + * Write a `uint64` value, an unsigned 64-bit varint. + */ + uint64(value) { + let long = pb_long_1.PbULong.from(value); + goog_varint_1.varint64write(long.lo, long.hi, this.buf); + return this; + } }; + exports2.BinaryWriter = BinaryWriter; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/json-format-contract.js +var require_json_format_contract = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/json-format-contract.js"(exports2) { + "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; - var core2 = __importStar3(require_core()); - var http_client_1 = require_lib(); - var auth_1 = require_auth(); - var fs2 = __importStar3(require("fs")); - var url_1 = require("url"); - var utils = __importStar3(require_cacheUtils()); - var uploadUtils_1 = require_uploadUtils(); - var downloadUtils_1 = require_downloadUtils(); - var options_1 = require_options(); - var requestUtils_1 = require_requestUtils(); - var config_1 = require_config(); - var user_agent_1 = require_user_agent(); - function getCacheApiUrl(resource) { - const baseUrl = (0, config_1.getCacheServiceURL)(); - if (!baseUrl) { - throw new Error("Cache Service Url not found, unable to restore cache."); - } - const url = `${baseUrl}_apis/artifactcache/${resource}`; - core2.debug(`Resource Url: ${url}`); - return url; - } - __name(getCacheApiUrl, "getCacheApiUrl"); - function createAcceptHeader(type, apiVersion) { - return `${type};api-version=${apiVersion}`; - } - __name(createAcceptHeader, "createAcceptHeader"); - function getRequestOptions() { - const requestOptions = { - headers: { - Accept: createAcceptHeader("application/json", "6.0-preview.1") - } - }; - return requestOptions; - } - __name(getRequestOptions, "getRequestOptions"); - function createHttpClient() { - const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; - const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); - return new http_client_1.HttpClient((0, user_agent_1.getUserAgentString)(), [bearerCredentialHandler], getRequestOptions()); - } - __name(createHttpClient, "createHttpClient"); - function getCacheEntry(keys, paths, options) { - return __awaiter3(this, void 0, void 0, function* () { - const httpClient = createHttpClient(); - const version3 = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? 
void 0 : options.enableCrossOsArchive);
- const resource = `cache?keys=${encodeURIComponent(keys.join(","))}&version=${version3}`;
- const response = yield (0, requestUtils_1.retryTypedResponse)("getCacheEntry", () => __awaiter3(this, void 0, void 0, function* () {
- return httpClient.getJson(getCacheApiUrl(resource));
- }));
- if (response.statusCode === 204) {
- if (core2.isDebug()) {
- yield printCachesListForDiagnostics(keys[0], httpClient, version3);
- }
- return null;
- }
- if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) {
- throw new Error(`Cache service responded with ${response.statusCode}`);
- }
- const cacheResult = response.result;
- const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation;
- if (!cacheDownloadUrl) {
- throw new Error("Cache not found.");
- }
- core2.setSecret(cacheDownloadUrl);
- core2.debug(`Cache Result:`);
- core2.debug(JSON.stringify(cacheResult));
- return cacheResult;
- });
- }
- __name(getCacheEntry, "getCacheEntry");
- exports2.getCacheEntry = getCacheEntry;
- function printCachesListForDiagnostics(key, httpClient, version3) {
- return __awaiter3(this, void 0, void 0, function* () {
- const resource = `caches?key=${encodeURIComponent(key)}`;
- const response = yield (0, requestUtils_1.retryTypedResponse)("listCache", () => __awaiter3(this, void 0, void 0, function* () {
- return httpClient.getJson(getCacheApiUrl(resource));
- }));
- if (response.statusCode === 200) {
- const cacheListResult = response.result;
- const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount;
- if (totalCount && totalCount > 0) {
- core2.debug(`No matching cache found for cache key '${key}', version '${version3} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key
-Other caches with similar key:`);
- for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) {
- core2.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ?
void 0 : cacheEntry.creationTime}`); - } - } - } - }); - } - __name(printCachesListForDiagnostics, "printCachesListForDiagnostics"); - function downloadCache(archiveLocation, archivePath, options) { - return __awaiter3(this, void 0, void 0, function* () { - const archiveUrl = new url_1.URL(archiveLocation); - const downloadOptions = (0, options_1.getDownloadOptions)(options); - if (archiveUrl.hostname.endsWith(".blob.core.windows.net")) { - if (downloadOptions.useAzureSdk) { - yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); - } else if (downloadOptions.concurrentBlobDownloads) { - yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); - } else { - yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); - } - } else { - yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); - } - }); + exports2.mergeJsonOptions = exports2.jsonWriteOptions = exports2.jsonReadOptions = void 0; + var defaultsWrite = { + emitDefaultValues: false, + enumAsInteger: false, + useProtoFieldName: false, + prettySpaces: 0 + }; + var defaultsRead = { + ignoreUnknownFields: false + }; + function jsonReadOptions(options) { + return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; } - __name(downloadCache, "downloadCache"); - exports2.downloadCache = downloadCache; - function reserveCache(key, paths, options) { - return __awaiter3(this, void 0, void 0, function* () { - const httpClient = createHttpClient(); - const version3 = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); - const reserveCacheRequest = { - key, - version: version3, - cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize - }; - const response = yield (0, requestUtils_1.retryTypedResponse)("reserveCache", () => __awaiter3(this, void 0, void 0, function* () { - return httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest); - })); - return response; - }); + __name(jsonReadOptions, "jsonReadOptions"); + exports2.jsonReadOptions = jsonReadOptions; + function jsonWriteOptions(options) { + return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; } - __name(reserveCache, "reserveCache"); - exports2.reserveCache = reserveCache; - function getContentRange(start, end) { - return `bytes ${start}-${end}/*`; + __name(jsonWriteOptions, "jsonWriteOptions"); + exports2.jsonWriteOptions = jsonWriteOptions; + function mergeJsonOptions(a, b) { + var _a, _b; + let c = Object.assign(Object.assign({}, a), b); + c.typeRegistry = [...(_a = a === null || a === void 0 ? void 0 : a.typeRegistry) !== null && _a !== void 0 ? _a : [], ...(_b = b === null || b === void 0 ? void 0 : b.typeRegistry) !== null && _b !== void 0 ? 
_b : []]; + return c; } - __name(getContentRange, "getContentRange"); - function uploadChunk(httpClient, resourceUrl, openStream, start, end) { - return __awaiter3(this, void 0, void 0, function* () { - core2.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); - const additionalHeaders = { - "Content-Type": "application/octet-stream", - "Content-Range": getContentRange(start, end) - }; - const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter3(this, void 0, void 0, function* () { - return httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders); - })); - if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) { - throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); + __name(mergeJsonOptions, "mergeJsonOptions"); + exports2.mergeJsonOptions = mergeJsonOptions; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/message-type-contract.js +var require_message_type_contract = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/message-type-contract.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.MESSAGE_TYPE = void 0; + exports2.MESSAGE_TYPE = Symbol.for("protobuf-ts/message-type"); + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/lower-camel-case.js +var require_lower_camel_case = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/lower-camel-case.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.lowerCamelCase = void 0; + function lowerCamelCase(snakeCase) { + let capNext = false; + const sb = []; + for (let i = 0; i < snakeCase.length; i++) { + let next = snakeCase.charAt(i); + if (next == "_") { + capNext = true; + } else if (/\d/.test(next)) { + sb.push(next); + capNext = true; + } else if (capNext) { + sb.push(next.toUpperCase()); + capNext = false; + } else if (i == 0) { + sb.push(next.toLowerCase()); + } else { + sb.push(next); } - }); + } + return sb.join(""); } - __name(uploadChunk, "uploadChunk"); - function uploadFile(httpClient, cacheId, archivePath, options) { - return __awaiter3(this, void 0, void 0, function* () { - const fileSize = utils.getArchiveFileSizeInBytes(archivePath); - const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs2.openSync(archivePath, "r"); - const uploadOptions = (0, options_1.getUploadOptions)(options); - const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); - const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); - const parallelUploads = [...new Array(concurrency).keys()]; - core2.debug("Awaiting all uploads"); - let offset = 0; - try { - yield Promise.all(parallelUploads.map(() => __awaiter3(this, void 0, void 0, function* () { - while (offset < fileSize) { - const chunkSize = Math.min(fileSize - offset, maxChunkSize); - const start = offset; - const end = offset + chunkSize - 1; - offset += maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs2.createReadStream(archivePath, { - fd, - start, - end, - autoClose: false - }).on("error", (error) => { - throw new Error(`Cache upload failed because file read failed with ${error.message}`); - }), start, end); - } - }))); - } finally { - 
fs2.closeSync(fd); - } - return; - }); + __name(lowerCamelCase, "lowerCamelCase"); + exports2.lowerCamelCase = lowerCamelCase; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-info.js +var require_reflection_info = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-info.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.readMessageOption = exports2.readFieldOption = exports2.readFieldOptions = exports2.normalizeFieldInfo = exports2.RepeatType = exports2.LongType = exports2.ScalarType = void 0; + var lower_camel_case_1 = require_lower_camel_case(); + var ScalarType; + (function(ScalarType2) { + ScalarType2[ScalarType2["DOUBLE"] = 1] = "DOUBLE"; + ScalarType2[ScalarType2["FLOAT"] = 2] = "FLOAT"; + ScalarType2[ScalarType2["INT64"] = 3] = "INT64"; + ScalarType2[ScalarType2["UINT64"] = 4] = "UINT64"; + ScalarType2[ScalarType2["INT32"] = 5] = "INT32"; + ScalarType2[ScalarType2["FIXED64"] = 6] = "FIXED64"; + ScalarType2[ScalarType2["FIXED32"] = 7] = "FIXED32"; + ScalarType2[ScalarType2["BOOL"] = 8] = "BOOL"; + ScalarType2[ScalarType2["STRING"] = 9] = "STRING"; + ScalarType2[ScalarType2["BYTES"] = 12] = "BYTES"; + ScalarType2[ScalarType2["UINT32"] = 13] = "UINT32"; + ScalarType2[ScalarType2["SFIXED32"] = 15] = "SFIXED32"; + ScalarType2[ScalarType2["SFIXED64"] = 16] = "SFIXED64"; + ScalarType2[ScalarType2["SINT32"] = 17] = "SINT32"; + ScalarType2[ScalarType2["SINT64"] = 18] = "SINT64"; + })(ScalarType = exports2.ScalarType || (exports2.ScalarType = {})); + var LongType; + (function(LongType2) { + LongType2[LongType2["BIGINT"] = 0] = "BIGINT"; + LongType2[LongType2["STRING"] = 1] = "STRING"; + LongType2[LongType2["NUMBER"] = 2] = "NUMBER"; + })(LongType = exports2.LongType || (exports2.LongType = {})); + var RepeatType; + (function(RepeatType2) { + RepeatType2[RepeatType2["NO"] = 0] = "NO"; + RepeatType2[RepeatType2["PACKED"] = 1] = "PACKED"; + RepeatType2[RepeatType2["UNPACKED"] = 2] = "UNPACKED"; + })(RepeatType = exports2.RepeatType || (exports2.RepeatType = {})); + function normalizeFieldInfo(field) { + var _a, _b, _c, _d; + field.localName = (_a = field.localName) !== null && _a !== void 0 ? _a : lower_camel_case_1.lowerCamelCase(field.name); + field.jsonName = (_b = field.jsonName) !== null && _b !== void 0 ? _b : lower_camel_case_1.lowerCamelCase(field.name); + field.repeat = (_c = field.repeat) !== null && _c !== void 0 ? _c : RepeatType.NO; + field.opt = (_d = field.opt) !== null && _d !== void 0 ? _d : field.repeat ? false : field.oneof ? false : field.kind == "message"; + return field; } - __name(uploadFile, "uploadFile"); - function commitCache(httpClient, cacheId, filesize) { - return __awaiter3(this, void 0, void 0, function* () { - const commitCacheRequest = { size: filesize }; - return yield (0, requestUtils_1.retryTypedResponse)("commitCache", () => __awaiter3(this, void 0, void 0, function* () { - return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); - })); - }); + __name(normalizeFieldInfo, "normalizeFieldInfo"); + exports2.normalizeFieldInfo = normalizeFieldInfo; + function readFieldOptions(messageType, fieldName, extensionName, extensionType) { + var _a; + const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; + return options && options[extensionName] ? 
extensionType.fromJson(options[extensionName]) : void 0;
    }
    __name(readFieldOptions, "readFieldOptions");
    exports2.readFieldOptions = readFieldOptions;
    function readFieldOption(messageType, fieldName, extensionName, extensionType) {
      var _a;
      const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options;
      if (!options) {
        return void 0;
      }
      const optionVal = options[extensionName];
      if (optionVal === void 0) {
        return optionVal;
      }
      return extensionType ? extensionType.fromJson(optionVal) : optionVal;
    }
    __name(readFieldOption, "readFieldOption");
    exports2.readFieldOption = readFieldOption;
    function readMessageOption(messageType, extensionName, extensionType) {
      const options = messageType.options;
      const optionVal = options[extensionName];
      if (optionVal === void 0) {
        return optionVal;
      }
      return extensionType ?
extensionType.fromJson(optionVal) : optionVal; + } + __name(readMessageOption, "readMessageOption"); + exports2.readMessageOption = readMessageOption; } }); -// ../node_modules/twirp-ts/build/twirp/context.js -var require_context2 = __commonJS({ - "../node_modules/twirp-ts/build/twirp/context.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/oneof.js +var require_oneof = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/oneof.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getSelectedOneofValue = exports2.clearOneofValue = exports2.setUnknownOneofValue = exports2.setOneofValue = exports2.getOneofValue = exports2.isOneofGroup = void 0; + function isOneofGroup(any) { + if (typeof any != "object" || any === null || !any.hasOwnProperty("oneofKind")) { + return false; + } + switch (typeof any.oneofKind) { + case "string": + if (any[any.oneofKind] === void 0) + return false; + return Object.keys(any).length == 2; + case "undefined": + return Object.keys(any).length == 1; + default: + return false; + } + } + __name(isOneofGroup, "isOneofGroup"); + exports2.isOneofGroup = isOneofGroup; + function getOneofValue(oneof, kind) { + return oneof[kind]; + } + __name(getOneofValue, "getOneofValue"); + exports2.getOneofValue = getOneofValue; + function setOneofValue(oneof, kind, value) { + if (oneof.oneofKind !== void 0) { + delete oneof[oneof.oneofKind]; + } + oneof.oneofKind = kind; + if (value !== void 0) { + oneof[kind] = value; + } + } + __name(setOneofValue, "setOneofValue"); + exports2.setOneofValue = setOneofValue; + function setUnknownOneofValue(oneof, kind, value) { + if (oneof.oneofKind !== void 0) { + delete oneof[oneof.oneofKind]; + } + oneof.oneofKind = kind; + if (value !== void 0 && kind !== void 0) { + oneof[kind] = value; + } + } + __name(setUnknownOneofValue, "setUnknownOneofValue"); + exports2.setUnknownOneofValue = setUnknownOneofValue; + function clearOneofValue(oneof) { + if (oneof.oneofKind !== void 0) { + delete oneof[oneof.oneofKind]; + } + oneof.oneofKind = void 0; + } + __name(clearOneofValue, "clearOneofValue"); + exports2.clearOneofValue = clearOneofValue; + function getSelectedOneofValue(oneof) { + if (oneof.oneofKind === void 0) { + return void 0; + } + return oneof[oneof.oneofKind]; + } + __name(getSelectedOneofValue, "getSelectedOneofValue"); + exports2.getSelectedOneofValue = getSelectedOneofValue; } }); -// ../node_modules/twirp-ts/build/twirp/hooks.js -var require_hooks = __commonJS({ - "../node_modules/twirp-ts/build/twirp/hooks.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-type-check.js +var require_reflection_type_check = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-type-check.js"(exports2) { "use strict"; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? 
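For reference, a minimal usage sketch (not part of the patch) of the oneof helpers registered above; it assumes they are consumed via the @protobuf-ts/runtime package root, and the group shape is invented.

    // Sketch only: a oneof group is a plain object whose active member
    // is named by `oneofKind`.
    const { isOneofGroup, setOneofValue, getSelectedOneofValue, clearOneofValue } =
      require("@protobuf-ts/runtime");

    const result = { oneofKind: undefined };
    setOneofValue(result, "ok", "all checks passed"); // { oneofKind: "ok", ok: "..." }
    isOneofGroup(result);          // true: exactly `oneofKind` plus one value key
    getSelectedOneofValue(result); // "all checks passed"
    clearOneofValue(result);       // back to { oneofKind: undefined }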
value : new P(function(resolve) { - resolve(value); - }); + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ReflectionTypeCheck = void 0; + var reflection_info_1 = require_reflection_info(); + var oneof_1 = require_oneof(); + var ReflectionTypeCheck = class { + static { + __name(this, "ReflectionTypeCheck"); } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); + constructor(info) { + var _a; + this.fields = (_a = info.fields) !== null && _a !== void 0 ? _a : []; + } + prepare() { + if (this.data) + return; + const req = [], known = [], oneofs = []; + for (let field of this.fields) { + if (field.oneof) { + if (!oneofs.includes(field.oneof)) { + oneofs.push(field.oneof); + req.push(field.oneof); + known.push(field.oneof); + } + } else { + known.push(field.localName); + switch (field.kind) { + case "scalar": + case "enum": + if (!field.opt || field.repeat) + req.push(field.localName); + break; + case "message": + if (field.repeat) + req.push(field.localName); + break; + case "map": + req.push(field.localName); + break; + } } } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } + this.data = { req, known, oneofs: Object.values(oneofs) }; + } + /** + * Is the argument a valid message as specified by the + * reflection information? + * + * Checks all field types recursively. The `depth` + * specifies how deep into the structure the check will be. + * + * With a depth of 0, only the presence of fields + * is checked. + * + * With a depth of 1 or more, the field types are checked. + * + * With a depth of 2 or more, the members of map, repeated + * and message fields are checked. + * + * Message fields will be checked recursively with depth - 1. + * + * The number of map entries / repeated values being checked + * is < depth. + */ + is(message, depth, allowExcessProperties = false) { + if (depth < 0) + return true; + if (message === null || message === void 0 || typeof message != "object") + return false; + this.prepare(); + let keys = Object.keys(message), data = this.data; + if (keys.length < data.req.length || data.req.some((n) => !keys.includes(n))) + return false; + if (!allowExcessProperties) { + if (keys.some((k) => !data.known.includes(k))) + return false; } - __name(rejected, "rejected"); - function step(result) { - result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + if (depth < 1) { + return true; } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isHook = exports2.chainHooks = void 0; - function chainHooks(...hooks) { - if (hooks.length === 0) { - return null; - } - if (hooks.length === 1) { - return hooks[0]; + for (const name of data.oneofs) { + const group = message[name]; + if (!oneof_1.isOneofGroup(group)) + return false; + if (group.oneofKind === void 0) + continue; + const field = this.fields.find((f) => f.localName === group.oneofKind); + if (!field) + return false; + if (!this.field(group[group.oneofKind], field, allowExcessProperties, depth)) + return false; + } + for (const field of this.fields) { + if (field.oneof !== void 0) + continue; + if (!this.field(message[field.localName], field, allowExcessProperties, depth)) + return false; + } + return true; } - const serverHook = { - requestReceived(ctx) { - return __awaiter3(this, void 0, void 0, function* () { - for (const hook of hooks) { - if (!hook.requestReceived) { - continue; - } - yield hook.requestReceived(ctx); - } - }); - }, - requestPrepared(ctx) { - return __awaiter3(this, void 0, void 0, function* () { - for (const hook of hooks) { - if (!hook.requestPrepared) { - continue; - } - console.warn("hook requestPrepared is deprecated and will be removed in the next release. Please use responsePrepared instead."); - yield hook.requestPrepared(ctx); - } - }); - }, - responsePrepared(ctx) { - return __awaiter3(this, void 0, void 0, function* () { - for (const hook of hooks) { - if (!hook.responsePrepared) { - continue; - } - yield hook.responsePrepared(ctx); - } - }); - }, - requestSent(ctx) { - return __awaiter3(this, void 0, void 0, function* () { - for (const hook of hooks) { - if (!hook.requestSent) { - continue; - } - console.warn("hook requestSent is deprecated and will be removed in the next release. 
Please use responseSent instead."); - yield hook.requestSent(ctx); - } - }); - }, - responseSent(ctx) { - return __awaiter3(this, void 0, void 0, function* () { - for (const hook of hooks) { - if (!hook.responseSent) { - continue; - } - yield hook.responseSent(ctx); - } - }); - }, - requestRouted(ctx) { - return __awaiter3(this, void 0, void 0, function* () { - for (const hook of hooks) { - if (!hook.requestRouted) { - continue; - } - yield hook.requestRouted(ctx); - } - }); - }, - error(ctx, err) { - return __awaiter3(this, void 0, void 0, function* () { - for (const hook of hooks) { - if (!hook.error) { - continue; - } - yield hook.error(ctx, err); + field(arg, field, allowExcessProperties, depth) { + let repeated = field.repeat; + switch (field.kind) { + case "scalar": + if (arg === void 0) + return field.opt; + if (repeated) + return this.scalars(arg, field.T, depth, field.L); + return this.scalar(arg, field.T, field.L); + case "enum": + if (arg === void 0) + return field.opt; + if (repeated) + return this.scalars(arg, reflection_info_1.ScalarType.INT32, depth); + return this.scalar(arg, reflection_info_1.ScalarType.INT32); + case "message": + if (arg === void 0) + return true; + if (repeated) + return this.messages(arg, field.T(), allowExcessProperties, depth); + return this.message(arg, field.T(), allowExcessProperties, depth); + case "map": + if (typeof arg != "object" || arg === null) + return false; + if (depth < 2) + return true; + if (!this.mapKeys(arg, field.K, depth)) + return false; + switch (field.V.kind) { + case "scalar": + return this.scalars(Object.values(arg), field.V.T, depth, field.V.L); + case "enum": + return this.scalars(Object.values(arg), reflection_info_1.ScalarType.INT32, depth); + case "message": + return this.messages(Object.values(arg), field.V.T(), allowExcessProperties, depth); } - }); + break; } - }; - return serverHook; - } - __name(chainHooks, "chainHooks"); - exports2.chainHooks = chainHooks; - function isHook(object) { - return "requestReceived" in object || "requestPrepared" in object || "requestSent" in object || "requestRouted" in object || "responsePrepared" in object || "responseSent" in object || "error" in object; - } - __name(isHook, "isHook"); - exports2.isHook = isHook; - } -}); - -// ../node_modules/twirp-ts/build/twirp/errors.js -var require_errors3 = __commonJS({ - "../node_modules/twirp-ts/build/twirp/errors.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isValidErrorCode = exports2.httpStatusFromErrorCode = exports2.TwirpErrorCode = exports2.BadRouteError = exports2.InternalServerErrorWith = exports2.InternalServerError = exports2.RequiredArgumentError = exports2.InvalidArgumentError = exports2.NotFoundError = exports2.TwirpError = void 0; - var TwirpError = class _TwirpError extends Error { - static { - __name(this, "TwirpError"); - } - constructor(code, msg) { - super(msg); - this.code = TwirpErrorCode.Internal; - this.meta = {}; - this.code = code; - this.msg = msg; - Object.setPrototypeOf(this, _TwirpError.prototype); - } - /** - * Adds a metadata kv to the error - * @param key - * @param value - */ - withMeta(key, value) { - this.meta[key] = value; - return this; - } - /** - * Returns a single metadata value - * return "" if not found - * @param key - */ - getMeta(key) { - return this.meta[key] || ""; + return true; } - /** - * Add the original error cause - * @param err - * @param addMeta - */ - withCause(err, addMeta = false) { - this._originalCause = err; - if 
(addMeta) { - this.withMeta("cause", err.message); + message(arg, type, allowExcessProperties, depth) { + if (allowExcessProperties) { + return type.isAssignable(arg, depth); } - return this; - } - cause() { - return this._originalCause; + return type.is(arg, depth); } - /** - * Returns the error representation to JSON - */ - toJSON() { - try { - return JSON.stringify({ - code: this.code, - msg: this.msg, - meta: this.meta - }); - } catch (e) { - return `{"code": "internal", "msg": "There was an error but it could not be serialized into JSON"}`; + messages(arg, type, allowExcessProperties, depth) { + if (!Array.isArray(arg)) + return false; + if (depth < 2) + return true; + if (allowExcessProperties) { + for (let i = 0; i < arg.length && i < depth; i++) + if (!type.isAssignable(arg[i], depth - 1)) + return false; + } else { + for (let i = 0; i < arg.length && i < depth; i++) + if (!type.is(arg[i], depth - 1)) + return false; } + return true; } - /** - * Create a twirp error from an object - * @param obj - */ - static fromObject(obj) { - const code = obj["code"] || TwirpErrorCode.Unknown; - const msg = obj["msg"] || "unknown"; - const error = new _TwirpError(code, msg); - if (obj["meta"]) { - Object.keys(obj["meta"]).forEach((key) => { - error.withMeta(key, obj["meta"][key]); - }); + scalar(arg, type, longType) { + let argType = typeof arg; + switch (type) { + case reflection_info_1.ScalarType.UINT64: + case reflection_info_1.ScalarType.FIXED64: + case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + switch (longType) { + case reflection_info_1.LongType.BIGINT: + return argType == "bigint"; + case reflection_info_1.LongType.NUMBER: + return argType == "number" && !isNaN(arg); + default: + return argType == "string"; + } + case reflection_info_1.ScalarType.BOOL: + return argType == "boolean"; + case reflection_info_1.ScalarType.STRING: + return argType == "string"; + case reflection_info_1.ScalarType.BYTES: + return arg instanceof Uint8Array; + case reflection_info_1.ScalarType.DOUBLE: + case reflection_info_1.ScalarType.FLOAT: + return argType == "number" && !isNaN(arg); + default: + return argType == "number" && Number.isInteger(arg); } - return error; - } - }; - exports2.TwirpError = TwirpError; - var NotFoundError = class extends TwirpError { - static { - __name(this, "NotFoundError"); - } - constructor(msg) { - super(TwirpErrorCode.NotFound, msg); - } - }; - exports2.NotFoundError = NotFoundError; - var InvalidArgumentError = class extends TwirpError { - static { - __name(this, "InvalidArgumentError"); - } - constructor(argument, validationMsg) { - super(TwirpErrorCode.InvalidArgument, argument + " " + validationMsg); - this.withMeta("argument", argument); - } - }; - exports2.InvalidArgumentError = InvalidArgumentError; - var RequiredArgumentError = class extends InvalidArgumentError { - static { - __name(this, "RequiredArgumentError"); - } - constructor(argument) { - super(argument, "is required"); - } - }; - exports2.RequiredArgumentError = RequiredArgumentError; - var InternalServerError = class extends TwirpError { - static { - __name(this, "InternalServerError"); - } - constructor(msg) { - super(TwirpErrorCode.Internal, msg); } - }; - exports2.InternalServerError = InternalServerError; - var InternalServerErrorWith = class extends InternalServerError { - static { - __name(this, "InternalServerErrorWith"); - } - constructor(err) { - super(err.message); - this.withMeta("cause", err.name); - 
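A minimal sketch (not part of the patch) of the TwirpError hierarchy above: a code, a message, and string metadata that survive JSON serialization. The metadata key/value are hypothetical, and the import assumes twirp-ts re-exports its error types.

    // Sketch only: building, serializing and reconstructing a TwirpError.
    const { TwirpError, TwirpErrorCode } = require("twirp-ts");

    const err = new TwirpError(TwirpErrorCode.NotFound, "report not found")
      .withMeta("report_id", "QD-9962");     // hypothetical metadata
    err.getMeta("report_id");                // "QD-9962"
    const wire = err.toJSON();               // '{"code":"not_found","msg":...}'
    TwirpError.fromObject(JSON.parse(wire)); // equivalent TwirpError
    // httpStatusFromErrorCode(err.code) below maps "not_found" to HTTP 404.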
this.withCause(err); - } - }; - exports2.InternalServerErrorWith = InternalServerErrorWith; - var BadRouteError = class extends TwirpError { - static { - __name(this, "BadRouteError"); + scalars(arg, type, depth, longType) { + if (!Array.isArray(arg)) + return false; + if (depth < 2) + return true; + if (Array.isArray(arg)) { + for (let i = 0; i < arg.length && i < depth; i++) + if (!this.scalar(arg[i], type, longType)) + return false; + } + return true; } - constructor(msg, method, url) { - super(TwirpErrorCode.BadRoute, msg); - this.withMeta("twirp_invalid_route", method + " " + url); + mapKeys(map, type, depth) { + let keys = Object.keys(map); + switch (type) { + case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + case reflection_info_1.ScalarType.UINT32: + return this.scalars(keys.slice(0, depth).map((k) => parseInt(k)), type, depth); + case reflection_info_1.ScalarType.BOOL: + return this.scalars(keys.slice(0, depth).map((k) => k == "true" ? true : k == "false" ? false : k), type, depth); + default: + return this.scalars(keys, type, depth, reflection_info_1.LongType.STRING); + } } }; - exports2.BadRouteError = BadRouteError; - var TwirpErrorCode; - (function(TwirpErrorCode2) { - TwirpErrorCode2["Canceled"] = "canceled"; - TwirpErrorCode2["Unknown"] = "unknown"; - TwirpErrorCode2["InvalidArgument"] = "invalid_argument"; - TwirpErrorCode2["Malformed"] = "malformed"; - TwirpErrorCode2["DeadlineExceeded"] = "deadline_exceeded"; - TwirpErrorCode2["NotFound"] = "not_found"; - TwirpErrorCode2["BadRoute"] = "bad_route"; - TwirpErrorCode2["AlreadyExists"] = "already_exists"; - TwirpErrorCode2["PermissionDenied"] = "permission_denied"; - TwirpErrorCode2["Unauthenticated"] = "unauthenticated"; - TwirpErrorCode2["ResourceExhausted"] = "resource_exhausted"; - TwirpErrorCode2["FailedPrecondition"] = "failed_precondition"; - TwirpErrorCode2["Aborted"] = "aborted"; - TwirpErrorCode2["OutOfRange"] = "out_of_range"; - TwirpErrorCode2["Unimplemented"] = "unimplemented"; - TwirpErrorCode2["Internal"] = "internal"; - TwirpErrorCode2["Unavailable"] = "unavailable"; - TwirpErrorCode2["DataLoss"] = "data_loss"; - })(TwirpErrorCode = exports2.TwirpErrorCode || (exports2.TwirpErrorCode = {})); - function httpStatusFromErrorCode(code) { - switch (code) { - case TwirpErrorCode.Canceled: - return 408; - // RequestTimeout - case TwirpErrorCode.Unknown: - return 500; - // Internal Server Error - case TwirpErrorCode.InvalidArgument: - return 400; - // BadRequest - case TwirpErrorCode.Malformed: - return 400; - // BadRequest - case TwirpErrorCode.DeadlineExceeded: - return 408; - // RequestTimeout - case TwirpErrorCode.NotFound: - return 404; - // Not Found - case TwirpErrorCode.BadRoute: - return 404; - // Not Found - case TwirpErrorCode.AlreadyExists: - return 409; - // Conflict - case TwirpErrorCode.PermissionDenied: - return 403; - // Forbidden - case TwirpErrorCode.Unauthenticated: - return 401; - // Unauthorized - case TwirpErrorCode.ResourceExhausted: - return 429; - // Too Many Requests - case TwirpErrorCode.FailedPrecondition: - return 412; - // Precondition Failed - case TwirpErrorCode.Aborted: - return 409; - // Conflict - case TwirpErrorCode.OutOfRange: - return 400; - // Bad Request - case TwirpErrorCode.Unimplemented: - return 501; - // Not Implemented - case TwirpErrorCode.Internal: - return 500; - // Internal Server Error - case TwirpErrorCode.Unavailable: - return 503; - // 
Service Unavailable - case TwirpErrorCode.DataLoss: - return 500; - // Internal Server Error + exports2.ReflectionTypeCheck = ReflectionTypeCheck; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-long-convert.js +var require_reflection_long_convert = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-long-convert.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.reflectionLongConvert = void 0; + var reflection_info_1 = require_reflection_info(); + function reflectionLongConvert(long, type) { + switch (type) { + case reflection_info_1.LongType.BIGINT: + return long.toBigInt(); + case reflection_info_1.LongType.NUMBER: + return long.toNumber(); default: - return 0; + return long.toString(); } } - __name(httpStatusFromErrorCode, "httpStatusFromErrorCode"); - exports2.httpStatusFromErrorCode = httpStatusFromErrorCode; - function isValidErrorCode(code) { - return httpStatusFromErrorCode(code) != 0; - } - __name(isValidErrorCode, "isValidErrorCode"); - exports2.isValidErrorCode = isValidErrorCode; + __name(reflectionLongConvert, "reflectionLongConvert"); + exports2.reflectionLongConvert = reflectionLongConvert; } }); -// ../node_modules/twirp-ts/build/twirp/request.js -var require_request3 = __commonJS({ - "../node_modules/twirp-ts/build/twirp/request.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-reader.js +var require_reflection_json_reader = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-reader.js"(exports2) { "use strict"; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? 
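To make the `depth` contract documented on ReflectionTypeCheck.is() above concrete, a sketch (not part of the patch); it assumes ReflectionTypeCheck, normalizeFieldInfo and ScalarType are re-exported from @protobuf-ts/runtime, and the message shape is invented.

    // Sketch only: what the depth argument of is() buys.
    const { ReflectionTypeCheck, normalizeFieldInfo, ScalarType } =
      require("@protobuf-ts/runtime");

    const check = new ReflectionTypeCheck({
      typeName: "example.Item",
      fields: [normalizeFieldInfo({ no: 1, name: "id", kind: "scalar", T: ScalarType.STRING })],
    });
    check.is({ id: 42 }, 0); // true  - depth 0 only checks field presence
    check.is({ id: 42 }, 1); // false - depth 1 also checks scalar types
    check.is({ id: "a", extra: 1 }, 1);       // false - excess property
    check.is({ id: "a", extra: 1 }, 1, true); // true  - excess allowed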
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.parseTwirpPath = exports2.getRequestData = exports2.validateRequest = exports2.getContentType = exports2.TwirpContentType = void 0; - var errors_1 = require_errors3(); - var TwirpContentType; - (function(TwirpContentType2) { - TwirpContentType2[TwirpContentType2["Protobuf"] = 0] = "Protobuf"; - TwirpContentType2[TwirpContentType2["JSON"] = 1] = "JSON"; - TwirpContentType2[TwirpContentType2["Unknown"] = 2] = "Unknown"; - })(TwirpContentType = exports2.TwirpContentType || (exports2.TwirpContentType = {})); - function getContentType(mimeType) { - switch (mimeType) { - case "application/protobuf": - return TwirpContentType.Protobuf; - case "application/json": - return TwirpContentType.JSON; - default: - return TwirpContentType.Unknown; + exports2.ReflectionJsonReader = void 0; + var json_typings_1 = require_json_typings(); + var base64_1 = require_base642(); + var reflection_info_1 = require_reflection_info(); + var pb_long_1 = require_pb_long(); + var assert_1 = require_assert(); + var reflection_long_convert_1 = require_reflection_long_convert(); + var ReflectionJsonReader = class { + static { + __name(this, "ReflectionJsonReader"); } - } - __name(getContentType, "getContentType"); - exports2.getContentType = getContentType; - function validateRequest(ctx, request, pathPrefix) { - if (request.method !== "POST") { - const msg = `unsupported method ${request.method} (only POST is allowed)`; - throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); + constructor(info) { + this.info = info; } - const path2 = parseTwirpPath(request.url || ""); - if (path2.pkgService !== (ctx.packageName ? ctx.packageName + "." : "") + ctx.serviceName) { - const msg = `no handler for path ${request.url}`; - throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); + prepare() { + var _a; + if (this.fMap === void 0) { + this.fMap = {}; + const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; + for (const field of fieldsInput) { + this.fMap[field.name] = field; + this.fMap[field.jsonName] = field; + this.fMap[field.localName] = field; + } + } } - if (path2.prefix !== pathPrefix) { - const msg = `invalid path prefix ${path2.prefix}, expected ${pathPrefix}, on path ${request.url}`; - throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); + // Cannot parse JSON for #. + assert(condition, fieldName, jsonValue) { + if (!condition) { + let what = json_typings_1.typeofJsonValue(jsonValue); + if (what == "number" || what == "boolean") + what = jsonValue.toString(); + throw new Error(`Cannot parse JSON ${what} for ${this.info.typeName}#${fieldName}`); + } } - const mimeContentType = request.headers["content-type"] || ""; - if (ctx.contentType === TwirpContentType.Unknown) { - const msg = `unexpected Content-Type: ${request.headers["content-type"]}`; - throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); + /** + * Reads a message from canonical JSON format into the target message. + * + * Repeated fields are appended. Map entries are added, overwriting + * existing keys. + * + * If a message field is already present, it will be merged with the + * new data. 
+ */ + read(input, message, options) { + this.prepare(); + const oneofsHandled = []; + for (const [jsonKey, jsonValue] of Object.entries(input)) { + const field = this.fMap[jsonKey]; + if (!field) { + if (!options.ignoreUnknownFields) + throw new Error(`Found unknown field while reading ${this.info.typeName} from JSON format. JSON key: ${jsonKey}`); + continue; + } + const localName = field.localName; + let target; + if (field.oneof) { + if (jsonValue === null && (field.kind !== "enum" || field.T()[0] !== "google.protobuf.NullValue")) { + continue; + } + if (oneofsHandled.includes(field.oneof)) + throw new Error(`Multiple members of the oneof group "${field.oneof}" of ${this.info.typeName} are present in JSON.`); + oneofsHandled.push(field.oneof); + target = message[field.oneof] = { + oneofKind: localName + }; + } else { + target = message; + } + if (field.kind == "map") { + if (jsonValue === null) { + continue; + } + this.assert(json_typings_1.isJsonObject(jsonValue), field.name, jsonValue); + const fieldObj = target[localName]; + for (const [jsonObjKey, jsonObjValue] of Object.entries(jsonValue)) { + this.assert(jsonObjValue !== null, field.name + " map value", null); + let val; + switch (field.V.kind) { + case "message": + val = field.V.T().internalJsonRead(jsonObjValue, options); + break; + case "enum": + val = this.enum(field.V.T(), jsonObjValue, field.name, options.ignoreUnknownFields); + if (val === false) + continue; + break; + case "scalar": + val = this.scalar(jsonObjValue, field.V.T, field.V.L, field.name); + break; + } + this.assert(val !== void 0, field.name + " map value", jsonObjValue); + let key = jsonObjKey; + if (field.K == reflection_info_1.ScalarType.BOOL) + key = key == "true" ? true : key == "false" ? false : key; + key = this.scalar(key, field.K, reflection_info_1.LongType.STRING, field.name).toString(); + fieldObj[key] = val; + } + } else if (field.repeat) { + if (jsonValue === null) + continue; + this.assert(Array.isArray(jsonValue), field.name, jsonValue); + const fieldArr = target[localName]; + for (const jsonItem of jsonValue) { + this.assert(jsonItem !== null, field.name, null); + let val; + switch (field.kind) { + case "message": + val = field.T().internalJsonRead(jsonItem, options); + break; + case "enum": + val = this.enum(field.T(), jsonItem, field.name, options.ignoreUnknownFields); + if (val === false) + continue; + break; + case "scalar": + val = this.scalar(jsonItem, field.T, field.L, field.name); + break; + } + this.assert(val !== void 0, field.name, jsonValue); + fieldArr.push(val); + } + } else { + switch (field.kind) { + case "message": + if (jsonValue === null && field.T().typeName != "google.protobuf.Value") { + this.assert(field.oneof === void 0, field.name + " (oneof member)", null); + continue; + } + target[localName] = field.T().internalJsonRead(jsonValue, options, target[localName]); + break; + case "enum": + let val = this.enum(field.T(), jsonValue, field.name, options.ignoreUnknownFields); + if (val === false) + continue; + target[localName] = val; + break; + case "scalar": + target[localName] = this.scalar(jsonValue, field.T, field.L, field.name); + break; + } + } + } } - return Object.assign(Object.assign({}, path2), { mimeContentType, contentType: ctx.contentType }); - } - __name(validateRequest, "validateRequest"); - exports2.validateRequest = validateRequest; - function getRequestData(req) { - return new Promise((resolve, reject) => { - const reqWithRawBody = req; - if (reqWithRawBody.rawBody instanceof Buffer) { - 
resolve(reqWithRawBody.rawBody); - return; + /** + * Returns `false` for unrecognized string representations. + * + * google.protobuf.NullValue accepts only JSON `null` (or the old `"NULL_VALUE"`). + */ + enum(type, json, fieldName, ignoreUnknownFields) { + if (type[0] == "google.protobuf.NullValue") + assert_1.assert(json === null || json === "NULL_VALUE", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type[0]} only accepts null.`); + if (json === null) + return 0; + switch (typeof json) { + case "number": + assert_1.assert(Number.isInteger(json), `Unable to parse field ${this.info.typeName}#${fieldName}, enum can only be integral number, got ${json}.`); + return json; + case "string": + let localEnumName = json; + if (type[2] && json.substring(0, type[2].length) === type[2]) + localEnumName = json.substring(type[2].length); + let enumNumber = type[1][localEnumName]; + if (typeof enumNumber === "undefined" && ignoreUnknownFields) { + return false; + } + assert_1.assert(typeof enumNumber == "number", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type[0]} has no value for "${json}".`); + return enumNumber; } - const chunks = []; - req.on("data", (chunk) => chunks.push(chunk)); - req.on("end", () => __awaiter3(this, void 0, void 0, function* () { - const data = Buffer.concat(chunks); - resolve(data); - })); - req.on("error", (err) => { - if (req.aborted) { - reject(new errors_1.TwirpError(errors_1.TwirpErrorCode.DeadlineExceeded, "failed to read request: deadline exceeded")); - } else { - reject(new errors_1.TwirpError(errors_1.TwirpErrorCode.Malformed, err.message).withCause(err)); + assert_1.assert(false, `Unable to parse field ${this.info.typeName}#${fieldName}, cannot parse enum value from ${typeof json}".`); + } + scalar(json, type, longType, fieldName) { + let e; + try { + switch (type) { + // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". + // Either numbers or strings are accepted. Exponent notation is also accepted. + case reflection_info_1.ScalarType.DOUBLE: + case reflection_info_1.ScalarType.FLOAT: + if (json === null) + return 0; + if (json === "NaN") + return Number.NaN; + if (json === "Infinity") + return Number.POSITIVE_INFINITY; + if (json === "-Infinity") + return Number.NEGATIVE_INFINITY; + if (json === "") { + e = "empty string"; + break; + } + if (typeof json == "string" && json.trim().length !== json.length) { + e = "extra whitespace"; + break; + } + if (typeof json != "string" && typeof json != "number") { + break; + } + let float = Number(json); + if (Number.isNaN(float)) { + e = "not a number"; + break; + } + if (!Number.isFinite(float)) { + e = "too large or small"; + break; + } + if (type == reflection_info_1.ScalarType.FLOAT) + assert_1.assertFloat32(float); + return float; + // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. 
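For reference, a sketch (not part of the patch) of the routing contract enforced by validateRequest()/parseTwirpPath() above; the service and method names are hypothetical, and the import assumes twirp-ts re-exports parseTwirpPath.

    // Sketch only: splitting a twirp URL into its routed parts.
    const { parseTwirpPath } = require("twirp-ts"); // assumed re-export
    parseTwirpPath("/twirp/example.ReportService/GetReport");
    // -> { prefix: "/twirp", pkgService: "example.ReportService", method: "GetReport" }
    // validateRequest() then rejects anything that is not a POST to
    // <prefix>/<package>.<Service>/<Method> with a protobuf or JSON body.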
+ case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + case reflection_info_1.ScalarType.UINT32: + if (json === null) + return 0; + let int32; + if (typeof json == "number") + int32 = json; + else if (json === "") + e = "empty string"; + else if (typeof json == "string") { + if (json.trim().length !== json.length) + e = "extra whitespace"; + else + int32 = Number(json); + } + if (int32 === void 0) + break; + if (type == reflection_info_1.ScalarType.UINT32) + assert_1.assertUInt32(int32); + else + assert_1.assertInt32(int32); + return int32; + // int64, fixed64, uint64: JSON value will be a decimal string. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + if (json === null) + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); + if (typeof json != "number" && typeof json != "string") + break; + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.from(json), longType); + case reflection_info_1.ScalarType.FIXED64: + case reflection_info_1.ScalarType.UINT64: + if (json === null) + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); + if (typeof json != "number" && typeof json != "string") + break; + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.from(json), longType); + // bool: + case reflection_info_1.ScalarType.BOOL: + if (json === null) + return false; + if (typeof json !== "boolean") + break; + return json; + // string: + case reflection_info_1.ScalarType.STRING: + if (json === null) + return ""; + if (typeof json !== "string") { + e = "extra whitespace"; + break; + } + try { + encodeURIComponent(json); + } catch (e2) { + e2 = "invalid UTF8"; + break; + } + return json; + // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. + // Either standard or URL-safe base64 encoding with/without paddings are accepted. + case reflection_info_1.ScalarType.BYTES: + if (json === null || json === "") + return new Uint8Array(0); + if (typeof json !== "string") + break; + return base64_1.base64decode(json); } - }); - req.on("close", () => { - reject(new errors_1.TwirpError(errors_1.TwirpErrorCode.Canceled, "failed to read request: context canceled")); - }); - }); - } - __name(getRequestData, "getRequestData"); - exports2.getRequestData = getRequestData; - function parseTwirpPath(path2) { - const parts = path2.split("/"); - if (parts.length < 2) { - return { - pkgService: "", - method: "", - prefix: "" - }; + } catch (error) { + e = error.message; + } + this.assert(false, fieldName + (e ? 
" - " + e : ""), json); } - return { - method: parts[parts.length - 1], - pkgService: parts[parts.length - 2], - prefix: parts.slice(0, parts.length - 2).join("/") - }; - } - __name(parseTwirpPath, "parseTwirpPath"); - exports2.parseTwirpPath = parseTwirpPath; + }; + exports2.ReflectionJsonReader = ReflectionJsonReader; } }); -// ../node_modules/twirp-ts/build/twirp/server.js -var require_server = __commonJS({ - "../node_modules/twirp-ts/build/twirp/server.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-writer.js +var require_reflection_json_writer = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-writer.js"(exports2) { "use strict"; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.writeError = exports2.TwirpServer = void 0; - var hooks_1 = require_hooks(); - var request_1 = require_request3(); - var errors_1 = require_errors3(); - var TwirpServer = class { + exports2.ReflectionJsonWriter = void 0; + var base64_1 = require_base642(); + var pb_long_1 = require_pb_long(); + var reflection_info_1 = require_reflection_info(); + var assert_1 = require_assert(); + var ReflectionJsonWriter = class { static { - __name(this, "TwirpServer"); - } - constructor(options) { - this.pathPrefix = "/twirp"; - this.hooks = []; - this.interceptors = []; - this.packageName = options.packageName; - this.serviceName = options.serviceName; - this.methodList = options.methodList; - this.matchRoute = options.matchRoute; - this.service = options.service; + __name(this, "ReflectionJsonWriter"); } - /** - * Returns the prefix for this server - */ - get prefix() { - return this.pathPrefix; + constructor(info) { + var _a; + this.fields = (_a = info.fields) !== null && _a !== void 0 ? _a : []; } /** - * The http handler for twirp complaint endpoints - * @param options + * Converts the message to a JSON object, based on the field descriptors. */ - httpHandler(options) { - return (req, resp) => { - if ((options === null || options === void 0 ? void 0 : options.prefix) !== void 0) { - this.withPrefix(options.prefix); + write(message, options) { + const json = {}, source = message; + for (const field of this.fields) { + if (!field.oneof) { + let jsonValue2 = this.field(field, source[field.localName], options); + if (jsonValue2 !== void 0) + json[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue2; + continue; } - return this._httpHandler(req, resp); - }; + const group = source[field.oneof]; + if (group.oneofKind !== field.localName) + continue; + const opt = field.kind == "scalar" || field.kind == "enum" ? 
Object.assign(Object.assign({}, options), { emitDefaultValues: true }) : options; + let jsonValue = this.field(field, group[field.localName], opt); + assert_1.assert(jsonValue !== void 0); + json[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue; + } + return json; } - /** - * Adds interceptors or hooks to the request stack - * @param middlewares - */ - use(...middlewares) { - middlewares.forEach((middleware) => { - if (hooks_1.isHook(middleware)) { - this.hooks.push(middleware); - return this; + field(field, value, options) { + let jsonValue = void 0; + if (field.kind == "map") { + assert_1.assert(typeof value == "object" && value !== null); + const jsonObj = {}; + switch (field.V.kind) { + case "scalar": + for (const [entryKey, entryValue] of Object.entries(value)) { + const val = this.scalar(field.V.T, entryValue, field.name, false, true); + assert_1.assert(val !== void 0); + jsonObj[entryKey.toString()] = val; + } + break; + case "message": + const messageType = field.V.T(); + for (const [entryKey, entryValue] of Object.entries(value)) { + const val = this.message(messageType, entryValue, field.name, options); + assert_1.assert(val !== void 0); + jsonObj[entryKey.toString()] = val; + } + break; + case "enum": + const enumInfo = field.V.T(); + for (const [entryKey, entryValue] of Object.entries(value)) { + assert_1.assert(entryValue === void 0 || typeof entryValue == "number"); + const val = this.enum(enumInfo, entryValue, field.name, false, true, options.enumAsInteger); + assert_1.assert(val !== void 0); + jsonObj[entryKey.toString()] = val; + } + break; } - this.interceptors.push(middleware); - }); - return this; - } - /** - * Adds a prefix to the service url path - * @param prefix - */ - withPrefix(prefix) { - if (prefix === false) { - this.pathPrefix = ""; + if (options.emitDefaultValues || Object.keys(jsonObj).length > 0) + jsonValue = jsonObj; + } else if (field.repeat) { + assert_1.assert(Array.isArray(value)); + const jsonArr = []; + switch (field.kind) { + case "scalar": + for (let i = 0; i < value.length; i++) { + const val = this.scalar(field.T, value[i], field.name, field.opt, true); + assert_1.assert(val !== void 0); + jsonArr.push(val); + } + break; + case "enum": + const enumInfo = field.T(); + for (let i = 0; i < value.length; i++) { + assert_1.assert(value[i] === void 0 || typeof value[i] == "number"); + const val = this.enum(enumInfo, value[i], field.name, field.opt, true, options.enumAsInteger); + assert_1.assert(val !== void 0); + jsonArr.push(val); + } + break; + case "message": + const messageType = field.T(); + for (let i = 0; i < value.length; i++) { + const val = this.message(messageType, value[i], field.name, options); + assert_1.assert(val !== void 0); + jsonArr.push(val); + } + break; + } + if (options.emitDefaultValues || jsonArr.length > 0 || options.emitDefaultValues) + jsonValue = jsonArr; } else { - this.pathPrefix = prefix; + switch (field.kind) { + case "scalar": + jsonValue = this.scalar(field.T, value, field.name, field.opt, options.emitDefaultValues); + break; + case "enum": + jsonValue = this.enum(field.T(), value, field.name, field.opt, options.emitDefaultValues, options.enumAsInteger); + break; + case "message": + jsonValue = this.message(field.T(), value, field.name, options); + break; + } } - return this; - } - /** - * Returns the regex matching path for this twirp server - */ - matchingPath() { - const baseRegex = this.baseURI().replace(/\./g, "\\."); - return new RegExp(`${baseRegex}/(${this.methodList.join("|")})`); - } - 
/** - * Returns the base URI for this twirp server - */ - baseURI() { - return `${this.pathPrefix}/${this.packageName ? this.packageName + "." : ""}${this.serviceName}`; + return jsonValue; } /** - * Create a twirp context - * @param req - * @param res - * @private + * Returns `null` as the default for google.protobuf.NullValue. */ - createContext(req, res) { - return { - packageName: this.packageName, - serviceName: this.serviceName, - methodName: "", - contentType: request_1.getContentType(req.headers["content-type"]), - req, - res - }; + enum(type, value, fieldName, optional, emitDefaultValues, enumAsInteger) { + if (type[0] == "google.protobuf.NullValue") + return !emitDefaultValues && !optional ? void 0 : null; + if (value === void 0) { + assert_1.assert(optional); + return void 0; + } + if (value === 0 && !emitDefaultValues && !optional) + return void 0; + assert_1.assert(typeof value == "number"); + assert_1.assert(Number.isInteger(value)); + if (enumAsInteger || !type[1].hasOwnProperty(value)) + return value; + if (type[2]) + return type[2] + type[1][value]; + return type[1][value]; } - /** - * Twrip server http handler implementation - * @param req - * @param resp - * @private - */ - _httpHandler(req, resp) { - return __awaiter3(this, void 0, void 0, function* () { - const ctx = this.createContext(req, resp); - try { - yield this.invokeHook("requestReceived", ctx); - const { method, mimeContentType } = request_1.validateRequest(ctx, req, this.pathPrefix || ""); - const handler = this.matchRoute(method, { - onMatch: /* @__PURE__ */ __name((ctx2) => { - return this.invokeHook("requestRouted", ctx2); - }, "onMatch"), - onNotFound: /* @__PURE__ */ __name(() => { - const msg = `no handler for path ${req.url}`; - throw new errors_1.BadRouteError(msg, req.method || "", req.url || ""); - }, "onNotFound") - }); - const body = yield request_1.getRequestData(req); - const response = yield handler(ctx, this.service, body, this.interceptors); - yield Promise.all([ - this.invokeHook("responsePrepared", ctx), - // keep backwards compatibility till next release - this.invokeHook("requestPrepared", ctx) - ]); - resp.statusCode = 200; - resp.setHeader("Content-Type", mimeContentType); - resp.end(response); - } catch (e) { - yield this.invokeHook("error", ctx, mustBeTwirpError(e)); - if (!resp.headersSent) { - writeError(resp, e); - } - } finally { - yield Promise.all([ - this.invokeHook("responseSent", ctx), - // keep backwards compatibility till next release - this.invokeHook("requestSent", ctx) - ]); - } - }); + message(type, value, fieldName, options) { + if (value === void 0) + return options.emitDefaultValues ? null : void 0; + return type.internalJsonWrite(value, options); } - /** - * Invoke a hook - * @param hookName - * @param ctx - * @param err - * @protected - */ - invokeHook(hookName, ctx, err) { - return __awaiter3(this, void 0, void 0, function* () { - if (this.hooks.length === 0) { - return; - } - const chainedHooks = hooks_1.chainHooks(...this.hooks); - const hook = chainedHooks === null || chainedHooks === void 0 ? void 0 : chainedHooks[hookName]; - if (hook) { - yield hook(ctx, err || new errors_1.InternalServerError("internal server error")); - } - }); + scalar(type, value, fieldName, optional, emitDefaultValues) { + if (value === void 0) { + assert_1.assert(optional); + return void 0; + } + const ed = emitDefaultValues || optional; + switch (type) { + // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. 
+ case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + if (value === 0) + return ed ? 0 : void 0; + assert_1.assertInt32(value); + return value; + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.UINT32: + if (value === 0) + return ed ? 0 : void 0; + assert_1.assertUInt32(value); + return value; + // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". + // Either numbers or strings are accepted. Exponent notation is also accepted. + case reflection_info_1.ScalarType.FLOAT: + assert_1.assertFloat32(value); + case reflection_info_1.ScalarType.DOUBLE: + if (value === 0) + return ed ? 0 : void 0; + assert_1.assert(typeof value == "number"); + if (Number.isNaN(value)) + return "NaN"; + if (value === Number.POSITIVE_INFINITY) + return "Infinity"; + if (value === Number.NEGATIVE_INFINITY) + return "-Infinity"; + return value; + // string: + case reflection_info_1.ScalarType.STRING: + if (value === "") + return ed ? "" : void 0; + assert_1.assert(typeof value == "string"); + return value; + // bool: + case reflection_info_1.ScalarType.BOOL: + if (value === false) + return ed ? false : void 0; + assert_1.assert(typeof value == "boolean"); + return value; + // JSON value will be a decimal string. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.UINT64: + case reflection_info_1.ScalarType.FIXED64: + assert_1.assert(typeof value == "number" || typeof value == "string" || typeof value == "bigint"); + let ulong = pb_long_1.PbULong.from(value); + if (ulong.isZero() && !ed) + return void 0; + return ulong.toString(); + // JSON value will be a decimal string. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + assert_1.assert(typeof value == "number" || typeof value == "string" || typeof value == "bigint"); + let long = pb_long_1.PbLong.from(value); + if (long.isZero() && !ed) + return void 0; + return long.toString(); + // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. + // Either standard or URL-safe base64 encoding with/without paddings are accepted. + case reflection_info_1.ScalarType.BYTES: + assert_1.assert(value instanceof Uint8Array); + if (!value.byteLength) + return ed ? 
"" : void 0; + return base64_1.base64encode(value); + } } }; - exports2.TwirpServer = TwirpServer; - function writeError(res, error) { - const twirpError = mustBeTwirpError(error); - res.setHeader("Content-Type", "application/json"); - res.statusCode = errors_1.httpStatusFromErrorCode(twirpError.code); - res.end(twirpError.toJSON()); - } - __name(writeError, "writeError"); - exports2.writeError = writeError; - function mustBeTwirpError(err) { - if (err instanceof errors_1.TwirpError) { - return err; - } - return new errors_1.InternalServerErrorWith(err); - } - __name(mustBeTwirpError, "mustBeTwirpError"); + exports2.ReflectionJsonWriter = ReflectionJsonWriter; } }); -// ../node_modules/twirp-ts/build/twirp/interceptors.js -var require_interceptors = __commonJS({ - "../node_modules/twirp-ts/build/twirp/interceptors.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-scalar-default.js +var require_reflection_scalar_default = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-scalar-default.js"(exports2) { "use strict"; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.chainInterceptors = void 0; - function chainInterceptors(...interceptors) { - if (interceptors.length === 0) { - return; - } - if (interceptors.length === 1) { - return interceptors[0]; + exports2.reflectionScalarDefault = void 0; + var reflection_info_1 = require_reflection_info(); + var reflection_long_convert_1 = require_reflection_long_convert(); + var pb_long_1 = require_pb_long(); + function reflectionScalarDefault(type, longType = reflection_info_1.LongType.STRING) { + switch (type) { + case reflection_info_1.ScalarType.BOOL: + return false; + case reflection_info_1.ScalarType.UINT64: + case reflection_info_1.ScalarType.FIXED64: + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); + case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); + case reflection_info_1.ScalarType.DOUBLE: + case reflection_info_1.ScalarType.FLOAT: + return 0; + case reflection_info_1.ScalarType.BYTES: + return new Uint8Array(0); + case reflection_info_1.ScalarType.STRING: + return ""; + default: + return 0; } - const first = interceptors[0]; - return (ctx, request, handler) => __awaiter3(this, void 0, void 0, function* () { - let next = handler; - for (let i = interceptors.length - 1; i > 0; i--) { - next = /* @__PURE__ */ ((next2) => (ctx2, typedRequest) => { - return interceptors[i](ctx2, typedRequest, next2); - })(next); - } - return first(ctx, request, next); - 
}); } - __name(chainInterceptors, "chainInterceptors"); - exports2.chainInterceptors = chainInterceptors; + __name(reflectionScalarDefault, "reflectionScalarDefault"); + exports2.reflectionScalarDefault = reflectionScalarDefault; } }); -// ../node_modules/dot-object/index.js -var require_dot_object = __commonJS({ - "../node_modules/dot-object/index.js"(exports2, module2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-reader.js +var require_reflection_binary_reader = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-reader.js"(exports2) { "use strict"; - function _process(v, mod) { - var i; - var r; - if (typeof mod === "function") { - r = mod(v); - if (r !== void 0) { - v = r; - } - } else if (Array.isArray(mod)) { - for (i = 0; i < mod.length; i++) { - r = mod[i](v); - if (r !== void 0) { - v = r; - } - } - } - return v; - } - __name(_process, "_process"); - function parseKey(key, val) { - if (key[0] === "-" && Array.isArray(val) && /^-\d+$/.test(key)) { - return val.length + parseInt(key, 10); + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ReflectionBinaryReader = void 0; + var binary_format_contract_1 = require_binary_format_contract(); + var reflection_info_1 = require_reflection_info(); + var reflection_long_convert_1 = require_reflection_long_convert(); + var reflection_scalar_default_1 = require_reflection_scalar_default(); + var ReflectionBinaryReader = class { + static { + __name(this, "ReflectionBinaryReader"); } - return key; - } - __name(parseKey, "parseKey"); - function isIndex(k) { - return /^\d+$/.test(k); - } - __name(isIndex, "isIndex"); - function isObject(val) { - return Object.prototype.toString.call(val) === "[object Object]"; - } - __name(isObject, "isObject"); - function isArrayOrObject(val) { - return Object(val) === val; - } - __name(isArrayOrObject, "isArrayOrObject"); - function isEmptyObject(val) { - return Object.keys(val).length === 0; - } - __name(isEmptyObject, "isEmptyObject"); - var blacklist = ["__proto__", "prototype", "constructor"]; - var blacklistFilter = /* @__PURE__ */ __name(function(part) { - return blacklist.indexOf(part) === -1; - }, "blacklistFilter"); - function parsePath(path2, sep) { - if (path2.indexOf("[") >= 0) { - path2 = path2.replace(/\[/g, sep).replace(/]/g, ""); + constructor(info) { + this.info = info; } - var parts = path2.split(sep); - var check = parts.filter(blacklistFilter); - if (check.length !== parts.length) { - throw Error("Refusing to update blacklisted property " + path2); + prepare() { + var _a; + if (!this.fieldNoToField) { + const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; + this.fieldNoToField = new Map(fieldsInput.map((field) => [field.no, field])); + } } - return parts; - } - __name(parsePath, "parsePath"); - var hasOwnProperty = Object.prototype.hasOwnProperty; - function DotObject(separator, override, useArray, useBrackets) { - if (!(this instanceof DotObject)) { - return new DotObject(separator, override, useArray, useBrackets); + /** + * Reads a message from binary format into the target message. + * + * Repeated fields are appended. Map entries are added, overwriting + * existing keys. + * + * If a message field is already present, it will be merged with the + * new data. + */ + read(reader, message, options, length) { + this.prepare(); + const end = length === void 0 ? 
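For reference, a sketch (not part of the patch) of the defaults produced by reflectionScalarDefault() above, including the LongType variants; it assumes the helper is re-exported from @protobuf-ts/runtime.

    // Sketch only: proto3 scalar defaults per type.
    const { reflectionScalarDefault, ScalarType, LongType } =
      require("@protobuf-ts/runtime");
    reflectionScalarDefault(ScalarType.STRING);                 // ""
    reflectionScalarDefault(ScalarType.BYTES);                  // Uint8Array(0)
    reflectionScalarDefault(ScalarType.BOOL);                   // false
    reflectionScalarDefault(ScalarType.INT64);                  // "0" (LongType.STRING)
    reflectionScalarDefault(ScalarType.INT64, LongType.BIGINT); // 0n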
reader.len : reader.pos + length; + while (reader.pos < end) { + const [fieldNo, wireType] = reader.tag(), field = this.fieldNoToField.get(fieldNo); + if (!field) { + let u = options.readUnknownField; + if (u == "throw") + throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.info.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? binary_format_contract_1.UnknownFieldHandler.onRead : u)(this.info.typeName, message, fieldNo, wireType, d); + continue; + } + let target = message, repeated = field.repeat, localName = field.localName; + if (field.oneof) { + target = target[field.oneof]; + if (target.oneofKind !== localName) + target = message[field.oneof] = { + oneofKind: localName + }; + } + switch (field.kind) { + case "scalar": + case "enum": + let T = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; + let L = field.kind == "scalar" ? field.L : void 0; + if (repeated) { + let arr = target[localName]; + if (wireType == binary_format_contract_1.WireType.LengthDelimited && T != reflection_info_1.ScalarType.STRING && T != reflection_info_1.ScalarType.BYTES) { + let e = reader.uint32() + reader.pos; + while (reader.pos < e) + arr.push(this.scalar(reader, T, L)); + } else + arr.push(this.scalar(reader, T, L)); + } else + target[localName] = this.scalar(reader, T, L); + break; + case "message": + if (repeated) { + let arr = target[localName]; + let msg = field.T().internalBinaryRead(reader, reader.uint32(), options); + arr.push(msg); + } else + target[localName] = field.T().internalBinaryRead(reader, reader.uint32(), options, target[localName]); + break; + case "map": + let [mapKey, mapVal] = this.mapEntry(field, reader, options); + target[localName][mapKey] = mapVal; + break; + } + } } - if (typeof override === "undefined") override = false; - if (typeof useArray === "undefined") useArray = true; - if (typeof useBrackets === "undefined") useBrackets = true; - this.separator = separator || "."; - this.override = override; - this.useArray = useArray; - this.useBrackets = useBrackets; - this.keepArray = false; - this.cleanup = []; - } - __name(DotObject, "DotObject"); - var dotDefault = new DotObject(".", false, true, true); - function wrap(method) { - return function() { - return dotDefault[method].apply(dotDefault, arguments); - }; - } - __name(wrap, "wrap"); - DotObject.prototype._fill = function(a, obj, v, mod) { - var k = a.shift(); - if (a.length > 0) { - obj[k] = obj[k] || (this.useArray && isIndex(a[0]) ? 
[] : {}); - if (!isArrayOrObject(obj[k])) { - if (this.override) { - obj[k] = {}; - } else { - if (!(isArrayOrObject(v) && isEmptyObject(v))) { - throw new Error( - "Trying to redefine `" + k + "` which is a " + typeof obj[k] - ); - } - return; + /** + * Read a map field, expecting key field = 1, value field = 2 + */ + mapEntry(field, reader, options) { + let length = reader.uint32(); + let end = reader.pos + length; + let key = void 0; + let val = void 0; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + if (field.K == reflection_info_1.ScalarType.BOOL) + key = reader.bool().toString(); + else + key = this.scalar(reader, field.K, reflection_info_1.LongType.STRING); + break; + case 2: + switch (field.V.kind) { + case "scalar": + val = this.scalar(reader, field.V.T, field.V.L); + break; + case "enum": + val = reader.int32(); + break; + case "message": + val = field.V.T().internalBinaryRead(reader, reader.uint32(), options); + break; + } + break; + default: + throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) in map entry for ${this.info.typeName}#${field.name}`); } } - this._fill(a, obj[k], v, mod); - } else { - if (!this.override && isArrayOrObject(obj[k]) && !isEmptyObject(obj[k])) { - if (!(isArrayOrObject(v) && isEmptyObject(v))) { - throw new Error("Trying to redefine non-empty obj['" + k + "']"); + if (key === void 0) { + let keyRaw = reflection_scalar_default_1.reflectionScalarDefault(field.K); + key = field.K == reflection_info_1.ScalarType.BOOL ? keyRaw.toString() : keyRaw; + } + if (val === void 0) + switch (field.V.kind) { + case "scalar": + val = reflection_scalar_default_1.reflectionScalarDefault(field.V.T, field.V.L); + break; + case "enum": + val = 0; + break; + case "message": + val = field.V.T().create(); + break; } - return; + return [key, val]; + } + scalar(reader, type, longType) { + switch (type) { + case reflection_info_1.ScalarType.INT32: + return reader.int32(); + case reflection_info_1.ScalarType.STRING: + return reader.string(); + case reflection_info_1.ScalarType.BOOL: + return reader.bool(); + case reflection_info_1.ScalarType.DOUBLE: + return reader.double(); + case reflection_info_1.ScalarType.FLOAT: + return reader.float(); + case reflection_info_1.ScalarType.INT64: + return reflection_long_convert_1.reflectionLongConvert(reader.int64(), longType); + case reflection_info_1.ScalarType.UINT64: + return reflection_long_convert_1.reflectionLongConvert(reader.uint64(), longType); + case reflection_info_1.ScalarType.FIXED64: + return reflection_long_convert_1.reflectionLongConvert(reader.fixed64(), longType); + case reflection_info_1.ScalarType.FIXED32: + return reader.fixed32(); + case reflection_info_1.ScalarType.BYTES: + return reader.bytes(); + case reflection_info_1.ScalarType.UINT32: + return reader.uint32(); + case reflection_info_1.ScalarType.SFIXED32: + return reader.sfixed32(); + case reflection_info_1.ScalarType.SFIXED64: + return reflection_long_convert_1.reflectionLongConvert(reader.sfixed64(), longType); + case reflection_info_1.ScalarType.SINT32: + return reader.sint32(); + case reflection_info_1.ScalarType.SINT64: + return reflection_long_convert_1.reflectionLongConvert(reader.sint64(), longType); } - obj[k] = _process(v, mod); } }; - DotObject.prototype.object = function(obj, mods) { - var self2 = this; - Object.keys(obj).forEach(function(k) { - var mod = mods === void 0 ? 
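To make the map-entry parsing above concrete, a sketch (not part of the patch) of the wire shape mapEntry() expects: a length-delimited submessage with the key as field 1 and the value as field 2. The field number 3 and the key/value strings are hypothetical.

    // Sketch only: encoding one map entry with the runtime's BinaryWriter.
    const { BinaryWriter, WireType } = require("@protobuf-ts/runtime");
    const bytes = new BinaryWriter()
      .tag(3, WireType.LengthDelimited).fork()                 // open the entry
      .tag(1, WireType.LengthDelimited).string("linux_x86_64") // key (field 1)
      .tag(2, WireType.LengthDelimited).string("a7a47898...")  // value (field 2)
      .join()    // close the entry, prefixing its length
      .finish();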
null : mods[k]; - var ok = parsePath(k, self2.separator).join(self2.separator); - if (ok.indexOf(self2.separator) !== -1) { - self2._fill(ok.split(self2.separator), obj, obj[k], mod); - delete obj[k]; - } else { - obj[k] = _process(obj[k], mod); + exports2.ReflectionBinaryReader = ReflectionBinaryReader; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-writer.js +var require_reflection_binary_writer = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-writer.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ReflectionBinaryWriter = void 0; + var binary_format_contract_1 = require_binary_format_contract(); + var reflection_info_1 = require_reflection_info(); + var assert_1 = require_assert(); + var pb_long_1 = require_pb_long(); + var ReflectionBinaryWriter = class { + static { + __name(this, "ReflectionBinaryWriter"); + } + constructor(info) { + this.info = info; + } + prepare() { + if (!this.fields) { + const fieldsInput = this.info.fields ? this.info.fields.concat() : []; + this.fields = fieldsInput.sort((a, b) => a.no - b.no); } - }); - return obj; - }; - DotObject.prototype.str = function(path2, v, obj, mod) { - var ok = parsePath(path2, this.separator).join(this.separator); - if (path2.indexOf(this.separator) !== -1) { - this._fill(ok.split(this.separator), obj, v, mod); - } else { - obj[path2] = _process(v, mod); } - return obj; - }; - DotObject.prototype.pick = function(path2, obj, remove, reindexArray) { - var i; - var keys; - var val; - var key; - var cp; - keys = parsePath(path2, this.separator); - for (i = 0; i < keys.length; i++) { - key = parseKey(keys[i], obj); - if (obj && typeof obj === "object" && key in obj) { - if (i === keys.length - 1) { - if (remove) { - val = obj[key]; - if (reindexArray && Array.isArray(obj)) { - obj.splice(key, 1); + /** + * Writes the message to binary format. + */ + write(message, writer, options) { + this.prepare(); + for (const field of this.fields) { + let value, emitDefault, repeated = field.repeat, localName = field.localName; + if (field.oneof) { + const group = message[field.oneof]; + if (group.oneofKind !== localName) + continue; + value = group[localName]; + emitDefault = true; + } else { + value = message[localName]; + emitDefault = false; + } + switch (field.kind) { + case "scalar": + case "enum": + let T = field.kind == "enum" ? 
reflection_info_1.ScalarType.INT32 : field.T; + if (repeated) { + assert_1.assert(Array.isArray(value)); + if (repeated == reflection_info_1.RepeatType.PACKED) + this.packed(writer, T, field.no, value); + else + for (const item of value) + this.scalar(writer, T, field.no, item, true); + } else if (value === void 0) + assert_1.assert(field.opt); + else + this.scalar(writer, T, field.no, value, emitDefault || field.opt); + break; + case "message": + if (repeated) { + assert_1.assert(Array.isArray(value)); + for (const item of value) + this.message(writer, options, field.T(), field.no, item); } else { - delete obj[key]; - } - if (Array.isArray(obj)) { - cp = keys.slice(0, -1).join("."); - if (this.cleanup.indexOf(cp) === -1) { - this.cleanup.push(cp); - } + this.message(writer, options, field.T(), field.no, value); } - return val; - } else { - return obj[key]; - } - } else { - obj = obj[key]; + break; + case "map": + assert_1.assert(typeof value == "object" && value !== null); + for (const [key, val] of Object.entries(value)) + this.mapEntry(writer, options, field, key, val); + break; } - } else { - return void 0; } + let u = options.writeUnknownFields; + if (u !== false) + (u === true ? binary_format_contract_1.UnknownFieldHandler.onWrite : u)(this.info.typeName, message, writer); } - if (remove && Array.isArray(obj)) { - obj = obj.filter(function(n) { - return n !== void 0; - }); - } - return obj; - }; - DotObject.prototype.delete = function(path2, obj) { - return this.remove(path2, obj, true); - }; - DotObject.prototype.remove = function(path2, obj, reindexArray) { - var i; - this.cleanup = []; - if (Array.isArray(path2)) { - for (i = 0; i < path2.length; i++) { - this.pick(path2[i], obj, true, reindexArray); - } - if (!reindexArray) { - this._cleanup(obj); + mapEntry(writer, options, field, key, value) { + writer.tag(field.no, binary_format_contract_1.WireType.LengthDelimited); + writer.fork(); + let keyValue = key; + switch (field.K) { + case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.UINT32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + keyValue = Number.parseInt(key); + break; + case reflection_info_1.ScalarType.BOOL: + assert_1.assert(key == "true" || key == "false"); + keyValue = key == "true"; + break; } - return obj; - } else { - return this.pick(path2, obj, true, reindexArray); - } - }; - DotObject.prototype._cleanup = function(obj) { - var ret; - var i; - var keys; - var root; - if (this.cleanup.length) { - for (i = 0; i < this.cleanup.length; i++) { - keys = this.cleanup[i].split("."); - root = keys.splice(0, -1).join("."); - ret = root ? 
this.pick(root, obj) : obj; - ret = ret[keys[0]].filter(function(v) { - return v !== void 0; - }); - this.set(this.cleanup[i], ret, obj); + this.scalar(writer, field.K, 1, keyValue, true); + switch (field.V.kind) { + case "scalar": + this.scalar(writer, field.V.T, 2, value, true); + break; + case "enum": + this.scalar(writer, reflection_info_1.ScalarType.INT32, 2, value, true); + break; + case "message": + this.message(writer, options, field.V.T(), 2, value); + break; } - this.cleanup = []; - } - }; - DotObject.prototype.del = DotObject.prototype.remove; - DotObject.prototype.move = function(source, target, obj, mods, merge) { - if (typeof mods === "function" || Array.isArray(mods)) { - this.set(target, _process(this.pick(source, obj, true), mods), obj, merge); - } else { - merge = mods; - this.set(target, this.pick(source, obj, true), obj, merge); + writer.join(); } - return obj; - }; - DotObject.prototype.transfer = function(source, target, obj1, obj2, mods, merge) { - if (typeof mods === "function" || Array.isArray(mods)) { - this.set( - target, - _process(this.pick(source, obj1, true), mods), - obj2, - merge - ); - } else { - merge = mods; - this.set(target, this.pick(source, obj1, true), obj2, merge); + message(writer, options, handler, fieldNo, value) { + if (value === void 0) + return; + handler.internalBinaryWrite(value, writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited).fork(), options); + writer.join(); } - return obj2; - }; - DotObject.prototype.copy = function(source, target, obj1, obj2, mods, merge) { - if (typeof mods === "function" || Array.isArray(mods)) { - this.set( - target, - _process( - // clone what is picked - JSON.parse(JSON.stringify(this.pick(source, obj1, false))), - mods - ), - obj2, - merge - ); - } else { - merge = mods; - this.set(target, this.pick(source, obj1, false), obj2, merge); + /** + * Write a single scalar value. + */ + scalar(writer, type, fieldNo, value, emitDefault) { + let [wireType, method, isDefault] = this.scalarInfo(type, value); + if (!isDefault || emitDefault) { + writer.tag(fieldNo, wireType); + writer[method](value); + } } - return obj2; - }; - DotObject.prototype.set = function(path2, val, obj, merge) { - var i; - var k; - var keys; - var key; - if (typeof val === "undefined") { - return obj; + /** + * Write an array of scalar values in packed format. + */ + packed(writer, type, fieldNo, value) { + if (!value.length) + return; + assert_1.assert(type !== reflection_info_1.ScalarType.BYTES && type !== reflection_info_1.ScalarType.STRING); + writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited); + writer.fork(); + let [, method] = this.scalarInfo(type); + for (let i = 0; i < value.length; i++) + writer[method](value[i]); + writer.join(); } - keys = parsePath(path2, this.separator); - for (i = 0; i < keys.length; i++) { - key = keys[i]; - if (i === keys.length - 1) { - if (merge && isObject(val) && isObject(obj[key])) { - for (k in val) { - if (hasOwnProperty.call(val, k)) { - obj[key][k] = val[k]; - } - } - } else if (merge && Array.isArray(obj[key]) && Array.isArray(val)) { - for (var j = 0; j < val.length; j++) { - obj[keys[i]].push(val[j]); - } - } else { - obj[key] = val; - } - } else if ( - // force the value to be an object - !hasOwnProperty.call(obj, key) || !isObject(obj[key]) && !Array.isArray(obj[key]) - ) { - if (/^\d+$/.test(keys[i + 1])) { - obj[key] = []; - } else { - obj[key] = {}; - } + /** + * Get information for writing a scalar value. 
+ * + * Returns tuple: + * [0]: appropriate WireType + * [1]: name of the appropriate method of IBinaryWriter + * [2]: whether the given value is a default value + * + * If argument `value` is omitted, [2] is always false. + */ + scalarInfo(type, value) { + let t = binary_format_contract_1.WireType.Varint; + let m; + let i = value === void 0; + let d = value === 0; + switch (type) { + case reflection_info_1.ScalarType.INT32: + m = "int32"; + break; + case reflection_info_1.ScalarType.STRING: + d = i || !value.length; + t = binary_format_contract_1.WireType.LengthDelimited; + m = "string"; + break; + case reflection_info_1.ScalarType.BOOL: + d = value === false; + m = "bool"; + break; + case reflection_info_1.ScalarType.UINT32: + m = "uint32"; + break; + case reflection_info_1.ScalarType.DOUBLE: + t = binary_format_contract_1.WireType.Bit64; + m = "double"; + break; + case reflection_info_1.ScalarType.FLOAT: + t = binary_format_contract_1.WireType.Bit32; + m = "float"; + break; + case reflection_info_1.ScalarType.INT64: + d = i || pb_long_1.PbLong.from(value).isZero(); + m = "int64"; + break; + case reflection_info_1.ScalarType.UINT64: + d = i || pb_long_1.PbULong.from(value).isZero(); + m = "uint64"; + break; + case reflection_info_1.ScalarType.FIXED64: + d = i || pb_long_1.PbULong.from(value).isZero(); + t = binary_format_contract_1.WireType.Bit64; + m = "fixed64"; + break; + case reflection_info_1.ScalarType.BYTES: + d = i || !value.byteLength; + t = binary_format_contract_1.WireType.LengthDelimited; + m = "bytes"; + break; + case reflection_info_1.ScalarType.FIXED32: + t = binary_format_contract_1.WireType.Bit32; + m = "fixed32"; + break; + case reflection_info_1.ScalarType.SFIXED32: + t = binary_format_contract_1.WireType.Bit32; + m = "sfixed32"; + break; + case reflection_info_1.ScalarType.SFIXED64: + d = i || pb_long_1.PbLong.from(value).isZero(); + t = binary_format_contract_1.WireType.Bit64; + m = "sfixed64"; + break; + case reflection_info_1.ScalarType.SINT32: + m = "sint32"; + break; + case reflection_info_1.ScalarType.SINT64: + d = i || pb_long_1.PbLong.from(value).isZero(); + m = "sint64"; + break; } - obj = obj[key]; + return [t, m, i || d]; } - return obj; - }; - DotObject.prototype.transform = function(recipe, obj, tgt) { - obj = obj || {}; - tgt = tgt || {}; - Object.keys(recipe).forEach( - function(key) { - this.set(recipe[key], this.pick(key, obj), tgt); - }.bind(this) - ); - return tgt; - }; - DotObject.prototype.dot = function(obj, tgt, path2) { - tgt = tgt || {}; - path2 = path2 || []; - var isArray = Array.isArray(obj); - Object.keys(obj).forEach( - function(key) { - var index = isArray && this.useBrackets ? 
"[" + key + "]" : key; - if (isArrayOrObject(obj[key]) && (isObject(obj[key]) && !isEmptyObject(obj[key]) || Array.isArray(obj[key]) && !this.keepArray && obj[key].length !== 0)) { - if (isArray && this.useBrackets) { - var previousKey = path2[path2.length - 1] || ""; - return this.dot( - obj[key], - tgt, - path2.slice(0, -1).concat(previousKey + index) - ); - } else { - return this.dot(obj[key], tgt, path2.concat(index)); - } - } else { - if (isArray && this.useBrackets) { - tgt[path2.join(this.separator).concat("[" + key + "]")] = obj[key]; - } else { - tgt[path2.concat(index).join(this.separator)] = obj[key]; - } - } - }.bind(this) - ); - return tgt; }; - DotObject.pick = wrap("pick"); - DotObject.move = wrap("move"); - DotObject.transfer = wrap("transfer"); - DotObject.transform = wrap("transform"); - DotObject.copy = wrap("copy"); - DotObject.object = wrap("object"); - DotObject.str = wrap("str"); - DotObject.set = wrap("set"); - DotObject.delete = wrap("delete"); - DotObject.del = DotObject.remove = wrap("remove"); - DotObject.dot = wrap("dot"); - ["override", "overwrite"].forEach(function(prop) { - Object.defineProperty(DotObject, prop, { - get: /* @__PURE__ */ __name(function() { - return dotDefault.override; - }, "get"), - set: /* @__PURE__ */ __name(function(val) { - dotDefault.override = !!val; - }, "set") - }); - }); - ["useArray", "keepArray", "useBrackets"].forEach(function(prop) { - Object.defineProperty(DotObject, prop, { - get: /* @__PURE__ */ __name(function() { - return dotDefault[prop]; - }, "get"), - set: /* @__PURE__ */ __name(function(val) { - dotDefault[prop] = val; - }, "set") - }); - }); - DotObject._process = _process; - module2.exports = DotObject; + exports2.ReflectionBinaryWriter = ReflectionBinaryWriter; } }); -// ../node_modules/twirp-ts/build/twirp/http.client.js -var require_http_client = __commonJS({ - "../node_modules/twirp-ts/build/twirp/http.client.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-create.js +var require_reflection_create = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-create.js"(exports2) { "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? 
value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.FetchRPC = exports2.wrapErrorResponseToTwirpError = exports2.NodeHttpRPC = void 0; - var http = __importStar3(require("http")); - var https = __importStar3(require("https")); - var url_1 = require("url"); - var errors_1 = require_errors3(); - var NodeHttpRPC = /* @__PURE__ */ __name((options) => ({ - request(service, method, contentType, data) { - let client; - return new Promise((resolve, rejected) => { - const responseChunks = []; - const requestData = contentType === "application/protobuf" ? Buffer.from(data) : JSON.stringify(data); - const url = new url_1.URL(options.baseUrl); - const isHttps = url.protocol === "https:"; - if (isHttps) { - client = https; - } else { - client = http; - } - const prefix = url.pathname !== "/" ? url.pathname : ""; - const req = client.request(Object.assign(Object.assign({}, options ? options : {}), { method: "POST", protocol: url.protocol, host: url.hostname, port: url.port ? url.port : isHttps ? 443 : 80, path: `${prefix}/${service}/${method}`, headers: Object.assign(Object.assign({}, options.headers ? options.headers : {}), { "Content-Type": contentType, "Content-Length": contentType === "application/protobuf" ? Buffer.byteLength(requestData) : Buffer.from(requestData).byteLength }) }), (res) => { - res.on("data", (chunk) => responseChunks.push(chunk)); - res.on("end", () => { - const data2 = Buffer.concat(responseChunks); - if (res.statusCode != 200) { - rejected(wrapErrorResponseToTwirpError(data2.toString())); - } else { - if (contentType === "application/json") { - resolve(JSON.parse(data2.toString())); - } else { - resolve(data2); - } - } - }); - res.on("error", (err) => { - rejected(err); - }); - }).on("error", (err) => { - rejected(err); - }); - req.end(requestData); - }); - } - }), "NodeHttpRPC"); - exports2.NodeHttpRPC = NodeHttpRPC; - function wrapErrorResponseToTwirpError(errorResponse) { - return errors_1.TwirpError.fromObject(JSON.parse(errorResponse)); - } - __name(wrapErrorResponseToTwirpError, "wrapErrorResponseToTwirpError"); - exports2.wrapErrorResponseToTwirpError = wrapErrorResponseToTwirpError; - var FetchRPC = /* @__PURE__ */ __name((options) => ({ - request(service, method, contentType, data) { - return __awaiter3(this, void 0, void 0, function* () { - const headers = new Headers(options.headers); - headers.set("content-type", contentType); - const response = yield fetch(`${options.baseUrl}/${service}/${method}`, Object.assign(Object.assign({}, options), { method: "POST", headers, body: data instanceof Uint8Array ? 
data : JSON.stringify(data) })); - if (response.status === 200) { - if (contentType === "application/json") { - return yield response.json(); - } - return new Uint8Array(yield response.arrayBuffer()); + exports2.reflectionCreate = void 0; + var reflection_scalar_default_1 = require_reflection_scalar_default(); + var message_type_contract_1 = require_message_type_contract(); + function reflectionCreate(type) { + const msg = type.messagePrototype ? Object.create(type.messagePrototype) : Object.defineProperty({}, message_type_contract_1.MESSAGE_TYPE, { value: type }); + for (let field of type.fields) { + let name = field.localName; + if (field.opt) + continue; + if (field.oneof) + msg[field.oneof] = { oneofKind: void 0 }; + else if (field.repeat) + msg[name] = []; + else + switch (field.kind) { + case "scalar": + msg[name] = reflection_scalar_default_1.reflectionScalarDefault(field.T, field.L); + break; + case "enum": + msg[name] = 0; + break; + case "map": + msg[name] = {}; + break; } - throw errors_1.TwirpError.fromObject(yield response.json()); - }); } - }), "FetchRPC"); - exports2.FetchRPC = FetchRPC; + return msg; + } + __name(reflectionCreate, "reflectionCreate"); + exports2.reflectionCreate = reflectionCreate; } }); -// ../node_modules/twirp-ts/build/twirp/gateway.js -var require_gateway = __commonJS({ - "../node_modules/twirp-ts/build/twirp/gateway.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-merge-partial.js +var require_reflection_merge_partial = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-merge-partial.js"(exports2) { "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.reflectionMergePartial = void 0; + function reflectionMergePartial(info, target, source) { + let fieldValue, input = source, output; + for (let field of info.fields) { + let name = field.localName; + if (field.oneof) { + const group = input[field.oneof]; + if ((group === null || group === void 0 ? 
void 0 : group.oneofKind) == void 0) { + continue; } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); + fieldValue = group[name]; + output = target[field.oneof]; + output.oneofKind = group.oneofKind; + if (fieldValue == void 0) { + delete output[name]; + continue; + } + } else { + fieldValue = input[name]; + output = target; + if (fieldValue == void 0) { + continue; } } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + if (field.repeat) + output[name].length = fieldValue.length; + switch (field.kind) { + case "scalar": + case "enum": + if (field.repeat) + for (let i = 0; i < fieldValue.length; i++) + output[name][i] = fieldValue[i]; + else + output[name] = fieldValue; + break; + case "message": + let T = field.T(); + if (field.repeat) + for (let i = 0; i < fieldValue.length; i++) + output[name][i] = T.create(fieldValue[i]); + else if (output[name] === void 0) + output[name] = T.create(fieldValue); + else + T.mergePartial(output[name], fieldValue); + break; + case "map": + switch (field.V.kind) { + case "scalar": + case "enum": + Object.assign(output[name], fieldValue); + break; + case "message": + let T2 = field.V.T(); + for (let k of Object.keys(fieldValue)) + output[name][k] = T2.create(fieldValue[k]); + break; + } + break; } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - var __rest2 = exports2 && exports2.__rest || function(s, e) { - var t = {}; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) - t[p] = s[p]; - if (s != null && typeof Object.getOwnPropertySymbols === "function") - for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { - if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) - t[p[i]] = s[p[i]]; + } + } + __name(reflectionMergePartial, "reflectionMergePartial"); + exports2.reflectionMergePartial = reflectionMergePartial; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-equals.js +var require_reflection_equals = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-equals.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.reflectionEquals = void 0; + var reflection_info_1 = require_reflection_info(); + function reflectionEquals(info, a, b) { + if (a === b) + return true; + if (!a || !b) + return false; + for (let field of info.fields) { + let localName = field.localName; + let val_a = field.oneof ? a[field.oneof][localName] : a[localName]; + let val_b = field.oneof ? b[field.oneof][localName] : b[localName]; + switch (field.kind) { + case "enum": + case "scalar": + let t = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; + if (!(field.repeat ? repeatedPrimitiveEq(t, val_a, val_b) : primitiveEq(t, val_a, val_b))) + return false; + break; + case "map": + if (!(field.V.kind == "message" ? repeatedMsgEq(field.V.T(), objectValues(val_a), objectValues(val_b)) : repeatedPrimitiveEq(field.V.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.V.T, objectValues(val_a), objectValues(val_b)))) + return false; + break; + case "message": + let T = field.T(); + if (!(field.repeat ? 
repeatedMsgEq(T, val_a, val_b) : T.equals(val_a, val_b))) + return false; + break; } - return t; - }; + } + return true; + } + __name(reflectionEquals, "reflectionEquals"); + exports2.reflectionEquals = reflectionEquals; + var objectValues = Object.values; + function primitiveEq(type, a, b) { + if (a === b) + return true; + if (type !== reflection_info_1.ScalarType.BYTES) + return false; + let ba = a; + let bb = b; + if (ba.length !== bb.length) + return false; + for (let i = 0; i < ba.length; i++) + if (ba[i] != bb[i]) + return false; + return true; + } + __name(primitiveEq, "primitiveEq"); + function repeatedPrimitiveEq(type, a, b) { + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; i++) + if (!primitiveEq(type, a[i], b[i])) + return false; + return true; + } + __name(repeatedPrimitiveEq, "repeatedPrimitiveEq"); + function repeatedMsgEq(type, a, b) { + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; i++) + if (!type.equals(a[i], b[i])) + return false; + return true; + } + __name(repeatedMsgEq, "repeatedMsgEq"); + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/message-type.js +var require_message_type = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/message-type.js"(exports2) { + "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Gateway = exports2.Pattern = void 0; - var querystring_1 = require("querystring"); - var dotObject = __importStar3(require_dot_object()); - var request_1 = require_request3(); - var errors_1 = require_errors3(); - var http_client_1 = require_http_client(); - var server_1 = require_server(); - var Pattern; - (function(Pattern2) { - Pattern2["POST"] = "post"; - Pattern2["GET"] = "get"; - Pattern2["PATCH"] = "patch"; - Pattern2["PUT"] = "put"; - Pattern2["DELETE"] = "delete"; - })(Pattern = exports2.Pattern || (exports2.Pattern = {})); - var Gateway = class { + exports2.MessageType = void 0; + var message_type_contract_1 = require_message_type_contract(); + var reflection_info_1 = require_reflection_info(); + var reflection_type_check_1 = require_reflection_type_check(); + var reflection_json_reader_1 = require_reflection_json_reader(); + var reflection_json_writer_1 = require_reflection_json_writer(); + var reflection_binary_reader_1 = require_reflection_binary_reader(); + var reflection_binary_writer_1 = require_reflection_binary_writer(); + var reflection_create_1 = require_reflection_create(); + var reflection_merge_partial_1 = require_reflection_merge_partial(); + var json_typings_1 = require_json_typings(); + var json_format_contract_1 = require_json_format_contract(); + var reflection_equals_1 = require_reflection_equals(); + var binary_writer_1 = require_binary_writer(); + var binary_reader_1 = require_binary_reader(); + var baseDescriptors = Object.getOwnPropertyDescriptors(Object.getPrototypeOf({})); + var MessageType = class { static { - __name(this, "Gateway"); + __name(this, "MessageType"); } - constructor(routes) { - this.routes = routes; + constructor(name, fields, options) { + this.defaultCheckDepth = 16; + this.typeName = name; + this.fields = fields.map(reflection_info_1.normalizeFieldInfo); + this.options = options !== null && options !== void 0 ? 
options : {}; + this.messagePrototype = Object.create(null, Object.assign(Object.assign({}, baseDescriptors), { [message_type_contract_1.MESSAGE_TYPE]: { value: this } })); + this.refTypeCheck = new reflection_type_check_1.ReflectionTypeCheck(this); + this.refJsonReader = new reflection_json_reader_1.ReflectionJsonReader(this); + this.refJsonWriter = new reflection_json_writer_1.ReflectionJsonWriter(this); + this.refBinReader = new reflection_binary_reader_1.ReflectionBinaryReader(this); + this.refBinWriter = new reflection_binary_writer_1.ReflectionBinaryWriter(this); + } + create(value) { + let message = reflection_create_1.reflectionCreate(this); + if (value !== void 0) { + reflection_merge_partial_1.reflectionMergePartial(this, message, value); + } + return message; } /** - * Middleware that rewrite the current request - * to a Twirp compliant request + * Clone the message. + * + * Unknown fields are discarded. */ - twirpRewrite(prefix = "/twirp") { - return (req, resp, next) => { - this.rewrite(req, resp, prefix).then(() => next()).catch((e) => { - if (e instanceof errors_1.TwirpError) { - if (e.code !== errors_1.TwirpErrorCode.NotFound) { - server_1.writeError(resp, e); - } else { - next(); - } - } - }); - }; + clone(message) { + let copy = this.create(); + reflection_merge_partial_1.reflectionMergePartial(this, copy, message); + return copy; } /** - * Rewrite an incoming request to a Twirp compliant request - * @param req - * @param resp - * @param prefix + * Determines whether two message of the same type have the same field values. + * Checks for deep equality, traversing repeated fields, oneof groups, maps + * and messages recursively. + * Will also return true if both messages are `undefined`. */ - rewrite(req, resp, prefix = "/twirp") { - return __awaiter3(this, void 0, void 0, function* () { - const [match, route] = this.matchRoute(req); - const body = yield this.prepareTwirpBody(req, match, route); - const twirpUrl = `${prefix}/${route.packageName}.${route.serviceName}/${route.methodName}`; - req.url = twirpUrl; - req.originalUrl = twirpUrl; - req.method = "POST"; - req.headers["content-type"] = "application/json"; - req.rawBody = Buffer.from(JSON.stringify(body)); - if (route.responseBodyKey) { - const endFn = resp.end.bind(resp); - resp.end = function(chunk) { - if (resp.statusCode === 200) { - endFn(`{ "${route.responseBodyKey}": ${chunk} }`); - } else { - endFn(chunk); - } - }; - } - }); + equals(a, b) { + return reflection_equals_1.reflectionEquals(this, a, b); } /** - * Create a reverse proxy handler to - * proxy http requests to Twirp Compliant handlers - * @param httpClientOption + * Is the given value assignable to our message type + * and contains no [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? 
*/ - reverseProxy(httpClientOption) { - const client = http_client_1.NodeHttpRPC(httpClientOption); - return (req, res) => __awaiter3(this, void 0, void 0, function* () { - try { - const [match, route] = this.matchRoute(req); - const body = yield this.prepareTwirpBody(req, match, route); - const response = yield client.request(`${route.packageName}.${route.serviceName}`, route.methodName, "application/json", body); - res.statusCode = 200; - res.setHeader("content-type", "application/json"); - let jsonResponse; - if (route.responseBodyKey) { - jsonResponse = JSON.stringify({ [route.responseBodyKey]: response }); - } else { - jsonResponse = JSON.stringify(response); - } - res.end(jsonResponse); - } catch (e) { - server_1.writeError(res, e); - } - }); + is(arg, depth = this.defaultCheckDepth) { + return this.refTypeCheck.is(arg, depth, false); } /** - * Prepares twirp body requests using http.google.annotions - * compliant spec - * - * @param req - * @param match - * @param route - * @protected + * Is the given value assignable to our message type, + * regardless of [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? */ - prepareTwirpBody(req, match, route) { - return __awaiter3(this, void 0, void 0, function* () { - const _a = match.params, { query_string } = _a, params = __rest2(_a, ["query_string"]); - let requestBody = Object.assign({}, params); - if (query_string && route.bodyKey !== "*") { - const queryParams = this.parseQueryString(query_string); - requestBody = Object.assign(Object.assign({}, queryParams), requestBody); - } - let body = {}; - if (route.bodyKey) { - const data = yield request_1.getRequestData(req); - try { - const jsonBody = JSON.parse(data.toString() || "{}"); - if (route.bodyKey === "*") { - body = jsonBody; - } else { - body[route.bodyKey] = jsonBody; - } - } catch (e) { - const msg = "the json request could not be decoded"; - throw new errors_1.TwirpError(errors_1.TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - return Object.assign(Object.assign({}, body), requestBody); - }); + isAssignable(arg, depth = this.defaultCheckDepth) { + return this.refTypeCheck.is(arg, depth, true); } /** - * Matches a route - * @param req + * Copy partial data into the target message. */ - matchRoute(req) { + mergePartial(target, source) { + reflection_merge_partial_1.reflectionMergePartial(this, target, source); + } + /** + * Create a new message from binary format. + */ + fromBinary(data, options) { + let opt = binary_reader_1.binaryReadOptions(options); + return this.internalBinaryRead(opt.readerFactory(data), data.byteLength, opt); + } + /** + * Read a new message from a JSON value. + */ + fromJson(json, options) { + return this.internalJsonRead(json, json_format_contract_1.jsonReadOptions(options)); + } + /** + * Read a new message from a JSON string. + * This is equivalent to `T.fromJson(JSON.parse(json))`. + */ + fromJsonString(json, options) { + let value = JSON.parse(json); + return this.fromJson(value, options); + } + /** + * Write the message to canonical JSON value. + */ + toJson(message, options) { + return this.internalJsonWrite(message, json_format_contract_1.jsonWriteOptions(options)); + } + /** + * Convert the message to canonical JSON string. + * This is equivalent to `JSON.stringify(T.toJson(t))` + */ + toJsonString(message, options) { var _a; - const httpMethod = (_a = req.method) === null || _a === void 0 ? 
void 0 : _a.toLowerCase(); - if (!httpMethod) { - throw new errors_1.BadRouteError(`method not allowed`, req.method || "", req.url || ""); - } - const routes = this.routes[httpMethod]; - for (const route of routes) { - const match = route.matcher(req.url || "/"); - if (match) { - return [match, route]; - } + let value = this.toJson(message, options); + return JSON.stringify(value, null, (_a = options === null || options === void 0 ? void 0 : options.prettySpaces) !== null && _a !== void 0 ? _a : 0); + } + /** + * Write the message to binary format. + */ + toBinary(message, options) { + let opt = binary_writer_1.binaryWriteOptions(options); + return this.internalBinaryWrite(message, opt.writerFactory(), opt).finish(); + } + /** + * This is an internal method. If you just want to read a message from + * JSON, use `fromJson()` or `fromJsonString()`. + * + * Reads JSON value and merges the fields into the target + * according to protobuf rules. If the target is omitted, + * a new instance is created first. + */ + internalJsonRead(json, options, target) { + if (json !== null && typeof json == "object" && !Array.isArray(json)) { + let message = target !== null && target !== void 0 ? target : this.create(); + this.refJsonReader.read(json, message, options); + return message; } - throw new errors_1.NotFoundError(`url ${req.url} not found`); + throw new Error(`Unable to parse message ${this.typeName} from JSON ${json_typings_1.typeofJsonValue(json)}.`); + } + /** + * This is an internal method. If you just want to write a message + * to JSON, use `toJson()` or `toJsonString(). + * + * Writes JSON value and returns it. + */ + internalJsonWrite(message, options) { + return this.refJsonWriter.write(message, options); + } + /** + * This is an internal method. If you just want to write a message + * in binary format, use `toBinary()`. + * + * Serializes the message in binary format and appends it to the given + * writer. Returns passed writer. + */ + internalBinaryWrite(message, writer, options) { + this.refBinWriter.write(message, writer, options); + return writer; + } + /** + * This is an internal method. If you just want to read a message from + * binary data, use `fromBinary()`. + * + * Reads data from binary format and merges the fields into + * the target according to protobuf rules. If the target is + * omitted, a new instance is created first. + */ + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? 
target : this.create(); + this.refBinReader.read(reader, message, options, length); + return message; + } + }; + exports2.MessageType = MessageType; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-contains-message-type.js +var require_reflection_contains_message_type = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-contains-message-type.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.containsMessageType = void 0; + var message_type_contract_1 = require_message_type_contract(); + function containsMessageType(msg) { + return msg[message_type_contract_1.MESSAGE_TYPE] != null; + } + __name(containsMessageType, "containsMessageType"); + exports2.containsMessageType = containsMessageType; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/enum-object.js +var require_enum_object = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/enum-object.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.listEnumNumbers = exports2.listEnumNames = exports2.listEnumValues = exports2.isEnumObject = void 0; + function isEnumObject(arg) { + if (typeof arg != "object" || arg === null) { + return false; } - /** - * Parse query string - * @param queryString - */ - parseQueryString(queryString) { - const queryParams = querystring_1.parse(queryString.replace("?", "")); - return dotObject.object(queryParams); + if (!arg.hasOwnProperty(0)) { + return false; } - }; - exports2.Gateway = Gateway; + for (let k of Object.keys(arg)) { + let num = parseInt(k); + if (!Number.isNaN(num)) { + let nam = arg[num]; + if (nam === void 0) + return false; + if (arg[nam] !== num) + return false; + } else { + let num2 = arg[k]; + if (num2 === void 0) + return false; + if (typeof num2 !== "number") + return false; + if (arg[num2] === void 0) + return false; + } + } + return true; + } + __name(isEnumObject, "isEnumObject"); + exports2.isEnumObject = isEnumObject; + function listEnumValues(enumObject) { + if (!isEnumObject(enumObject)) + throw new Error("not a typescript enum object"); + let values = []; + for (let [name, number] of Object.entries(enumObject)) + if (typeof number == "number") + values.push({ name, number }); + return values; + } + __name(listEnumValues, "listEnumValues"); + exports2.listEnumValues = listEnumValues; + function listEnumNames(enumObject) { + return listEnumValues(enumObject).map((val) => val.name); + } + __name(listEnumNames, "listEnumNames"); + exports2.listEnumNames = listEnumNames; + function listEnumNumbers(enumObject) { + return listEnumValues(enumObject).map((val) => val.number).filter((num, index, arr) => arr.indexOf(num) == index); + } + __name(listEnumNumbers, "listEnumNumbers"); + exports2.listEnumNumbers = listEnumNumbers; } }); -// ../node_modules/twirp-ts/build/twirp/index.js -var require_twirp = __commonJS({ - "../node_modules/twirp-ts/build/twirp/index.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/index.js +var require_commonjs7 = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/index.js"(exports2) { "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? 
function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __exportStar2 = exports2 && exports2.__exportStar || function(m, exports3) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports3, p)) __createBinding3(exports3, m, p); - }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.TwirpContentType = void 0; - __exportStar2(require_context2(), exports2); - __exportStar2(require_server(), exports2); - __exportStar2(require_interceptors(), exports2); - __exportStar2(require_hooks(), exports2); - __exportStar2(require_errors3(), exports2); - __exportStar2(require_gateway(), exports2); - __exportStar2(require_http_client(), exports2); - var request_1 = require_request3(); - Object.defineProperty(exports2, "TwirpContentType", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return request_1.TwirpContentType; + var json_typings_1 = require_json_typings(); + Object.defineProperty(exports2, "typeofJsonValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return json_typings_1.typeofJsonValue; + }, "get") }); + Object.defineProperty(exports2, "isJsonObject", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return json_typings_1.isJsonObject; + }, "get") }); + var base64_1 = require_base642(); + Object.defineProperty(exports2, "base64decode", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return base64_1.base64decode; + }, "get") }); + Object.defineProperty(exports2, "base64encode", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return base64_1.base64encode; + }, "get") }); + var protobufjs_utf8_1 = require_protobufjs_utf8(); + Object.defineProperty(exports2, "utf8read", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return protobufjs_utf8_1.utf8read; + }, "get") }); + var binary_format_contract_1 = require_binary_format_contract(); + Object.defineProperty(exports2, "WireType", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return binary_format_contract_1.WireType; + }, "get") }); + Object.defineProperty(exports2, "mergeBinaryOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return binary_format_contract_1.mergeBinaryOptions; + }, "get") }); + Object.defineProperty(exports2, "UnknownFieldHandler", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return binary_format_contract_1.UnknownFieldHandler; + }, "get") }); + var binary_reader_1 = require_binary_reader(); + Object.defineProperty(exports2, "BinaryReader", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return binary_reader_1.BinaryReader; + }, "get") }); + Object.defineProperty(exports2, "binaryReadOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return binary_reader_1.binaryReadOptions; + }, "get") }); + var binary_writer_1 = require_binary_writer(); + Object.defineProperty(exports2, "BinaryWriter", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return binary_writer_1.BinaryWriter; + }, "get") }); + Object.defineProperty(exports2, "binaryWriteOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return binary_writer_1.binaryWriteOptions; + }, "get") }); + var pb_long_1 = require_pb_long(); + Object.defineProperty(exports2, "PbLong", { enumerable: true, get: /* 
@__PURE__ */ __name(function() { + return pb_long_1.PbLong; + }, "get") }); + Object.defineProperty(exports2, "PbULong", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return pb_long_1.PbULong; + }, "get") }); + var json_format_contract_1 = require_json_format_contract(); + Object.defineProperty(exports2, "jsonReadOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return json_format_contract_1.jsonReadOptions; + }, "get") }); + Object.defineProperty(exports2, "jsonWriteOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return json_format_contract_1.jsonWriteOptions; + }, "get") }); + Object.defineProperty(exports2, "mergeJsonOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return json_format_contract_1.mergeJsonOptions; + }, "get") }); + var message_type_contract_1 = require_message_type_contract(); + Object.defineProperty(exports2, "MESSAGE_TYPE", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return message_type_contract_1.MESSAGE_TYPE; + }, "get") }); + var message_type_1 = require_message_type(); + Object.defineProperty(exports2, "MessageType", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return message_type_1.MessageType; + }, "get") }); + var reflection_info_1 = require_reflection_info(); + Object.defineProperty(exports2, "ScalarType", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.ScalarType; + }, "get") }); + Object.defineProperty(exports2, "LongType", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.LongType; + }, "get") }); + Object.defineProperty(exports2, "RepeatType", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.RepeatType; + }, "get") }); + Object.defineProperty(exports2, "normalizeFieldInfo", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.normalizeFieldInfo; + }, "get") }); + Object.defineProperty(exports2, "readFieldOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.readFieldOptions; + }, "get") }); + Object.defineProperty(exports2, "readFieldOption", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.readFieldOption; + }, "get") }); + Object.defineProperty(exports2, "readMessageOption", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.readMessageOption; + }, "get") }); + var reflection_type_check_1 = require_reflection_type_check(); + Object.defineProperty(exports2, "ReflectionTypeCheck", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_type_check_1.ReflectionTypeCheck; + }, "get") }); + var reflection_create_1 = require_reflection_create(); + Object.defineProperty(exports2, "reflectionCreate", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_create_1.reflectionCreate; + }, "get") }); + var reflection_scalar_default_1 = require_reflection_scalar_default(); + Object.defineProperty(exports2, "reflectionScalarDefault", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_scalar_default_1.reflectionScalarDefault; + }, "get") }); + var reflection_merge_partial_1 = require_reflection_merge_partial(); + Object.defineProperty(exports2, "reflectionMergePartial", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_merge_partial_1.reflectionMergePartial; + }, "get") 
}); + var reflection_equals_1 = require_reflection_equals(); + Object.defineProperty(exports2, "reflectionEquals", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_equals_1.reflectionEquals; + }, "get") }); + var reflection_binary_reader_1 = require_reflection_binary_reader(); + Object.defineProperty(exports2, "ReflectionBinaryReader", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_binary_reader_1.ReflectionBinaryReader; + }, "get") }); + var reflection_binary_writer_1 = require_reflection_binary_writer(); + Object.defineProperty(exports2, "ReflectionBinaryWriter", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_binary_writer_1.ReflectionBinaryWriter; + }, "get") }); + var reflection_json_reader_1 = require_reflection_json_reader(); + Object.defineProperty(exports2, "ReflectionJsonReader", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_json_reader_1.ReflectionJsonReader; + }, "get") }); + var reflection_json_writer_1 = require_reflection_json_writer(); + Object.defineProperty(exports2, "ReflectionJsonWriter", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_json_writer_1.ReflectionJsonWriter; + }, "get") }); + var reflection_contains_message_type_1 = require_reflection_contains_message_type(); + Object.defineProperty(exports2, "containsMessageType", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_contains_message_type_1.containsMessageType; + }, "get") }); + var oneof_1 = require_oneof(); + Object.defineProperty(exports2, "isOneofGroup", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return oneof_1.isOneofGroup; + }, "get") }); + Object.defineProperty(exports2, "setOneofValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return oneof_1.setOneofValue; + }, "get") }); + Object.defineProperty(exports2, "getOneofValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return oneof_1.getOneofValue; + }, "get") }); + Object.defineProperty(exports2, "clearOneofValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return oneof_1.clearOneofValue; + }, "get") }); + Object.defineProperty(exports2, "getSelectedOneofValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return oneof_1.getSelectedOneofValue; + }, "get") }); + var enum_object_1 = require_enum_object(); + Object.defineProperty(exports2, "listEnumValues", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return enum_object_1.listEnumValues; + }, "get") }); + Object.defineProperty(exports2, "listEnumNames", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return enum_object_1.listEnumNames; + }, "get") }); + Object.defineProperty(exports2, "listEnumNumbers", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return enum_object_1.listEnumNumbers; + }, "get") }); + Object.defineProperty(exports2, "isEnumObject", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return enum_object_1.isEnumObject; + }, "get") }); + var lower_camel_case_1 = require_lower_camel_case(); + Object.defineProperty(exports2, "lowerCamelCase", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return lower_camel_case_1.lowerCamelCase; + }, "get") }); + var assert_1 = require_assert(); + Object.defineProperty(exports2, "assert", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return assert_1.assert; + }, "get") }); + 
Object.defineProperty(exports2, "assertNever", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return assert_1.assertNever; + }, "get") }); + Object.defineProperty(exports2, "assertInt32", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return assert_1.assertInt32; + }, "get") }); + Object.defineProperty(exports2, "assertUInt32", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return assert_1.assertUInt32; + }, "get") }); + Object.defineProperty(exports2, "assertFloat32", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return assert_1.assertFloat32; }, "get") }); } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/json-typings.js -var require_json_typings = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/json-typings.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/reflection-info.js +var require_reflection_info2 = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/reflection-info.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isJsonObject = exports2.typeofJsonValue = void 0; - function typeofJsonValue(value) { - let t = typeof value; - if (t == "object") { - if (Array.isArray(value)) - return "array"; - if (value === null) - return "null"; + exports2.readServiceOption = exports2.readMethodOption = exports2.readMethodOptions = exports2.normalizeMethodInfo = void 0; + var runtime_1 = require_commonjs7(); + function normalizeMethodInfo(method, service) { + var _a, _b, _c; + let m = method; + m.service = service; + m.localName = (_a = m.localName) !== null && _a !== void 0 ? _a : runtime_1.lowerCamelCase(m.name); + m.serverStreaming = !!m.serverStreaming; + m.clientStreaming = !!m.clientStreaming; + m.options = (_b = m.options) !== null && _b !== void 0 ? _b : {}; + m.idempotency = (_c = m.idempotency) !== null && _c !== void 0 ? _c : void 0; + return m; + } + __name(normalizeMethodInfo, "normalizeMethodInfo"); + exports2.normalizeMethodInfo = normalizeMethodInfo; + function readMethodOptions(service, methodName, extensionName, extensionType) { + var _a; + const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; + return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : void 0; + } + __name(readMethodOptions, "readMethodOptions"); + exports2.readMethodOptions = readMethodOptions; + function readMethodOption(service, methodName, extensionName, extensionType) { + var _a; + const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; + if (!options) { + return void 0; } - return t; + const optionVal = options[extensionName]; + if (optionVal === void 0) { + return optionVal; + } + return extensionType ? 
extensionType.fromJson(optionVal) : optionVal; } - __name(typeofJsonValue, "typeofJsonValue"); - exports2.typeofJsonValue = typeofJsonValue; - function isJsonObject(value) { - return value !== null && typeof value == "object" && !Array.isArray(value); + __name(readMethodOption, "readMethodOption"); + exports2.readMethodOption = readMethodOption; + function readServiceOption(service, extensionName, extensionType) { + const options = service.options; + if (!options) { + return void 0; + } + const optionVal = options[extensionName]; + if (optionVal === void 0) { + return optionVal; + } + return extensionType ? extensionType.fromJson(optionVal) : optionVal; } - __name(isJsonObject, "isJsonObject"); - exports2.isJsonObject = isJsonObject; + __name(readServiceOption, "readServiceOption"); + exports2.readServiceOption = readServiceOption; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/base64.js -var require_base642 = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/base64.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/service-type.js +var require_service_type = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/service-type.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.base64encode = exports2.base64decode = void 0; - var encTable = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split(""); - var decTable = []; - for (let i = 0; i < encTable.length; i++) - decTable[encTable[i].charCodeAt(0)] = i; - decTable["-".charCodeAt(0)] = encTable.indexOf("+"); - decTable["_".charCodeAt(0)] = encTable.indexOf("/"); - function base64decode(base64Str) { - let es = base64Str.length * 3 / 4; - if (base64Str[base64Str.length - 2] == "=") - es -= 2; - else if (base64Str[base64Str.length - 1] == "=") - es -= 1; - let bytes = new Uint8Array(es), bytePos = 0, groupPos = 0, b, p = 0; - for (let i = 0; i < base64Str.length; i++) { - b = decTable[base64Str.charCodeAt(i)]; - if (b === void 0) { - switch (base64Str[i]) { - case "=": - groupPos = 0; - // reset state when padding found - case "\n": - case "\r": - case " ": - case " ": - continue; - // skip white-space, and padding - default: - throw Error(`invalid base64 string.`); - } + exports2.ServiceType = void 0; + var reflection_info_1 = require_reflection_info2(); + var ServiceType = class { + static { + __name(this, "ServiceType"); + } + constructor(typeName, methods, options) { + this.typeName = typeName; + this.methods = methods.map((i) => reflection_info_1.normalizeMethodInfo(i, this)); + this.options = options !== null && options !== void 0 ? options : {}; + } + }; + exports2.ServiceType = ServiceType; + } +}); + +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-error.js +var require_rpc_error = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-error.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.RpcError = void 0; + var RpcError = class extends Error { + static { + __name(this, "RpcError"); + } + constructor(message, code = "UNKNOWN", meta) { + super(message); + this.name = "RpcError"; + Object.setPrototypeOf(this, new.target.prototype); + this.code = code; + this.meta = meta !== null && meta !== void 0 ? 
meta : {}; + } + toString() { + const l = [this.name + ": " + this.message]; + if (this.code) { + l.push(""); + l.push("Code: " + this.code); } - switch (groupPos) { - case 0: - p = b; - groupPos = 1; - break; - case 1: - bytes[bytePos++] = p << 2 | (b & 48) >> 4; - p = b; - groupPos = 2; - break; - case 2: - bytes[bytePos++] = (p & 15) << 4 | (b & 60) >> 2; - p = b; - groupPos = 3; - break; - case 3: - bytes[bytePos++] = (p & 3) << 6 | b; - groupPos = 0; - break; + if (this.serviceName && this.methodName) { + l.push("Method: " + this.serviceName + "/" + this.methodName); } - } - if (groupPos == 1) - throw Error(`invalid base64 string.`); - return bytes.subarray(0, bytePos); - } - __name(base64decode, "base64decode"); - exports2.base64decode = base64decode; - function base64encode(bytes) { - let base64 = "", groupPos = 0, b, p = 0; - for (let i = 0; i < bytes.length; i++) { - b = bytes[i]; - switch (groupPos) { - case 0: - base64 += encTable[b >> 2]; - p = (b & 3) << 4; - groupPos = 1; - break; - case 1: - base64 += encTable[p | b >> 4]; - p = (b & 15) << 2; - groupPos = 2; - break; - case 2: - base64 += encTable[p | b >> 6]; - base64 += encTable[b & 63]; - groupPos = 0; - break; + let m = Object.entries(this.meta); + if (m.length) { + l.push(""); + l.push("Meta:"); + for (let [k, v] of m) { + l.push(` ${k}: ${v}`); + } } + return l.join("\n"); } - if (groupPos) { - base64 += encTable[p]; - base64 += "="; - if (groupPos == 1) - base64 += "="; - } - return base64; - } - __name(base64encode, "base64encode"); - exports2.base64encode = base64encode; + }; + exports2.RpcError = RpcError; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/protobufjs-utf8.js -var require_protobufjs_utf8 = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/protobufjs-utf8.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-options.js +var require_rpc_options = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-options.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.utf8read = void 0; - var fromCharCodes = /* @__PURE__ */ __name((chunk) => String.fromCharCode.apply(String, chunk), "fromCharCodes"); - function utf8read(bytes) { - if (bytes.length < 1) - return ""; - let pos = 0, parts = [], chunk = [], i = 0, t; - let len = bytes.length; - while (pos < len) { - t = bytes[pos++]; - if (t < 128) - chunk[i++] = t; - else if (t > 191 && t < 224) - chunk[i++] = (t & 31) << 6 | bytes[pos++] & 63; - else if (t > 239 && t < 365) { - t = ((t & 7) << 18 | (bytes[pos++] & 63) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63) - 65536; - chunk[i++] = 55296 + (t >> 10); - chunk[i++] = 56320 + (t & 1023); - } else - chunk[i++] = (t & 15) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63; - if (i > 8191) { - parts.push(fromCharCodes(chunk)); - i = 0; + exports2.mergeRpcOptions = void 0; + var runtime_1 = require_commonjs7(); + function mergeRpcOptions(defaults, options) { + if (!options) + return defaults; + let o = {}; + copy(defaults, o); + copy(options, o); + for (let key of Object.keys(options)) { + let val = options[key]; + switch (key) { + case "jsonOptions": + o.jsonOptions = runtime_1.mergeJsonOptions(defaults.jsonOptions, o.jsonOptions); + break; + case "binaryOptions": + o.binaryOptions = runtime_1.mergeBinaryOptions(defaults.binaryOptions, o.binaryOptions); + break; + case "meta": + o.meta = {}; + copy(defaults.meta, o.meta); + copy(options.meta, o.meta); + break; + case 
"interceptors": + o.interceptors = defaults.interceptors ? defaults.interceptors.concat(val) : val.concat(); + break; } } - if (parts.length) { - if (i) - parts.push(fromCharCodes(chunk.slice(0, i))); - return parts.join(""); + return o; + } + __name(mergeRpcOptions, "mergeRpcOptions"); + exports2.mergeRpcOptions = mergeRpcOptions; + function copy(a, into) { + if (!a) + return; + let c = into; + for (let [k, v] of Object.entries(a)) { + if (v instanceof Date) + c[k] = new Date(v.getTime()); + else if (Array.isArray(v)) + c[k] = v.concat(); + else + c[k] = v; } - return fromCharCodes(chunk.slice(0, i)); } - __name(utf8read, "utf8read"); - exports2.utf8read = utf8read; + __name(copy, "copy"); } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/binary-format-contract.js -var require_binary_format_contract = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/binary-format-contract.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/deferred.js +var require_deferred = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/deferred.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.WireType = exports2.mergeBinaryOptions = exports2.UnknownFieldHandler = void 0; - var UnknownFieldHandler; - (function(UnknownFieldHandler2) { - UnknownFieldHandler2.symbol = Symbol.for("protobuf-ts/unknown"); - UnknownFieldHandler2.onRead = (typeName, message, fieldNo, wireType, data) => { - let container = is(message) ? message[UnknownFieldHandler2.symbol] : message[UnknownFieldHandler2.symbol] = []; - container.push({ no: fieldNo, wireType, data }); - }; - UnknownFieldHandler2.onWrite = (typeName, message, writer) => { - for (let { no, wireType, data } of UnknownFieldHandler2.list(message)) - writer.tag(no, wireType).raw(data); - }; - UnknownFieldHandler2.list = (message, fieldNo) => { - if (is(message)) { - let all = message[UnknownFieldHandler2.symbol]; - return fieldNo ? all.filter((uf) => uf.no == fieldNo) : all; + exports2.Deferred = exports2.DeferredState = void 0; + var DeferredState; + (function(DeferredState2) { + DeferredState2[DeferredState2["PENDING"] = 0] = "PENDING"; + DeferredState2[DeferredState2["REJECTED"] = 1] = "REJECTED"; + DeferredState2[DeferredState2["RESOLVED"] = 2] = "RESOLVED"; + })(DeferredState = exports2.DeferredState || (exports2.DeferredState = {})); + var Deferred = class { + static { + __name(this, "Deferred"); + } + /** + * @param preventUnhandledRejectionWarning - prevents the warning + * "Unhandled Promise rejection" by adding a noop rejection handler. + * Working with calls returned from the runtime-rpc package in an + * async function usually means awaiting one call property after + * the other. This means that the "status" is not being awaited when + * an earlier await for the "headers" is rejected. This causes the + * "unhandled promise reject" warning. A more correct behaviour for + * calls might be to become aware whether at least one of the + * promises is handled and swallow the rejection warning for the + * others. 
+ */ + constructor(preventUnhandledRejectionWarning = true) { + this._state = DeferredState.PENDING; + this._promise = new Promise((resolve, reject) => { + this._resolve = resolve; + this._reject = reject; + }); + if (preventUnhandledRejectionWarning) { + this._promise.catch((_2) => { + }); } - return []; - }; - UnknownFieldHandler2.last = (message, fieldNo) => UnknownFieldHandler2.list(message, fieldNo).slice(-1)[0]; - const is = /* @__PURE__ */ __name((message) => message && Array.isArray(message[UnknownFieldHandler2.symbol]), "is"); - })(UnknownFieldHandler = exports2.UnknownFieldHandler || (exports2.UnknownFieldHandler = {})); - function mergeBinaryOptions(a, b) { - return Object.assign(Object.assign({}, a), b); - } - __name(mergeBinaryOptions, "mergeBinaryOptions"); - exports2.mergeBinaryOptions = mergeBinaryOptions; - var WireType; - (function(WireType2) { - WireType2[WireType2["Varint"] = 0] = "Varint"; - WireType2[WireType2["Bit64"] = 1] = "Bit64"; - WireType2[WireType2["LengthDelimited"] = 2] = "LengthDelimited"; - WireType2[WireType2["StartGroup"] = 3] = "StartGroup"; - WireType2[WireType2["EndGroup"] = 4] = "EndGroup"; - WireType2[WireType2["Bit32"] = 5] = "Bit32"; - })(WireType = exports2.WireType || (exports2.WireType = {})); + } + /** + * Get the current state of the promise. + */ + get state() { + return this._state; + } + /** + * Get the deferred promise. + */ + get promise() { + return this._promise; + } + /** + * Resolve the promise. Throws if the promise is already resolved or rejected. + */ + resolve(value) { + if (this.state !== DeferredState.PENDING) + throw new Error(`cannot resolve ${DeferredState[this.state].toLowerCase()}`); + this._resolve(value); + this._state = DeferredState.RESOLVED; + } + /** + * Reject the promise. Throws if the promise is already resolved or rejected. + */ + reject(reason) { + if (this.state !== DeferredState.PENDING) + throw new Error(`cannot reject ${DeferredState[this.state].toLowerCase()}`); + this._reject(reason); + this._state = DeferredState.REJECTED; + } + /** + * Resolve the promise. Ignore if not pending. + */ + resolvePending(val) { + if (this._state === DeferredState.PENDING) + this.resolve(val); + } + /** + * Reject the promise. Ignore if not pending. 
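+ *
+ * @example
+ * // A minimal usage sketch for Deferred (values invented for illustration):
+ * const d = new Deferred();
+ * d.promise.then((v) => console.log("got", v));
+ * d.resolvePending(42); // resolves; state is now RESOLVED
+ * d.rejectPending(new Error("late")); // no-op: the deferred is no longer pending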
+ */ + rejectPending(reason) { + if (this._state === DeferredState.PENDING) + this.reject(reason); + } + }; + exports2.Deferred = Deferred; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/goog-varint.js -var require_goog_varint = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/goog-varint.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-output-stream.js +var require_rpc_output_stream = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-output-stream.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.varint32read = exports2.varint32write = exports2.int64toString = exports2.int64fromString = exports2.varint64write = exports2.varint64read = void 0; - function varint64read() { - let lowBits = 0; - let highBits = 0; - for (let shift = 0; shift < 28; shift += 7) { - let b = this.buf[this.pos++]; - lowBits |= (b & 127) << shift; - if ((b & 128) == 0) { - this.assertBounds(); - return [lowBits, highBits]; - } + exports2.RpcOutputStreamController = void 0; + var deferred_1 = require_deferred(); + var runtime_1 = require_commonjs7(); + var RpcOutputStreamController = class { + static { + __name(this, "RpcOutputStreamController"); } - let middleByte = this.buf[this.pos++]; - lowBits |= (middleByte & 15) << 28; - highBits = (middleByte & 112) >> 4; - if ((middleByte & 128) == 0) { - this.assertBounds(); - return [lowBits, highBits]; + constructor() { + this._lis = { + nxt: [], + msg: [], + err: [], + cmp: [] + }; + this._closed = false; } - for (let shift = 3; shift <= 31; shift += 7) { - let b = this.buf[this.pos++]; - highBits |= (b & 127) << shift; - if ((b & 128) == 0) { - this.assertBounds(); - return [lowBits, highBits]; - } + // --- RpcOutputStream callback API + onNext(callback) { + return this.addLis(callback, this._lis.nxt); } - throw new Error("invalid varint"); - } - __name(varint64read, "varint64read"); - exports2.varint64read = varint64read; - function varint64write(lo, hi, bytes) { - for (let i = 0; i < 28; i = i + 7) { - const shift = lo >>> i; - const hasNext = !(shift >>> 7 == 0 && hi == 0); - const byte = (hasNext ? shift | 128 : shift) & 255; - bytes.push(byte); - if (!hasNext) { - return; - } + onMessage(callback) { + return this.addLis(callback, this._lis.msg); } - const splitBits = lo >>> 28 & 15 | (hi & 7) << 4; - const hasMoreBits = !(hi >> 3 == 0); - bytes.push((hasMoreBits ? splitBits | 128 : splitBits) & 255); - if (!hasMoreBits) { - return; + onError(callback) { + return this.addLis(callback, this._lis.err); } - for (let i = 3; i < 31; i = i + 7) { - const shift = hi >>> i; - const hasNext = !(shift >>> 7 == 0); - const byte = (hasNext ? 
shift | 128 : shift) & 255; - bytes.push(byte); - if (!hasNext) { - return; - } + onComplete(callback) { + return this.addLis(callback, this._lis.cmp); } - bytes.push(hi >>> 31 & 1); - } - __name(varint64write, "varint64write"); - exports2.varint64write = varint64write; - var TWO_PWR_32_DBL = (1 << 16) * (1 << 16); - function int64fromString(dec) { - let minus = dec[0] == "-"; - if (minus) - dec = dec.slice(1); - const base = 1e6; - let lowBits = 0; - let highBits = 0; - function add1e6digit(begin, end) { - const digit1e6 = Number(dec.slice(begin, end)); - highBits *= base; - lowBits = lowBits * base + digit1e6; - if (lowBits >= TWO_PWR_32_DBL) { - highBits = highBits + (lowBits / TWO_PWR_32_DBL | 0); - lowBits = lowBits % TWO_PWR_32_DBL; - } + addLis(callback, list) { + list.push(callback); + return () => { + let i = list.indexOf(callback); + if (i >= 0) + list.splice(i, 1); + }; } - __name(add1e6digit, "add1e6digit"); - add1e6digit(-24, -18); - add1e6digit(-18, -12); - add1e6digit(-12, -6); - add1e6digit(-6); - return [minus, lowBits, highBits]; - } - __name(int64fromString, "int64fromString"); - exports2.int64fromString = int64fromString; - function int64toString(bitsLow, bitsHigh) { - if (bitsHigh >>> 0 <= 2097151) { - return "" + (TWO_PWR_32_DBL * bitsHigh + (bitsLow >>> 0)); + // remove all listeners + clearLis() { + for (let l of Object.values(this._lis)) + l.splice(0, l.length); } - let low = bitsLow & 16777215; - let mid = (bitsLow >>> 24 | bitsHigh << 8) >>> 0 & 16777215; - let high = bitsHigh >> 16 & 65535; - let digitA = low + mid * 6777216 + high * 6710656; - let digitB = mid + high * 8147497; - let digitC = high * 2; - let base = 1e7; - if (digitA >= base) { - digitB += Math.floor(digitA / base); - digitA %= base; + // --- Controller API + /** + * Is this stream already closed by a completion or error? + */ + get closed() { + return this._closed !== false; } - if (digitB >= base) { - digitC += Math.floor(digitB / base); - digitB %= base; + /** + * Emit message, close with error, or close successfully, but only one + * at a time. + * Can be used to wrap a stream by using the other stream's `onNext`. + */ + notifyNext(message, error, complete) { + runtime_1.assert((message ? 1 : 0) + (error ? 1 : 0) + (complete ? 1 : 0) <= 1, "only one emission at a time"); + if (message) + this.notifyMessage(message); + if (error) + this.notifyError(error); + if (complete) + this.notifyComplete(); } - function decimalFrom1e7(digit1e7, needLeadingZeros) { - let partial = digit1e7 ? String(digit1e7) : ""; - if (needLeadingZeros) { - return "0000000".slice(partial.length) + partial; - } - return partial; + /** + * Emits a new message. Throws if stream is closed. + * + * Triggers onNext and onMessage callbacks. + */ + notifyMessage(message) { + runtime_1.assert(!this.closed, "stream is closed"); + this.pushIt({ value: message, done: false }); + this._lis.msg.forEach((l) => l(message)); + this._lis.nxt.forEach((l) => l(message, void 0, false)); } - __name(decimalFrom1e7, "decimalFrom1e7"); - return decimalFrom1e7( - digitC, - /*needLeadingZeros=*/ - 0 - ) + decimalFrom1e7( - digitB, - /*needLeadingZeros=*/ - digitC - ) + // If the final 1e7 digit didn't need leading zeros, we would have - // returned via the trivial code path at the top. 
- decimalFrom1e7( - digitA, - /*needLeadingZeros=*/ - 1 - ); - } - __name(int64toString, "int64toString"); - exports2.int64toString = int64toString; - function varint32write(value, bytes) { - if (value >= 0) { - while (value > 127) { - bytes.push(value & 127 | 128); - value = value >>> 7; + /** + * Closes the stream with an error. Throws if stream is closed. + * + * Triggers onNext and onError callbacks. + */ + notifyError(error) { + runtime_1.assert(!this.closed, "stream is closed"); + this._closed = error; + this.pushIt(error); + this._lis.err.forEach((l) => l(error)); + this._lis.nxt.forEach((l) => l(void 0, error, false)); + this.clearLis(); + } + /** + * Closes the stream successfully. Throws if stream is closed. + * + * Triggers onNext and onComplete callbacks. + */ + notifyComplete() { + runtime_1.assert(!this.closed, "stream is closed"); + this._closed = true; + this.pushIt({ value: null, done: true }); + this._lis.cmp.forEach((l) => l()); + this._lis.nxt.forEach((l) => l(void 0, void 0, true)); + this.clearLis(); + } + /** + * Creates an async iterator (that can be used with `for await {...}`) + * to consume the stream. + * + * Some things to note: + * - If an error occurs, the `for await` will throw it. + * - If an error occurred before the `for await` was started, `for await` + * will re-throw it. + * - If the stream is already complete, the `for await` will be empty. + * - If your `for await` consumes slower than the stream produces, + * for example because you are relaying messages in a slow operation, + * messages are queued. + */ + [Symbol.asyncIterator]() { + if (!this._itState) { + this._itState = { q: [] }; } - bytes.push(value); - } else { - for (let i = 0; i < 9; i++) { - bytes.push(value & 127 | 128); - value = value >> 7; + if (this._closed === true) + this.pushIt({ value: null, done: true }); + else if (this._closed !== false) + this.pushIt(this._closed); + return { + next: /* @__PURE__ */ __name(() => { + let state = this._itState; + runtime_1.assert(state, "bad state"); + runtime_1.assert(!state.p, "iterator contract broken"); + let first = state.q.shift(); + if (first) + return "value" in first ? Promise.resolve(first) : Promise.reject(first); + state.p = new deferred_1.Deferred(); + return state.p.promise; + }, "next") + }; + } + // "push" a new iterator result. + // this either resolves a pending promise, or enqueues the result. + pushIt(result) { + let state = this._itState; + if (!state) + return; + if (state.p) { + const p = state.p; + runtime_1.assert(p.state == deferred_1.DeferredState.PENDING, "iterator contract broken"); + "value" in result ? p.resolve(result) : p.reject(result); + delete state.p; + } else { + state.q.push(result); } - bytes.push(1); } - } - __name(varint32write, "varint32write"); - exports2.varint32write = varint32write; - function varint32read() { - let b = this.buf[this.pos++]; - let result = b & 127; - if ((b & 128) == 0) { - this.assertBounds(); - return result; + }; + exports2.RpcOutputStreamController = RpcOutputStreamController; + } +}); + +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/unary-call.js +var require_unary_call = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/unary-call.js"(exports2) { + "use strict"; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? 
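+ // A minimal sketch of consuming the RpcOutputStreamController defined
+ // above via its async iterator (`MyMessage` is a hypothetical message
+ // type used for illustration):
+ //   const stream = new RpcOutputStreamController();
+ //   (async () => {
+ //     for await (const msg of stream) console.log(msg);
+ //   })();
+ //   stream.notifyMessage(MyMessage.create());
+ //   stream.notifyComplete();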
value : new P(function(resolve) { + resolve(value); + }); } - b = this.buf[this.pos++]; - result |= (b & 127) << 7; - if ((b & 128) == 0) { - this.assertBounds(); - return result; + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.UnaryCall = void 0; + var UnaryCall = class { + static { + __name(this, "UnaryCall"); } - b = this.buf[this.pos++]; - result |= (b & 127) << 14; - if ((b & 128) == 0) { - this.assertBounds(); - return result; + constructor(method, requestHeaders, request, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.request = request; + this.headers = headers; + this.response = response; + this.status = status; + this.trailers = trailers; + } + /** + * If you are only interested in the final outcome of this call, + * you can await it to receive a `FinishedUnaryCall`. + */ + then(onfulfilled, onrejected) { + return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); } - b = this.buf[this.pos++]; - result |= (b & 127) << 21; - if ((b & 128) == 0) { - this.assertBounds(); - return result; + promiseFinished() { + return __awaiter3(this, void 0, void 0, function* () { + let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + request: this.request, + headers, + response, + status, + trailers + }; + }); } - b = this.buf[this.pos++]; - result |= (b & 15) << 28; - for (let readBytes = 5; (b & 128) !== 0 && readBytes < 10; readBytes++) - b = this.buf[this.pos++]; - if ((b & 128) != 0) - throw new Error("invalid varint"); - this.assertBounds(); - return result >>> 0; - } - __name(varint32read, "varint32read"); - exports2.varint32read = varint32read; + }; + exports2.UnaryCall = UnaryCall; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/pb-long.js -var require_pb_long = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/pb-long.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-streaming-call.js +var require_server_streaming_call = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-streaming-call.js"(exports2) { "use strict"; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? 
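+ // A minimal sketch of the UnaryCall contract described above: awaiting
+ // the call yields a FinishedUnaryCall (`client.myMethod` is hypothetical):
+ //   const call = client.myMethod(request);
+ //   const { response, status } = await call;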
value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.PbLong = exports2.PbULong = exports2.detectBi = void 0; - var goog_varint_1 = require_goog_varint(); - var BI; - function detectBi() { - const dv = new DataView(new ArrayBuffer(8)); - const ok = globalThis.BigInt !== void 0 && typeof dv.getBigInt64 === "function" && typeof dv.getBigUint64 === "function" && typeof dv.setBigInt64 === "function" && typeof dv.setBigUint64 === "function"; - BI = ok ? { - MIN: BigInt("-9223372036854775808"), - MAX: BigInt("9223372036854775807"), - UMIN: BigInt("0"), - UMAX: BigInt("18446744073709551615"), - C: BigInt, - V: dv - } : void 0; - } - __name(detectBi, "detectBi"); - exports2.detectBi = detectBi; - detectBi(); - function assertBi(bi) { - if (!bi) - throw new Error("BigInt unavailable, see https://github.com/timostamm/protobuf-ts/blob/v1.0.8/MANUAL.md#bigint-support"); - } - __name(assertBi, "assertBi"); - var RE_DECIMAL_STR = /^-?[0-9]+$/; - var TWO_PWR_32_DBL = 4294967296; - var HALF_2_PWR_32 = 2147483648; - var SharedPbLong = class { + exports2.ServerStreamingCall = void 0; + var ServerStreamingCall = class { static { - __name(this, "SharedPbLong"); + __name(this, "ServerStreamingCall"); } - /** - * Create a new instance with the given bits. - */ - constructor(lo, hi) { - this.lo = lo | 0; - this.hi = hi | 0; + constructor(method, requestHeaders, request, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.request = request; + this.headers = headers; + this.responses = response; + this.status = status; + this.trailers = trailers; } /** - * Is this instance equal to 0? + * Instead of awaiting the response status and trailers, you can + * just as well await this call itself to receive the server outcome. + * You should first setup some listeners to the `request` to + * see the actual messages the server replied with. */ - isZero() { - return this.lo == 0 && this.hi == 0; + then(onfulfilled, onrejected) { + return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); } - /** - * Convert to a native number. 
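+ // A minimal sketch for the ServerStreamingCall above: attach a listener
+ // to `responses` before awaiting the overall outcome (`client.myStream`
+ // is hypothetical):
+ //   const call = client.myStream(request);
+ //   call.responses.onMessage((msg) => console.log(msg));
+ //   const { status, trailers } = await call;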
- */ - toNumber() { - let result = this.hi * TWO_PWR_32_DBL + (this.lo >>> 0); - if (!Number.isSafeInteger(result)) - throw new Error("cannot convert to safe number"); - return result; + promiseFinished() { + return __awaiter3(this, void 0, void 0, function* () { + let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + request: this.request, + headers, + status, + trailers + }; + }); } }; - var PbULong = class _PbULong extends SharedPbLong { - static { - __name(this, "PbULong"); + exports2.ServerStreamingCall = ServerStreamingCall; + } +}); + +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/client-streaming-call.js +var require_client_streaming_call = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/client-streaming-call.js"(exports2) { + "use strict"; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); } - /** - * Create instance from a `string`, `number` or `bigint`. - */ - static from(value) { - if (BI) - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - if (value == "") - throw new Error("string is no integer"); - value = BI.C(value); - case "number": - if (value === 0) - return this.ZERO; - value = BI.C(value); - case "bigint": - if (!value) - return this.ZERO; - if (value < BI.UMIN) - throw new Error("signed value for ulong"); - if (value > BI.UMAX) - throw new Error("ulong too large"); - BI.V.setBigUint64(0, value, true); - return new _PbULong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } - else - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - value = value.trim(); - if (!RE_DECIMAL_STR.test(value)) - throw new Error("string is no integer"); - let [minus, lo, hi] = goog_varint_1.int64fromString(value); - if (minus) - throw new Error("signed value for ulong"); - return new _PbULong(lo, hi); - case "number": - if (value == 0) - return this.ZERO; - if (!Number.isSafeInteger(value)) - throw new Error("number is no integer"); - if (value < 0) - throw new Error("signed value for ulong"); - return new _PbULong(value, value / TWO_PWR_32_DBL); + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); } - throw new Error("unknown value " + typeof value); + } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ClientStreamingCall = void 0; + var ClientStreamingCall = class { + static { + __name(this, "ClientStreamingCall"); } - /** - * Convert to decimal string. - */ - toString() { - return BI ? 
this.toBigInt().toString() : goog_varint_1.int64toString(this.lo, this.hi); + constructor(method, requestHeaders, request, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.requests = request; + this.headers = headers; + this.response = response; + this.status = status; + this.trailers = trailers; } /** - * Convert to native bigint. + * Instead of awaiting the response status and trailers, you can + * just as well await this call itself to receive the server outcome. + * Note that it may still be valid to send more request messages. */ - toBigInt() { - assertBi(BI); - BI.V.setInt32(0, this.lo, true); - BI.V.setInt32(4, this.hi, true); - return BI.V.getBigUint64(0, true); + then(onfulfilled, onrejected) { + return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + } + promiseFinished() { + return __awaiter3(this, void 0, void 0, function* () { + let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + headers, + response, + status, + trailers + }; + }); } }; - exports2.PbULong = PbULong; - PbULong.ZERO = new PbULong(0, 0); - var PbLong = class _PbLong extends SharedPbLong { - static { - __name(this, "PbLong"); + exports2.ClientStreamingCall = ClientStreamingCall; + } +}); + +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/duplex-streaming-call.js +var require_duplex_streaming_call = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/duplex-streaming-call.js"(exports2) { + "use strict"; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); } - /** - * Create instance from a `string`, `number` or `bigint`. - */ - static from(value) { - if (BI) - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - if (value == "") - throw new Error("string is no integer"); - value = BI.C(value); - case "number": - if (value === 0) - return this.ZERO; - value = BI.C(value); - case "bigint": - if (!value) - return this.ZERO; - if (value < BI.MIN) - throw new Error("signed long too small"); - if (value > BI.MAX) - throw new Error("signed long too large"); - BI.V.setBigInt64(0, value, true); - return new _PbLong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } - else - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - value = value.trim(); - if (!RE_DECIMAL_STR.test(value)) - throw new Error("string is no integer"); - let [minus, lo, hi] = goog_varint_1.int64fromString(value); - if (minus) { - if (hi > HALF_2_PWR_32 || hi == HALF_2_PWR_32 && lo != 0) - throw new Error("signed long too small"); - } else if (hi >= HALF_2_PWR_32) - throw new Error("signed long too large"); - let pbl = new _PbLong(lo, hi); - return minus ? pbl.negate() : pbl; - case "number": - if (value == 0) - return this.ZERO; - if (!Number.isSafeInteger(value)) - throw new Error("number is no integer"); - return value > 0 ? 
new _PbLong(value, value / TWO_PWR_32_DBL) : new _PbLong(-value, -value / TWO_PWR_32_DBL).negate(); + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); } - throw new Error("unknown value " + typeof value); - } - /** - * Do we have a minus sign? - */ - isNegative() { - return (this.hi & HALF_2_PWR_32) !== 0; + } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.DuplexStreamingCall = void 0; + var DuplexStreamingCall = class { + static { + __name(this, "DuplexStreamingCall"); } - /** - * Negate two's complement. - * Invert all the bits and add one to the result. - */ - negate() { - let hi = ~this.hi, lo = this.lo; - if (lo) - lo = ~lo + 1; - else - hi += 1; - return new _PbLong(lo, hi); + constructor(method, requestHeaders, request, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.requests = request; + this.headers = headers; + this.responses = response; + this.status = status; + this.trailers = trailers; } /** - * Convert to decimal string. + * Instead of awaiting the response status and trailers, you can + * just as well await this call itself to receive the server outcome. + * Note that it may still be valid to send more request messages. */ - toString() { - if (BI) - return this.toBigInt().toString(); - if (this.isNegative()) { - let n = this.negate(); - return "-" + goog_varint_1.int64toString(n.lo, n.hi); - } - return goog_varint_1.int64toString(this.lo, this.hi); + then(onfulfilled, onrejected) { + return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); } - /** - * Convert to native bigint. 
- */ - toBigInt() { - assertBi(BI); - BI.V.setInt32(0, this.lo, true); - BI.V.setInt32(4, this.hi, true); - return BI.V.getBigInt64(0, true); + promiseFinished() { + return __awaiter3(this, void 0, void 0, function* () { + let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + headers, + status, + trailers + }; + }); } }; - exports2.PbLong = PbLong; - PbLong.ZERO = new PbLong(0, 0); + exports2.DuplexStreamingCall = DuplexStreamingCall; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/binary-reader.js -var require_binary_reader = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/binary-reader.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/test-transport.js +var require_test_transport = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/test-transport.js"(exports2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BinaryReader = exports2.binaryReadOptions = void 0; - var binary_format_contract_1 = require_binary_format_contract(); - var pb_long_1 = require_pb_long(); - var goog_varint_1 = require_goog_varint(); - var defaultsRead = { - readUnknownField: true, - readerFactory: /* @__PURE__ */ __name((bytes) => new BinaryReader(bytes), "readerFactory") - }; - function binaryReadOptions(options) { - return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; - } - __name(binaryReadOptions, "binaryReadOptions"); - exports2.binaryReadOptions = binaryReadOptions; - var BinaryReader = class { - static { - __name(this, "BinaryReader"); - } - constructor(buf, textDecoder) { - this.varint64 = goog_varint_1.varint64read; - this.uint32 = goog_varint_1.varint32read; - this.buf = buf; - this.len = buf.length; - this.pos = 0; - this.view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); - this.textDecoder = textDecoder !== null && textDecoder !== void 0 ? textDecoder : new TextDecoder("utf-8", { - fatal: true, - ignoreBOM: true + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); }); } - /** - * Reads a tag - field number and wire type. - */ - tag() { - let tag = this.uint32(), fieldNo = tag >>> 3, wireType = tag & 7; - if (fieldNo <= 0 || wireType < 0 || wireType > 5) - throw new Error("illegal tag: field no " + fieldNo + " wire type " + wireType); - return [fieldNo, wireType]; - } - /** - * Skip one element on the wire and return the skipped data. - * Supports WireType.StartGroup since v2.0.0-alpha.23. 
- */ - skip(wireType) { - let start = this.pos; - switch (wireType) { - case binary_format_contract_1.WireType.Varint: - while (this.buf[this.pos++] & 128) { - } - break; - case binary_format_contract_1.WireType.Bit64: - this.pos += 4; - case binary_format_contract_1.WireType.Bit32: - this.pos += 4; - break; - case binary_format_contract_1.WireType.LengthDelimited: - let len = this.uint32(); - this.pos += len; - break; - case binary_format_contract_1.WireType.StartGroup: - let t; - while ((t = this.tag()[1]) !== binary_format_contract_1.WireType.EndGroup) { - this.skip(t); - } - break; - default: - throw new Error("cant skip wire type " + wireType); + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } } - this.assertBounds(); - return this.buf.subarray(start, this.pos); - } - /** - * Throws error if position in byte array is out of range. - */ - assertBounds() { - if (this.pos > this.len) - throw new RangeError("premature EOF"); + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.TestTransport = void 0; + var rpc_error_1 = require_rpc_error(); + var runtime_1 = require_commonjs7(); + var rpc_output_stream_1 = require_rpc_output_stream(); + var rpc_options_1 = require_rpc_options(); + var unary_call_1 = require_unary_call(); + var server_streaming_call_1 = require_server_streaming_call(); + var client_streaming_call_1 = require_client_streaming_call(); + var duplex_streaming_call_1 = require_duplex_streaming_call(); + var TestTransport = class _TestTransport { + static { + __name(this, "TestTransport"); } /** - * Read a `int32` field, a signed 32 bit varint. + * Initialize with mock data. Omitted fields have default value. */ - int32() { - return this.uint32() | 0; + constructor(data) { + this.suppressUncaughtRejections = true; + this.headerDelay = 10; + this.responseDelay = 50; + this.betweenResponseDelay = 10; + this.afterResponseDelay = 10; + this.data = data !== null && data !== void 0 ? data : {}; } /** - * Read a `sint32` field, a signed, zigzag-encoded 32-bit varint. + * Sent message(s) during the last operation. */ - sint32() { - let zze = this.uint32(); - return zze >>> 1 ^ -(zze & 1); + get sentMessages() { + if (this.lastInput instanceof TestInputStream) { + return this.lastInput.sent; + } else if (typeof this.lastInput == "object") { + return [this.lastInput.single]; + } + return []; } /** - * Read a `int64` field, a signed 64-bit varint. + * Sending message(s) completed? */ - int64() { - return new pb_long_1.PbLong(...this.varint64()); + get sendComplete() { + if (this.lastInput instanceof TestInputStream) { + return this.lastInput.completed; + } else if (typeof this.lastInput == "object") { + return true; + } + return false; } - /** - * Read a `uint64` field, an unsigned 64-bit varint. - */ - uint64() { - return new pb_long_1.PbULong(...this.varint64()); + // Creates a promise for response headers from the mock data. + promiseHeaders() { + var _a; + const headers = (_a = this.data.headers) !== null && _a !== void 0 ? 
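+ // A minimal sketch of TestTransport with mock data, assuming generated
+ // client and message types (`MyServiceClient`, `MyRequest`, `MyResponse`
+ // are hypothetical):
+ //   const transport = new TestTransport({
+ //     response: MyResponse.create({ ok: true })
+ //   });
+ //   const client = new MyServiceClient(transport);
+ //   const { response } = await client.myMethod(MyRequest.create());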
_a : _TestTransport.defaultHeaders; + return headers instanceof rpc_error_1.RpcError ? Promise.reject(headers) : Promise.resolve(headers); } - /** - * Read a `sint64` field, a signed, zig-zag-encoded 64-bit varint. - */ - sint64() { - let [lo, hi] = this.varint64(); - let s = -(lo & 1); - lo = (lo >>> 1 | (hi & 1) << 31) ^ s; - hi = hi >>> 1 ^ s; - return new pb_long_1.PbLong(lo, hi); + // Creates a promise for a single, valid, message from the mock data. + promiseSingleResponse(method) { + if (this.data.response instanceof rpc_error_1.RpcError) { + return Promise.reject(this.data.response); + } + let r; + if (Array.isArray(this.data.response)) { + runtime_1.assert(this.data.response.length > 0); + r = this.data.response[0]; + } else if (this.data.response !== void 0) { + r = this.data.response; + } else { + r = method.O.create(); + } + runtime_1.assert(method.O.is(r)); + return Promise.resolve(r); } /** - * Read a `bool` field, a variant. + * Pushes response messages from the mock data to the output stream. + * If an error response, status or trailers are mocked, the stream is + * closed with the respective error. + * Otherwise, stream is completed successfully. + * + * The returned promise resolves when the stream is closed. It should + * not reject. If it does, code is broken. */ - bool() { - let [lo, hi] = this.varint64(); - return lo !== 0 || hi !== 0; + streamResponses(method, stream, abort) { + return __awaiter3(this, void 0, void 0, function* () { + const messages = []; + if (this.data.response === void 0) { + messages.push(method.O.create()); + } else if (Array.isArray(this.data.response)) { + for (let msg of this.data.response) { + runtime_1.assert(method.O.is(msg)); + messages.push(msg); + } + } else if (!(this.data.response instanceof rpc_error_1.RpcError)) { + runtime_1.assert(method.O.is(this.data.response)); + messages.push(this.data.response); + } + try { + yield delay(this.responseDelay, abort)(void 0); + } catch (error) { + stream.notifyError(error); + return; + } + if (this.data.response instanceof rpc_error_1.RpcError) { + stream.notifyError(this.data.response); + return; + } + for (let msg of messages) { + stream.notifyMessage(msg); + try { + yield delay(this.betweenResponseDelay, abort)(void 0); + } catch (error) { + stream.notifyError(error); + return; + } + } + if (this.data.status instanceof rpc_error_1.RpcError) { + stream.notifyError(this.data.status); + return; + } + if (this.data.trailers instanceof rpc_error_1.RpcError) { + stream.notifyError(this.data.trailers); + return; + } + stream.notifyComplete(); + }); } - /** - * Read a `fixed32` field, an unsigned, fixed-length 32-bit integer. - */ - fixed32() { - return this.view.getUint32((this.pos += 4) - 4, true); + // Creates a promise for response status from the mock data. + promiseStatus() { + var _a; + const status = (_a = this.data.status) !== null && _a !== void 0 ? _a : _TestTransport.defaultStatus; + return status instanceof rpc_error_1.RpcError ? Promise.reject(status) : Promise.resolve(status); } - /** - * Read a `sfixed32` field, a signed, fixed-length 32-bit integer. - */ - sfixed32() { - return this.view.getInt32((this.pos += 4) - 4, true); + // Creates a promise for response trailers from the mock data. + promiseTrailers() { + var _a; + const trailers = (_a = this.data.trailers) !== null && _a !== void 0 ? _a : _TestTransport.defaultTrailers; + return trailers instanceof rpc_error_1.RpcError ? 
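+ // Per streamResponses above, a mocked RpcError closes the stream with
+ // that error; a sketch (RpcError as exported by this package):
+ //   const failing = new TestTransport({
+ //     response: new RpcError("boom", "INTERNAL")
+ //   });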
Promise.reject(trailers) : Promise.resolve(trailers); } - /** - * Read a `fixed64` field, an unsigned, fixed-length 64 bit integer. - */ - fixed64() { - return new pb_long_1.PbULong(this.sfixed32(), this.sfixed32()); + maybeSuppressUncaught(...promise) { + if (this.suppressUncaughtRejections) { + for (let p of promise) { + p.catch(() => { + }); + } + } } - /** - * Read a `fixed64` field, a signed, fixed-length 64-bit integer. - */ - sfixed64() { - return new pb_long_1.PbLong(this.sfixed32(), this.sfixed32()); + mergeOptions(options) { + return rpc_options_1.mergeRpcOptions({}, options); } - /** - * Read a `float` field, 32-bit floating point number. - */ - float() { - return this.view.getFloat32((this.pos += 4) - 4, true); + unary(method, input, options) { + var _a; + const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), responsePromise = headersPromise.catch((_2) => { + }).then(delay(this.responseDelay, options.abort)).then((_2) => this.promiseSingleResponse(method)), statusPromise = responsePromise.catch((_2) => { + }).then(delay(this.afterResponseDelay, options.abort)).then((_2) => this.promiseStatus()), trailersPromise = responsePromise.catch((_2) => { + }).then(delay(this.afterResponseDelay, options.abort)).then((_2) => this.promiseTrailers()); + this.maybeSuppressUncaught(statusPromise, trailersPromise); + this.lastInput = { single: input }; + return new unary_call_1.UnaryCall(method, requestHeaders, input, headersPromise, responsePromise, statusPromise, trailersPromise); } - /** - * Read a `double` field, a 64-bit floating point number. - */ - double() { - return this.view.getFloat64((this.pos += 8) - 8, true); + serverStreaming(method, input, options) { + var _a; + const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise.then(delay(this.responseDelay, options.abort)).catch(() => { + }).then(() => this.streamResponses(method, outputStream, options.abort)).then(delay(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise.then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise.then(() => this.promiseTrailers()); + this.maybeSuppressUncaught(statusPromise, trailersPromise); + this.lastInput = { single: input }; + return new server_streaming_call_1.ServerStreamingCall(method, requestHeaders, input, headersPromise, outputStream, statusPromise, trailersPromise); } - /** - * Read a `bytes` field, length-delimited arbitrary data. - */ - bytes() { - let len = this.uint32(); - let start = this.pos; - this.pos += len; - this.assertBounds(); - return this.buf.subarray(start, start + len); + clientStreaming(method, options) { + var _a; + const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? 
_a : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), responsePromise = headersPromise.catch((_2) => { + }).then(delay(this.responseDelay, options.abort)).then((_2) => this.promiseSingleResponse(method)), statusPromise = responsePromise.catch((_2) => { + }).then(delay(this.afterResponseDelay, options.abort)).then((_2) => this.promiseStatus()), trailersPromise = responsePromise.catch((_2) => { + }).then(delay(this.afterResponseDelay, options.abort)).then((_2) => this.promiseTrailers()); + this.maybeSuppressUncaught(statusPromise, trailersPromise); + this.lastInput = new TestInputStream(this.data, options.abort); + return new client_streaming_call_1.ClientStreamingCall(method, requestHeaders, this.lastInput, headersPromise, responsePromise, statusPromise, trailersPromise); } - /** - * Read a `string` field, length-delimited data converted to UTF-8 text. - */ - string() { - return this.textDecoder.decode(this.bytes()); + duplex(method, options) { + var _a; + const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise.then(delay(this.responseDelay, options.abort)).catch(() => { + }).then(() => this.streamResponses(method, outputStream, options.abort)).then(delay(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise.then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise.then(() => this.promiseTrailers()); + this.maybeSuppressUncaught(statusPromise, trailersPromise); + this.lastInput = new TestInputStream(this.data, options.abort); + return new duplex_streaming_call_1.DuplexStreamingCall(method, requestHeaders, this.lastInput, headersPromise, outputStream, statusPromise, trailersPromise); } }; - exports2.BinaryReader = BinaryReader; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/assert.js -var require_assert = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/assert.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.assertFloat32 = exports2.assertUInt32 = exports2.assertInt32 = exports2.assertNever = exports2.assert = void 0; - function assert(condition, msg) { - if (!condition) { - throw new Error(msg); - } - } - __name(assert, "assert"); - exports2.assert = assert; - function assertNever(value, msg) { - throw new Error(msg !== null && msg !== void 0 ? 
msg : "Unexpected object: " + value); - } - __name(assertNever, "assertNever"); - exports2.assertNever = assertNever; - var FLOAT32_MAX = 34028234663852886e22; - var FLOAT32_MIN = -34028234663852886e22; - var UINT32_MAX = 4294967295; - var INT32_MAX = 2147483647; - var INT32_MIN = -2147483648; - function assertInt32(arg) { - if (typeof arg !== "number") - throw new Error("invalid int 32: " + typeof arg); - if (!Number.isInteger(arg) || arg > INT32_MAX || arg < INT32_MIN) - throw new Error("invalid int 32: " + arg); - } - __name(assertInt32, "assertInt32"); - exports2.assertInt32 = assertInt32; - function assertUInt32(arg) { - if (typeof arg !== "number") - throw new Error("invalid uint 32: " + typeof arg); - if (!Number.isInteger(arg) || arg > UINT32_MAX || arg < 0) - throw new Error("invalid uint 32: " + arg); - } - __name(assertUInt32, "assertUInt32"); - exports2.assertUInt32 = assertUInt32; - function assertFloat32(arg) { - if (typeof arg !== "number") - throw new Error("invalid float 32: " + typeof arg); - if (!Number.isFinite(arg)) - return; - if (arg > FLOAT32_MAX || arg < FLOAT32_MIN) - throw new Error("invalid float 32: " + arg); - } - __name(assertFloat32, "assertFloat32"); - exports2.assertFloat32 = assertFloat32; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/binary-writer.js -var require_binary_writer = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/binary-writer.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BinaryWriter = exports2.binaryWriteOptions = void 0; - var pb_long_1 = require_pb_long(); - var goog_varint_1 = require_goog_varint(); - var assert_1 = require_assert(); - var defaultsWrite = { - writeUnknownFields: true, - writerFactory: /* @__PURE__ */ __name(() => new BinaryWriter(), "writerFactory") + exports2.TestTransport = TestTransport; + TestTransport.defaultHeaders = { + responseHeader: "test" }; - function binaryWriteOptions(options) { - return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; + TestTransport.defaultStatus = { + code: "OK", + detail: "all good" + }; + TestTransport.defaultTrailers = { + responseTrailer: "test" + }; + function delay(ms, abort) { + return (v) => new Promise((resolve, reject) => { + if (abort === null || abort === void 0 ? void 0 : abort.aborted) { + reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); + } else { + const id = setTimeout(() => resolve(v), ms); + if (abort) { + abort.addEventListener("abort", (ev) => { + clearTimeout(id); + reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); + }); + } + } + }); } - __name(binaryWriteOptions, "binaryWriteOptions"); - exports2.binaryWriteOptions = binaryWriteOptions; - var BinaryWriter = class { + __name(delay, "delay"); + var TestInputStream = class { static { - __name(this, "BinaryWriter"); + __name(this, "TestInputStream"); } - constructor(textEncoder) { - this.stack = []; - this.textEncoder = textEncoder !== null && textEncoder !== void 0 ? textEncoder : new TextEncoder(); - this.chunks = []; - this.buf = []; + constructor(data, abort) { + this._completed = false; + this._sent = []; + this.data = data; + this.abort = abort; } - /** - * Return all bytes written and reset this writer. 
- */ - finish() { - this.chunks.push(new Uint8Array(this.buf)); - let len = 0; - for (let i = 0; i < this.chunks.length; i++) - len += this.chunks[i].length; - let bytes = new Uint8Array(len); - let offset = 0; - for (let i = 0; i < this.chunks.length; i++) { - bytes.set(this.chunks[i], offset); - offset += this.chunks[i].length; + get sent() { + return this._sent; + } + get completed() { + return this._completed; + } + send(message) { + if (this.data.inputMessage instanceof rpc_error_1.RpcError) { + return Promise.reject(this.data.inputMessage); } - this.chunks = []; - return bytes; + const delayMs = this.data.inputMessage === void 0 ? 10 : this.data.inputMessage; + return Promise.resolve(void 0).then(() => { + this._sent.push(message); + }).then(delay(delayMs, this.abort)); } - /** - * Start a new fork for length-delimited data like a message - * or a packed repeated field. - * - * Must be joined later with `join()`. - */ - fork() { - this.stack.push({ chunks: this.chunks, buf: this.buf }); - this.chunks = []; - this.buf = []; - return this; + complete() { + if (this.data.inputComplete instanceof rpc_error_1.RpcError) { + return Promise.reject(this.data.inputComplete); + } + const delayMs = this.data.inputComplete === void 0 ? 10 : this.data.inputComplete; + return Promise.resolve(void 0).then(() => { + this._completed = true; + }).then(delay(delayMs, this.abort)); } - /** - * Join the last fork. Write its length and bytes, then - * return to the previous state. - */ - join() { - let chunk = this.finish(); - let prev = this.stack.pop(); - if (!prev) - throw new Error("invalid state, fork stack empty"); - this.chunks = prev.chunks; - this.buf = prev.buf; - this.uint32(chunk.byteLength); - return this.raw(chunk); + }; + } +}); + +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-interceptor.js +var require_rpc_interceptor = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-interceptor.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.stackDuplexStreamingInterceptors = exports2.stackClientStreamingInterceptors = exports2.stackServerStreamingInterceptors = exports2.stackUnaryInterceptors = exports2.stackIntercept = void 0; + var runtime_1 = require_commonjs7(); + function stackIntercept(kind, transport, method, options, input) { + var _a, _b, _c, _d; + if (kind == "unary") { + let tail = /* @__PURE__ */ __name((mtd, inp, opt) => transport.unary(mtd, inp, opt), "tail"); + for (const curr of ((_a = options.interceptors) !== null && _a !== void 0 ? _a : []).filter((i) => i.interceptUnary).reverse()) { + const next = tail; + tail = /* @__PURE__ */ __name((mtd, inp, opt) => curr.interceptUnary(next, mtd, inp, opt), "tail"); + } + return tail(method, input, options); } - /** - * Writes a tag (field number and wire type). - * - * Equivalent to `uint32( (fieldNo << 3 | type) >>> 0 )`. - * - * Generated code should compute the tag ahead of time and call `uint32()`. - */ - tag(fieldNo, type) { - return this.uint32((fieldNo << 3 | type) >>> 0); + if (kind == "serverStreaming") { + let tail = /* @__PURE__ */ __name((mtd, inp, opt) => transport.serverStreaming(mtd, inp, opt), "tail"); + for (const curr of ((_b = options.interceptors) !== null && _b !== void 0 ? 
_b : []).filter((i) => i.interceptServerStreaming).reverse()) { + const next = tail; + tail = /* @__PURE__ */ __name((mtd, inp, opt) => curr.interceptServerStreaming(next, mtd, inp, opt), "tail"); + } + return tail(method, input, options); } - /** - * Write a chunk of raw bytes. - */ - raw(chunk) { - if (this.buf.length) { - this.chunks.push(new Uint8Array(this.buf)); - this.buf = []; + if (kind == "clientStreaming") { + let tail = /* @__PURE__ */ __name((mtd, opt) => transport.clientStreaming(mtd, opt), "tail"); + for (const curr of ((_c = options.interceptors) !== null && _c !== void 0 ? _c : []).filter((i) => i.interceptClientStreaming).reverse()) { + const next = tail; + tail = /* @__PURE__ */ __name((mtd, opt) => curr.interceptClientStreaming(next, mtd, opt), "tail"); } - this.chunks.push(chunk); - return this; + return tail(method, options); } - /** - * Write a `uint32` value, an unsigned 32 bit varint. - */ - uint32(value) { - assert_1.assertUInt32(value); - while (value > 127) { - this.buf.push(value & 127 | 128); - value = value >>> 7; + if (kind == "duplex") { + let tail = /* @__PURE__ */ __name((mtd, opt) => transport.duplex(mtd, opt), "tail"); + for (const curr of ((_d = options.interceptors) !== null && _d !== void 0 ? _d : []).filter((i) => i.interceptDuplex).reverse()) { + const next = tail; + tail = /* @__PURE__ */ __name((mtd, opt) => curr.interceptDuplex(next, mtd, opt), "tail"); } - this.buf.push(value); - return this; + return tail(method, options); } - /** - * Write a `int32` value, a signed 32 bit varint. - */ - int32(value) { - assert_1.assertInt32(value); - goog_varint_1.varint32write(value, this.buf); - return this; + runtime_1.assertNever(kind); + } + __name(stackIntercept, "stackIntercept"); + exports2.stackIntercept = stackIntercept; + function stackUnaryInterceptors(transport, method, input, options) { + return stackIntercept("unary", transport, method, options, input); + } + __name(stackUnaryInterceptors, "stackUnaryInterceptors"); + exports2.stackUnaryInterceptors = stackUnaryInterceptors; + function stackServerStreamingInterceptors(transport, method, input, options) { + return stackIntercept("serverStreaming", transport, method, options, input); + } + __name(stackServerStreamingInterceptors, "stackServerStreamingInterceptors"); + exports2.stackServerStreamingInterceptors = stackServerStreamingInterceptors; + function stackClientStreamingInterceptors(transport, method, options) { + return stackIntercept("clientStreaming", transport, method, options); + } + __name(stackClientStreamingInterceptors, "stackClientStreamingInterceptors"); + exports2.stackClientStreamingInterceptors = stackClientStreamingInterceptors; + function stackDuplexStreamingInterceptors(transport, method, options) { + return stackIntercept("duplex", transport, method, options); + } + __name(stackDuplexStreamingInterceptors, "stackDuplexStreamingInterceptors"); + exports2.stackDuplexStreamingInterceptors = stackDuplexStreamingInterceptors; + } +}); + +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-call-context.js +var require_server_call_context = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-call-context.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ServerCallContextController = void 0; + var ServerCallContextController = class { + static { + __name(this, "ServerCallContextController"); } - /** - * Write a `bool` value, a variant. - */ - bool(value) { - this.buf.push(value ? 
1 : 0); - return this; + constructor(method, headers, deadline, sendResponseHeadersFn, defaultStatus = { code: "OK", detail: "" }) { + this._cancelled = false; + this._listeners = []; + this.method = method; + this.headers = headers; + this.deadline = deadline; + this.trailers = {}; + this._sendRH = sendResponseHeadersFn; + this.status = defaultStatus; } /** - * Write a `bytes` value, length-delimited arbitrary data. + * Set the call cancelled. + * + * Invokes all callbacks registered with onCancel() and + * sets `cancelled = true`. */ - bytes(value) { - this.uint32(value.byteLength); - return this.raw(value); + notifyCancelled() { + if (!this._cancelled) { + this._cancelled = true; + for (let l of this._listeners) { + l(); + } + } } /** - * Write a `string` value, length-delimited data converted to UTF-8 text. + * Send response headers. */ - string(value) { - let chunk = this.textEncoder.encode(value); - this.uint32(chunk.byteLength); - return this.raw(chunk); + sendResponseHeaders(data) { + this._sendRH(data); } /** - * Write a `float` value, 32-bit floating point number. + * Is the call cancelled? + * + * When the client closes the connection before the server + * is done, the call is cancelled. + * + * If you want to cancel a request on the server, throw a + * RpcError with the CANCELLED status code. */ - float(value) { - assert_1.assertFloat32(value); - let chunk = new Uint8Array(4); - new DataView(chunk.buffer).setFloat32(0, value, true); - return this.raw(chunk); + get cancelled() { + return this._cancelled; } /** - * Write a `double` value, a 64-bit floating point number. + * Add a callback for cancellation. */ - double(value) { - let chunk = new Uint8Array(8); - new DataView(chunk.buffer).setFloat64(0, value, true); - return this.raw(chunk); + onCancel(callback) { + const l = this._listeners; + l.push(callback); + return () => { + let i = l.indexOf(callback); + if (i >= 0) + l.splice(i, 1); + }; } - /** - * Write a `fixed32` value, an unsigned, fixed-length 32-bit integer. 
- */ - fixed32(value) { - assert_1.assertUInt32(value); - let chunk = new Uint8Array(4); - new DataView(chunk.buffer).setUint32(0, value, true); - return this.raw(chunk); + }; + exports2.ServerCallContextController = ServerCallContextController; + } +}); + +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js +var require_commonjs8 = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + var service_type_1 = require_service_type(); + Object.defineProperty(exports2, "ServiceType", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return service_type_1.ServiceType; + }, "get") }); + var reflection_info_1 = require_reflection_info2(); + Object.defineProperty(exports2, "readMethodOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.readMethodOptions; + }, "get") }); + Object.defineProperty(exports2, "readMethodOption", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.readMethodOption; + }, "get") }); + Object.defineProperty(exports2, "readServiceOption", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.readServiceOption; + }, "get") }); + var rpc_error_1 = require_rpc_error(); + Object.defineProperty(exports2, "RpcError", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return rpc_error_1.RpcError; + }, "get") }); + var rpc_options_1 = require_rpc_options(); + Object.defineProperty(exports2, "mergeRpcOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return rpc_options_1.mergeRpcOptions; + }, "get") }); + var rpc_output_stream_1 = require_rpc_output_stream(); + Object.defineProperty(exports2, "RpcOutputStreamController", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return rpc_output_stream_1.RpcOutputStreamController; + }, "get") }); + var test_transport_1 = require_test_transport(); + Object.defineProperty(exports2, "TestTransport", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return test_transport_1.TestTransport; + }, "get") }); + var deferred_1 = require_deferred(); + Object.defineProperty(exports2, "Deferred", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return deferred_1.Deferred; + }, "get") }); + Object.defineProperty(exports2, "DeferredState", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return deferred_1.DeferredState; + }, "get") }); + var duplex_streaming_call_1 = require_duplex_streaming_call(); + Object.defineProperty(exports2, "DuplexStreamingCall", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return duplex_streaming_call_1.DuplexStreamingCall; + }, "get") }); + var client_streaming_call_1 = require_client_streaming_call(); + Object.defineProperty(exports2, "ClientStreamingCall", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return client_streaming_call_1.ClientStreamingCall; + }, "get") }); + var server_streaming_call_1 = require_server_streaming_call(); + Object.defineProperty(exports2, "ServerStreamingCall", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return server_streaming_call_1.ServerStreamingCall; + }, "get") }); + var unary_call_1 = require_unary_call(); + Object.defineProperty(exports2, "UnaryCall", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return unary_call_1.UnaryCall; + }, "get") }); + var rpc_interceptor_1 = 
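/*
 * Note on the `sint32` zigzag fold above (illustrative only): signed values
 * are mapped to small unsigned ones before varint encoding, so negatives
 * near zero stay short instead of sign-extending to maximum length:
 *
 *   (value << 1 ^ value >> 31) >>> 0
 *   // 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, 2 -> 4
 */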
require_rpc_interceptor(); + Object.defineProperty(exports2, "stackIntercept", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return rpc_interceptor_1.stackIntercept; + }, "get") }); + Object.defineProperty(exports2, "stackDuplexStreamingInterceptors", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return rpc_interceptor_1.stackDuplexStreamingInterceptors; + }, "get") }); + Object.defineProperty(exports2, "stackClientStreamingInterceptors", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return rpc_interceptor_1.stackClientStreamingInterceptors; + }, "get") }); + Object.defineProperty(exports2, "stackServerStreamingInterceptors", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return rpc_interceptor_1.stackServerStreamingInterceptors; + }, "get") }); + Object.defineProperty(exports2, "stackUnaryInterceptors", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return rpc_interceptor_1.stackUnaryInterceptors; + }, "get") }); + var server_call_context_1 = require_server_call_context(); + Object.defineProperty(exports2, "ServerCallContextController", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return server_call_context_1.ServerCallContextController; + }, "get") }); + } +}); + +// ../node_modules/@actions/cache/lib/generated/google/protobuf/timestamp.js +var require_timestamp = __commonJS({ + "../node_modules/@actions/cache/lib/generated/google/protobuf/timestamp.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Timestamp = void 0; + var runtime_1 = require_commonjs7(); + var runtime_2 = require_commonjs7(); + var runtime_3 = require_commonjs7(); + var runtime_4 = require_commonjs7(); + var runtime_5 = require_commonjs7(); + var runtime_6 = require_commonjs7(); + var runtime_7 = require_commonjs7(); + var Timestamp$Type = class extends runtime_7.MessageType { + static { + __name(this, "Timestamp$Type"); } - /** - * Write a `sfixed32` value, a signed, fixed-length 32-bit integer. - */ - sfixed32(value) { - assert_1.assertInt32(value); - let chunk = new Uint8Array(4); - new DataView(chunk.buffer).setInt32(0, value, true); - return this.raw(chunk); + constructor() { + super("google.protobuf.Timestamp", [ + { + no: 1, + name: "seconds", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + }, + { + no: 2, + name: "nanos", + kind: "scalar", + T: 5 + /*ScalarType.INT32*/ + } + ]); } /** - * Write a `sint32` value, a signed, zigzag-encoded 32-bit varint. + * Creates a new `Timestamp` for the current time. */ - sint32(value) { - assert_1.assertInt32(value); - value = (value << 1 ^ value >> 31) >>> 0; - goog_varint_1.varint32write(value, this.buf); - return this; + now() { + const msg = this.create(); + const ms = Date.now(); + msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1e3)).toString(); + msg.nanos = ms % 1e3 * 1e6; + return msg; } /** - * Write a `fixed64` value, a signed, fixed-length 64-bit integer. + * Converts a `Timestamp` to a JavaScript Date. */ - sfixed64(value) { - let chunk = new Uint8Array(8); - let view = new DataView(chunk.buffer); - let long = pb_long_1.PbLong.from(value); - view.setInt32(0, long.lo, true); - view.setInt32(4, long.hi, true); - return this.raw(chunk); + toDate(message) { + return new Date(runtime_6.PbLong.from(message.seconds).toNumber() * 1e3 + Math.ceil(message.nanos / 1e6)); } /** - * Write a `fixed64` value, an unsigned, fixed-length 64 bit integer. + * Converts a JavaScript Date to a `Timestamp`. 
*/ - fixed64(value) { - let chunk = new Uint8Array(8); - let view = new DataView(chunk.buffer); - let long = pb_long_1.PbULong.from(value); - view.setInt32(0, long.lo, true); - view.setInt32(4, long.hi, true); - return this.raw(chunk); + fromDate(date) { + const msg = this.create(); + const ms = date.getTime(); + msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1e3)).toString(); + msg.nanos = ms % 1e3 * 1e6; + return msg; } /** - * Write a `int64` value, a signed 64-bit varint. + * In JSON format, the `Timestamp` type is encoded as a string + * in the RFC 3339 format. */ - int64(value) { - let long = pb_long_1.PbLong.from(value); - goog_varint_1.varint64write(long.lo, long.hi, this.buf); - return this; + internalJsonWrite(message, options) { + let ms = runtime_6.PbLong.from(message.seconds).toNumber() * 1e3; + if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z")) + throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive."); + if (message.nanos < 0) + throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative."); + let z = "Z"; + if (message.nanos > 0) { + let nanosStr = (message.nanos + 1e9).toString().substring(1); + if (nanosStr.substring(3) === "000000") + z = "." + nanosStr.substring(0, 3) + "Z"; + else if (nanosStr.substring(6) === "000") + z = "." + nanosStr.substring(0, 6) + "Z"; + else + z = "." + nanosStr + "Z"; + } + return new Date(ms).toISOString().replace(".000Z", z); } /** - * Write a `sint64` value, a signed, zig-zag-encoded 64-bit varint. + * In JSON format, the `Timestamp` type is encoded as a string + * in the RFC 3339 format. */ - sint64(value) { - let long = pb_long_1.PbLong.from(value), sign = long.hi >> 31, lo = long.lo << 1 ^ sign, hi = (long.hi << 1 | long.lo >>> 31) ^ sign; - goog_varint_1.varint64write(lo, hi, this.buf); - return this; + internalJsonRead(json, options, target) { + if (typeof json !== "string") + throw new Error("Unable to parse Timestamp from JSON " + (0, runtime_5.typeofJsonValue)(json) + "."); + let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/); + if (!matches) + throw new Error("Unable to parse Timestamp from JSON. Invalid format."); + let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z")); + if (Number.isNaN(ms)) + throw new Error("Unable to parse Timestamp from JSON. Invalid value."); + if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z")) + throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive."); + if (!target) + target = this.create(); + target.seconds = runtime_6.PbLong.from(ms / 1e3).toString(); + target.nanos = 0; + if (matches[7]) + target.nanos = parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1e9; + return target; } - /** - * Write a `uint64` value, an unsigned 64-bit varint. 
- */ - uint64(value) { - let long = pb_long_1.PbULong.from(value); - goog_varint_1.varint64write(long.lo, long.hi, this.buf); - return this; + create(value) { + const message = { seconds: "0", nanos: 0 }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; } - }; - exports2.BinaryWriter = BinaryWriter; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/json-format-contract.js -var require_json_format_contract = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/json-format-contract.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.mergeJsonOptions = exports2.jsonWriteOptions = exports2.jsonReadOptions = void 0; - var defaultsWrite = { - emitDefaultValues: false, - enumAsInteger: false, - useProtoFieldName: false, - prettySpaces: 0 - }; - var defaultsRead = { - ignoreUnknownFields: false - }; - function jsonReadOptions(options) { - return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; - } - __name(jsonReadOptions, "jsonReadOptions"); - exports2.jsonReadOptions = jsonReadOptions; - function jsonWriteOptions(options) { - return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; - } - __name(jsonWriteOptions, "jsonWriteOptions"); - exports2.jsonWriteOptions = jsonWriteOptions; - function mergeJsonOptions(a, b) { - var _a, _b; - let c = Object.assign(Object.assign({}, a), b); - c.typeRegistry = [...(_a = a === null || a === void 0 ? void 0 : a.typeRegistry) !== null && _a !== void 0 ? _a : [], ...(_b = b === null || b === void 0 ? void 0 : b.typeRegistry) !== null && _b !== void 0 ? _b : []]; - return c; - } - __name(mergeJsonOptions, "mergeJsonOptions"); - exports2.mergeJsonOptions = mergeJsonOptions; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/message-type-contract.js -var require_message_type_contract = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/message-type-contract.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.MESSAGE_TYPE = void 0; - exports2.MESSAGE_TYPE = Symbol.for("protobuf-ts/message-type"); - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/lower-camel-case.js -var require_lower_camel_case = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/lower-camel-case.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.lowerCamelCase = void 0; - function lowerCamelCase(snakeCase) { - let capNext = false; - const sb = []; - for (let i = 0; i < snakeCase.length; i++) { - let next = snakeCase.charAt(i); - if (next == "_") { - capNext = true; - } else if (/\d/.test(next)) { - sb.push(next); - capNext = true; - } else if (capNext) { - sb.push(next.toUpperCase()); - capNext = false; - } else if (i == 0) { - sb.push(next.toLowerCase()); - } else { - sb.push(next); + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? 
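/*
 * Usage sketch for the generated `Timestamp` helpers above (hypothetical
 * values; `Timestamp` is the instance exported below): `seconds` travels
 * as a decimal string via PbLong, `nanos` as a plain number.
 *
 *   const ts = Timestamp.now();      // e.g. { seconds: "1733848686", nanos: 123000000 }
 *   Timestamp.toDate(ts);            // back to a JavaScript Date
 *   Timestamp.fromDate(new Date(0)); // { seconds: "0", nanos: 0 }
 */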
target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 seconds */ + 1: + message.seconds = reader.int64().toString(); + break; + case /* int32 nanos */ + 2: + message.nanos = reader.int32(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } + return message; } - return sb.join(""); - } - __name(lowerCamelCase, "lowerCamelCase"); - exports2.lowerCamelCase = lowerCamelCase; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-info.js -var require_reflection_info = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-info.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.readMessageOption = exports2.readFieldOption = exports2.readFieldOptions = exports2.normalizeFieldInfo = exports2.RepeatType = exports2.LongType = exports2.ScalarType = void 0; - var lower_camel_case_1 = require_lower_camel_case(); - var ScalarType; - (function(ScalarType2) { - ScalarType2[ScalarType2["DOUBLE"] = 1] = "DOUBLE"; - ScalarType2[ScalarType2["FLOAT"] = 2] = "FLOAT"; - ScalarType2[ScalarType2["INT64"] = 3] = "INT64"; - ScalarType2[ScalarType2["UINT64"] = 4] = "UINT64"; - ScalarType2[ScalarType2["INT32"] = 5] = "INT32"; - ScalarType2[ScalarType2["FIXED64"] = 6] = "FIXED64"; - ScalarType2[ScalarType2["FIXED32"] = 7] = "FIXED32"; - ScalarType2[ScalarType2["BOOL"] = 8] = "BOOL"; - ScalarType2[ScalarType2["STRING"] = 9] = "STRING"; - ScalarType2[ScalarType2["BYTES"] = 12] = "BYTES"; - ScalarType2[ScalarType2["UINT32"] = 13] = "UINT32"; - ScalarType2[ScalarType2["SFIXED32"] = 15] = "SFIXED32"; - ScalarType2[ScalarType2["SFIXED64"] = 16] = "SFIXED64"; - ScalarType2[ScalarType2["SINT32"] = 17] = "SINT32"; - ScalarType2[ScalarType2["SINT64"] = 18] = "SINT64"; - })(ScalarType = exports2.ScalarType || (exports2.ScalarType = {})); - var LongType; - (function(LongType2) { - LongType2[LongType2["BIGINT"] = 0] = "BIGINT"; - LongType2[LongType2["STRING"] = 1] = "STRING"; - LongType2[LongType2["NUMBER"] = 2] = "NUMBER"; - })(LongType = exports2.LongType || (exports2.LongType = {})); - var RepeatType; - (function(RepeatType2) { - RepeatType2[RepeatType2["NO"] = 0] = "NO"; - RepeatType2[RepeatType2["PACKED"] = 1] = "PACKED"; - RepeatType2[RepeatType2["UNPACKED"] = 2] = "UNPACKED"; - })(RepeatType = exports2.RepeatType || (exports2.RepeatType = {})); - function normalizeFieldInfo(field) { - var _a, _b, _c, _d; - field.localName = (_a = field.localName) !== null && _a !== void 0 ? _a : lower_camel_case_1.lowerCamelCase(field.name); - field.jsonName = (_b = field.jsonName) !== null && _b !== void 0 ? _b : lower_camel_case_1.lowerCamelCase(field.name); - field.repeat = (_c = field.repeat) !== null && _c !== void 0 ? _c : RepeatType.NO; - field.opt = (_d = field.opt) !== null && _d !== void 0 ? _d : field.repeat ? false : field.oneof ? 
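/*
 * Note on `normalizeFieldInfo` above: when a field declaration omits them,
 * `localName` and `jsonName` default to the lowerCamelCase form of the
 * proto field name:
 *
 *   lowerCamelCase("size_bytes")   // => "sizeBytes"
 *   lowerCamelCase("restore_keys") // => "restoreKeys"
 *
 * which is why the generated cache messages below use `sizeBytes` in code
 * while the wire name stays `size_bytes`.
 */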
false : field.kind == "message"; - return field; - } - __name(normalizeFieldInfo, "normalizeFieldInfo"); - exports2.normalizeFieldInfo = normalizeFieldInfo; - function readFieldOptions(messageType, fieldName, extensionName, extensionType) { - var _a; - const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; - return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : void 0; - } - __name(readFieldOptions, "readFieldOptions"); - exports2.readFieldOptions = readFieldOptions; - function readFieldOption(messageType, fieldName, extensionName, extensionType) { - var _a; - const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; - if (!options) { - return void 0; - } - const optionVal = options[extensionName]; - if (optionVal === void 0) { - return optionVal; - } - return extensionType ? extensionType.fromJson(optionVal) : optionVal; - } - __name(readFieldOption, "readFieldOption"); - exports2.readFieldOption = readFieldOption; - function readMessageOption(messageType, extensionName, extensionType) { - const options = messageType.options; - const optionVal = options[extensionName]; - if (optionVal === void 0) { - return optionVal; - } - return extensionType ? extensionType.fromJson(optionVal) : optionVal; - } - __name(readMessageOption, "readMessageOption"); - exports2.readMessageOption = readMessageOption; + internalBinaryWrite(message, writer, options) { + if (message.seconds !== "0") + writer.tag(1, runtime_1.WireType.Varint).int64(message.seconds); + if (message.nanos !== 0) + writer.tag(2, runtime_1.WireType.Varint).int32(message.nanos); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
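/*
 * Note on the guards above (illustrative only): proto3 omits fields that
 * hold their default value, so a zero Timestamp serializes to zero bytes
 * and the reader rebuilds { seconds: "0", nanos: 0 } from the defaults
 * that create() supplies.
 */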
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.Timestamp = new Timestamp$Type(); } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/oneof.js -var require_oneof = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/oneof.js"(exports2) { +// ../node_modules/@actions/cache/lib/generated/results/entities/v1/cacheentry.js +var require_cacheentry = __commonJS({ + "../node_modules/@actions/cache/lib/generated/results/entities/v1/cacheentry.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getSelectedOneofValue = exports2.clearOneofValue = exports2.setUnknownOneofValue = exports2.setOneofValue = exports2.getOneofValue = exports2.isOneofGroup = void 0; - function isOneofGroup(any) { - if (typeof any != "object" || any === null || !any.hasOwnProperty("oneofKind")) { - return false; - } - switch (typeof any.oneofKind) { - case "string": - if (any[any.oneofKind] === void 0) - return false; - return Object.keys(any).length == 2; - case "undefined": - return Object.keys(any).length == 1; - default: - return false; - } - } - __name(isOneofGroup, "isOneofGroup"); - exports2.isOneofGroup = isOneofGroup; - function getOneofValue(oneof, kind) { - return oneof[kind]; - } - __name(getOneofValue, "getOneofValue"); - exports2.getOneofValue = getOneofValue; - function setOneofValue(oneof, kind, value) { - if (oneof.oneofKind !== void 0) { - delete oneof[oneof.oneofKind]; - } - oneof.oneofKind = kind; - if (value !== void 0) { - oneof[kind] = value; + exports2.CacheEntry = void 0; + var runtime_1 = require_commonjs7(); + var runtime_2 = require_commonjs7(); + var runtime_3 = require_commonjs7(); + var runtime_4 = require_commonjs7(); + var runtime_5 = require_commonjs7(); + var timestamp_1 = require_timestamp(); + var CacheEntry$Type = class extends runtime_5.MessageType { + static { + __name(this, "CacheEntry$Type"); } - } - __name(setOneofValue, "setOneofValue"); - exports2.setOneofValue = setOneofValue; - function setUnknownOneofValue(oneof, kind, value) { - if (oneof.oneofKind !== void 0) { - delete oneof[oneof.oneofKind]; + constructor() { + super("github.actions.results.entities.v1.CacheEntry", [ + { + no: 1, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 2, + name: "hash", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "size_bytes", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + }, + { + no: 4, + name: "scope", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 5, + name: "version", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { no: 6, name: "created_at", kind: "message", T: /* @__PURE__ */ __name(() => timestamp_1.Timestamp, "T") }, + { no: 7, name: "last_accessed_at", kind: "message", T: /* @__PURE__ */ __name(() => timestamp_1.Timestamp, "T") }, + { no: 8, name: "expires_at", kind: "message", T: /* @__PURE__ */ __name(() => timestamp_1.Timestamp, "T") } + ]); } - oneof.oneofKind = kind; - if (value !== void 0 && kind !== void 0) { - oneof[kind] = value; + create(value) { + const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; } - } - __name(setUnknownOneofValue, "setUnknownOneofValue"); - exports2.setUnknownOneofValue = setUnknownOneofValue; - 
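/*
 * Shape of the oneof group these helpers manage (sketch with a
 * hypothetical "text" member): at most one member may be selected,
 * tracked by `oneofKind`.
 *
 *   let group = { oneofKind: undefined };  // nothing selected
 *   setOneofValue(group, "text", "hi");    // { oneofKind: "text", text: "hi" }
 *   clearOneofValue(group);                // { oneofKind: undefined }
 *   isOneofGroup(group);                   // true in both states
 */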
function clearOneofValue(oneof) { - if (oneof.oneofKind !== void 0) { - delete oneof[oneof.oneofKind]; + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string key */ + 1: + message.key = reader.string(); + break; + case /* string hash */ + 2: + message.hash = reader.string(); + break; + case /* int64 size_bytes */ + 3: + message.sizeBytes = reader.int64().toString(); + break; + case /* string scope */ + 4: + message.scope = reader.string(); + break; + case /* string version */ + 5: + message.version = reader.string(); + break; + case /* google.protobuf.Timestamp created_at */ + 6: + message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt); + break; + case /* google.protobuf.Timestamp last_accessed_at */ + 7: + message.lastAccessedAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt); + break; + case /* google.protobuf.Timestamp expires_at */ + 8: + message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; } - oneof.oneofKind = void 0; - } - __name(clearOneofValue, "clearOneofValue"); - exports2.clearOneofValue = clearOneofValue; - function getSelectedOneofValue(oneof) { - if (oneof.oneofKind === void 0) { - return void 0; + internalBinaryWrite(message, writer, options) { + if (message.key !== "") + writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.key); + if (message.hash !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.hash); + if (message.sizeBytes !== "0") + writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes); + if (message.scope !== "") + writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope); + if (message.version !== "") + writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version); + if (message.createdAt) + timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.lastAccessedAt) + timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.expiresAt) + timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - return oneof[oneof.oneofKind]; - } - __name(getSelectedOneofValue, "getSelectedOneofValue"); - exports2.getSelectedOneofValue = getSelectedOneofValue; + }; + exports2.CacheEntry = new CacheEntry$Type(); } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-type-check.js -var require_reflection_type_check = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-type-check.js"(exports2) { +// ../node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.js +var require_cachescope = __commonJS({ + "../node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionTypeCheck = void 0; - var reflection_info_1 = require_reflection_info(); - var oneof_1 = require_oneof(); - var ReflectionTypeCheck = class { + exports2.CacheScope = void 0; + var runtime_1 = require_commonjs7(); + var runtime_2 = require_commonjs7(); + var runtime_3 = require_commonjs7(); + var runtime_4 = require_commonjs7(); + var runtime_5 = require_commonjs7(); + var CacheScope$Type = class extends runtime_5.MessageType { static { - __name(this, "ReflectionTypeCheck"); - } - constructor(info) { - var _a; - this.fields = (_a = info.fields) !== null && _a !== void 0 ? _a : []; + __name(this, "CacheScope$Type"); } - prepare() { - if (this.data) - return; - const req = [], known = [], oneofs = []; - for (let field of this.fields) { - if (field.oneof) { - if (!oneofs.includes(field.oneof)) { - oneofs.push(field.oneof); - req.push(field.oneof); - known.push(field.oneof); - } - } else { - known.push(field.localName); - switch (field.kind) { - case "scalar": - case "enum": - if (!field.opt || field.repeat) - req.push(field.localName); - break; - case "message": - if (field.repeat) - req.push(field.localName); - break; - case "map": - req.push(field.localName); - break; - } + constructor() { + super("github.actions.results.entities.v1.CacheScope", [ + { + no: 1, + name: "scope", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 2, + name: "permission", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ } - } - this.data = { req, known, oneofs: Object.values(oneofs) }; + ]); } - /** - * Is the argument a valid message as specified by the - * reflection information? - * - * Checks all field types recursively. The `depth` - * specifies how deep into the structure the check will be. - * - * With a depth of 0, only the presence of fields - * is checked. - * - * With a depth of 1 or more, the field types are checked. - * - * With a depth of 2 or more, the members of map, repeated - * and message fields are checked. - * - * Message fields will be checked recursively with depth - 1. - * - * The number of map entries / repeated values being checked - * is < depth. 
- */ - is(message, depth, allowExcessProperties = false) { - if (depth < 0) - return true; - if (message === null || message === void 0 || typeof message != "object") - return false; - this.prepare(); - let keys = Object.keys(message), data = this.data; - if (keys.length < data.req.length || data.req.some((n) => !keys.includes(n))) - return false; - if (!allowExcessProperties) { - if (keys.some((k) => !data.known.includes(k))) - return false; - } - if (depth < 1) { - return true; - } - for (const name of data.oneofs) { - const group = message[name]; - if (!oneof_1.isOneofGroup(group)) - return false; - if (group.oneofKind === void 0) - continue; - const field = this.fields.find((f) => f.localName === group.oneofKind); - if (!field) - return false; - if (!this.field(group[group.oneofKind], field, allowExcessProperties, depth)) - return false; - } - for (const field of this.fields) { - if (field.oneof !== void 0) - continue; - if (!this.field(message[field.localName], field, allowExcessProperties, depth)) - return false; - } - return true; + create(value) { + const message = { scope: "", permission: "0" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; } - field(arg, field, allowExcessProperties, depth) { - let repeated = field.repeat; - switch (field.kind) { - case "scalar": - if (arg === void 0) - return field.opt; - if (repeated) - return this.scalars(arg, field.T, depth, field.L); - return this.scalar(arg, field.T, field.L); - case "enum": - if (arg === void 0) - return field.opt; - if (repeated) - return this.scalars(arg, reflection_info_1.ScalarType.INT32, depth); - return this.scalar(arg, reflection_info_1.ScalarType.INT32); - case "message": - if (arg === void 0) - return true; - if (repeated) - return this.messages(arg, field.T(), allowExcessProperties, depth); - return this.message(arg, field.T(), allowExcessProperties, depth); - case "map": - if (typeof arg != "object" || arg === null) - return false; - if (depth < 2) - return true; - if (!this.mapKeys(arg, field.K, depth)) - return false; - switch (field.V.kind) { - case "scalar": - return this.scalars(Object.values(arg), field.V.T, depth, field.V.L); - case "enum": - return this.scalars(Object.values(arg), reflection_info_1.ScalarType.INT32, depth); - case "message": - return this.messages(Object.values(arg), field.V.T(), allowExcessProperties, depth); - } - break; + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string scope */ + 1: + message.scope = reader.string(); + break; + case /* int64 permission */ + 2: + message.permission = reader.int64().toString(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
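/*
 * Note on the unknown-field branch above: `options.readUnknownField` may be
 * "throw" (reject unknown fields), false (skip them silently), true (the
 * default: keep them via UnknownFieldHandler.onRead), or a custom handler
 * with the same signature, e.g. (sketch)
 *
 *   CacheScope.fromBinary(bytes, { readUnknownField: "throw" });
 */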
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } - return true; + return message; } - message(arg, type, allowExcessProperties, depth) { - if (allowExcessProperties) { - return type.isAssignable(arg, depth); - } - return type.is(arg, depth); + internalBinaryWrite(message, writer, options) { + if (message.scope !== "") + writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.scope); + if (message.permission !== "0") + writer.tag(2, runtime_1.WireType.Varint).int64(message.permission); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - messages(arg, type, allowExcessProperties, depth) { - if (!Array.isArray(arg)) - return false; - if (depth < 2) - return true; - if (allowExcessProperties) { - for (let i = 0; i < arg.length && i < depth; i++) - if (!type.isAssignable(arg[i], depth - 1)) - return false; - } else { - for (let i = 0; i < arg.length && i < depth; i++) - if (!type.is(arg[i], depth - 1)) - return false; - } - return true; + }; + exports2.CacheScope = new CacheScope$Type(); + } +}); + +// ../node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.js +var require_cachemetadata = __commonJS({ + "../node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.CacheMetadata = void 0; + var runtime_1 = require_commonjs7(); + var runtime_2 = require_commonjs7(); + var runtime_3 = require_commonjs7(); + var runtime_4 = require_commonjs7(); + var runtime_5 = require_commonjs7(); + var cachescope_1 = require_cachescope(); + var CacheMetadata$Type = class extends runtime_5.MessageType { + static { + __name(this, "CacheMetadata$Type"); } - scalar(arg, type, longType) { - let argType = typeof arg; - switch (type) { - case reflection_info_1.ScalarType.UINT64: - case reflection_info_1.ScalarType.FIXED64: - case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - switch (longType) { - case reflection_info_1.LongType.BIGINT: - return argType == "bigint"; - case reflection_info_1.LongType.NUMBER: - return argType == "number" && !isNaN(arg); - default: - return argType == "string"; - } - case reflection_info_1.ScalarType.BOOL: - return argType == "boolean"; - case reflection_info_1.ScalarType.STRING: - return argType == "string"; - case reflection_info_1.ScalarType.BYTES: - return arg instanceof Uint8Array; - case reflection_info_1.ScalarType.DOUBLE: - case reflection_info_1.ScalarType.FLOAT: - return argType == "number" && !isNaN(arg); - default: - return argType == "number" && Number.isInteger(arg); - } + constructor() { + super("github.actions.results.entities.v1.CacheMetadata", [ + { + no: 1, + name: "repository_id", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + }, + { no: 2, name: "scope", kind: "message", repeat: 1, T: /* @__PURE__ */ __name(() => cachescope_1.CacheScope, "T") } + ]); } - scalars(arg, type, depth, longType) { - if (!Array.isArray(arg)) - return false; - if (depth < 2) - return true; - if (Array.isArray(arg)) { - for (let i = 0; i < arg.length && i < depth; i++) - if (!this.scalar(arg[i], type, longType)) - return false; - } - return true; + create(value) { + const message = { repositoryId: "0", scope: [] }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: 
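/*
 * Note on defineProperty above: every created message carries a
 * non-enumerable MESSAGE_TYPE symbol pointing back at its MessageType, so
 * reflection code can recover the type later, while Object.assign and
 * object spread leave the brand behind.
 */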
false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; } - mapKeys(map, type, depth) { - let keys = Object.keys(map); - switch (type) { - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - case reflection_info_1.ScalarType.UINT32: - return this.scalars(keys.slice(0, depth).map((k) => parseInt(k)), type, depth); - case reflection_info_1.ScalarType.BOOL: - return this.scalars(keys.slice(0, depth).map((k) => k == "true" ? true : k == "false" ? false : k), type, depth); - default: - return this.scalars(keys, type, depth, reflection_info_1.LongType.STRING); + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 repository_id */ + 1: + message.repositoryId = reader.int64().toString(); + break; + case /* repeated github.actions.results.entities.v1.CacheScope scope */ + 2: + message.scope.push(cachescope_1.CacheScope.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } + return message; } - }; - exports2.ReflectionTypeCheck = ReflectionTypeCheck; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-long-convert.js -var require_reflection_long_convert = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-long-convert.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionLongConvert = void 0; - var reflection_info_1 = require_reflection_info(); - function reflectionLongConvert(long, type) { - switch (type) { - case reflection_info_1.LongType.BIGINT: - return long.toBigInt(); - case reflection_info_1.LongType.NUMBER: - return long.toNumber(); - default: - return long.toString(); + internalBinaryWrite(message, writer, options) { + if (message.repositoryId !== "0") + writer.tag(1, runtime_1.WireType.Varint).int64(message.repositoryId); + for (let i = 0; i < message.scope.length; i++) + cachescope_1.CacheScope.internalBinaryWrite(message.scope[i], writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
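/*
 * Note on the nested-message writes above (illustrative only): fork()
 * opens a fresh buffer for the embedded CacheScope and join() closes it
 * by prefixing the accumulated bytes with their varint length, yielding
 * the standard length-delimited framing:
 *
 *   tag(2, LengthDelimited) | varint(byteLength) | CacheScope bytes
 */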
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - } - __name(reflectionLongConvert, "reflectionLongConvert"); - exports2.reflectionLongConvert = reflectionLongConvert; + }; + exports2.CacheMetadata = new CacheMetadata$Type(); } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-reader.js -var require_reflection_json_reader = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-reader.js"(exports2) { +// ../node_modules/@actions/cache/lib/generated/results/api/v1/cache.js +var require_cache2 = __commonJS({ + "../node_modules/@actions/cache/lib/generated/results/api/v1/cache.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionJsonReader = void 0; - var json_typings_1 = require_json_typings(); - var base64_1 = require_base642(); - var reflection_info_1 = require_reflection_info(); - var pb_long_1 = require_pb_long(); - var assert_1 = require_assert(); - var reflection_long_convert_1 = require_reflection_long_convert(); - var ReflectionJsonReader = class { + exports2.CacheService = exports2.LookupCacheEntryResponse = exports2.LookupCacheEntryRequest = exports2.ListCacheEntriesResponse = exports2.ListCacheEntriesRequest = exports2.DeleteCacheEntryResponse = exports2.DeleteCacheEntryRequest = exports2.GetCacheEntryDownloadURLResponse = exports2.GetCacheEntryDownloadURLRequest = exports2.FinalizeCacheEntryUploadResponse = exports2.FinalizeCacheEntryUploadRequest = exports2.CreateCacheEntryResponse = exports2.CreateCacheEntryRequest = void 0; + var runtime_rpc_1 = require_commonjs8(); + var runtime_1 = require_commonjs7(); + var runtime_2 = require_commonjs7(); + var runtime_3 = require_commonjs7(); + var runtime_4 = require_commonjs7(); + var runtime_5 = require_commonjs7(); + var cacheentry_1 = require_cacheentry(); + var cachemetadata_1 = require_cachemetadata(); + var CreateCacheEntryRequest$Type = class extends runtime_5.MessageType { static { - __name(this, "ReflectionJsonReader"); + __name(this, "CreateCacheEntryRequest$Type"); } - constructor(info) { - this.info = info; + constructor() { + super("github.actions.results.api.v1.CreateCacheEntryRequest", [ + { no: 1, name: "metadata", kind: "message", T: /* @__PURE__ */ __name(() => cachemetadata_1.CacheMetadata, "T") }, + { + no: 2, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "version", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); } - prepare() { - var _a; - if (this.fMap === void 0) { - this.fMap = {}; - const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; - for (const field of fieldsInput) { - this.fMap[field.name] = field; - this.fMap[field.jsonName] = field; - this.fMap[field.localName] = field; + create(value) { + const message = { key: "", version: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? 
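/*
 * Construction sketch for the generated request type above (hypothetical
 * values): create() fills scalar defaults and merges the partial input,
 * while unset message fields such as `metadata` stay undefined.
 *
 *   CreateCacheEntryRequest.create({ key: "qodana-cache", version: "v1" });
 *   // => { key: "qodana-cache", version: "v1" }
 */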
target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* github.actions.results.entities.v1.CacheMetadata metadata */ + 1: + message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); + break; + case /* string key */ + 2: + message.key = reader.string(); + break; + case /* string version */ + 3: + message.version = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } + return message; } - // Cannot parse JSON for #. - assert(condition, fieldName, jsonValue) { - if (!condition) { - let what = json_typings_1.typeofJsonValue(jsonValue); - if (what == "number" || what == "boolean") - what = jsonValue.toString(); - throw new Error(`Cannot parse JSON ${what} for ${this.info.typeName}#${fieldName}`); - } + internalBinaryWrite(message, writer, options) { + if (message.metadata) + cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.key !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); + if (message.version !== "") + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - /** - * Reads a message from canonical JSON format into the target message. - * - * Repeated fields are appended. Map entries are added, overwriting - * existing keys. - * - * If a message field is already present, it will be merged with the - * new data. - */ - read(input, message, options) { - this.prepare(); - const oneofsHandled = []; - for (const [jsonKey, jsonValue] of Object.entries(input)) { - const field = this.fMap[jsonKey]; - if (!field) { - if (!options.ignoreUnknownFields) - throw new Error(`Found unknown field while reading ${this.info.typeName} from JSON format. 
JSON key: ${jsonKey}`); - continue; - } - const localName = field.localName; - let target; - if (field.oneof) { - if (jsonValue === null && (field.kind !== "enum" || field.T()[0] !== "google.protobuf.NullValue")) { - continue; - } - if (oneofsHandled.includes(field.oneof)) - throw new Error(`Multiple members of the oneof group "${field.oneof}" of ${this.info.typeName} are present in JSON.`); - oneofsHandled.push(field.oneof); - target = message[field.oneof] = { - oneofKind: localName - }; - } else { - target = message; + }; + exports2.CreateCacheEntryRequest = new CreateCacheEntryRequest$Type(); + var CreateCacheEntryResponse$Type = class extends runtime_5.MessageType { + static { + __name(this, "CreateCacheEntryResponse$Type"); + } + constructor() { + super("github.actions.results.api.v1.CreateCacheEntryResponse", [ + { + no: 1, + name: "ok", + kind: "scalar", + T: 8 + /*ScalarType.BOOL*/ + }, + { + no: 2, + name: "signed_upload_url", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ } - if (field.kind == "map") { - if (jsonValue === null) { - continue; - } - this.assert(json_typings_1.isJsonObject(jsonValue), field.name, jsonValue); - const fieldObj = target[localName]; - for (const [jsonObjKey, jsonObjValue] of Object.entries(jsonValue)) { - this.assert(jsonObjValue !== null, field.name + " map value", null); - let val; - switch (field.V.kind) { - case "message": - val = field.V.T().internalJsonRead(jsonObjValue, options); - break; - case "enum": - val = this.enum(field.V.T(), jsonObjValue, field.name, options.ignoreUnknownFields); - if (val === false) - continue; - break; - case "scalar": - val = this.scalar(jsonObjValue, field.V.T, field.V.L, field.name); - break; - } - this.assert(val !== void 0, field.name + " map value", jsonObjValue); - let key = jsonObjKey; - if (field.K == reflection_info_1.ScalarType.BOOL) - key = key == "true" ? true : key == "false" ? 
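/*
 * Note on the map-key coercion above: protobuf JSON object keys are always
 * strings, so boolean keys arrive as "true"/"false" and are converted back
 * before validation, e.g. {"true": 1} yields the map key true.
 */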
false : key; - key = this.scalar(key, field.K, reflection_info_1.LongType.STRING, field.name).toString(); - fieldObj[key] = val; - } - } else if (field.repeat) { - if (jsonValue === null) - continue; - this.assert(Array.isArray(jsonValue), field.name, jsonValue); - const fieldArr = target[localName]; - for (const jsonItem of jsonValue) { - this.assert(jsonItem !== null, field.name, null); - let val; - switch (field.kind) { - case "message": - val = field.T().internalJsonRead(jsonItem, options); - break; - case "enum": - val = this.enum(field.T(), jsonItem, field.name, options.ignoreUnknownFields); - if (val === false) - continue; - break; - case "scalar": - val = this.scalar(jsonItem, field.T, field.L, field.name); - break; - } - this.assert(val !== void 0, field.name, jsonValue); - fieldArr.push(val); - } - } else { - switch (field.kind) { - case "message": - if (jsonValue === null && field.T().typeName != "google.protobuf.Value") { - this.assert(field.oneof === void 0, field.name + " (oneof member)", null); - continue; - } - target[localName] = field.T().internalJsonRead(jsonValue, options, target[localName]); - break; - case "enum": - let val = this.enum(field.T(), jsonValue, field.name, options.ignoreUnknownFields); - if (val === false) - continue; - target[localName] = val; - break; - case "scalar": - target[localName] = this.scalar(jsonValue, field.T, field.L, field.name); - break; - } + ]); + } + create(value) { + const message = { ok: false, signedUploadUrl: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool ok */ + 1: + message.ok = reader.bool(); + break; + case /* string signed_upload_url */ + 2: + message.signedUploadUrl = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } + return message; } - /** - * Returns `false` for unrecognized string representations. - * - * google.protobuf.NullValue accepts only JSON `null` (or the old `"NULL_VALUE"`). 
- */ - enum(type, json, fieldName, ignoreUnknownFields) { - if (type[0] == "google.protobuf.NullValue") - assert_1.assert(json === null || json === "NULL_VALUE", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type[0]} only accepts null.`); - if (json === null) - return 0; - switch (typeof json) { - case "number": - assert_1.assert(Number.isInteger(json), `Unable to parse field ${this.info.typeName}#${fieldName}, enum can only be integral number, got ${json}.`); - return json; - case "string": - let localEnumName = json; - if (type[2] && json.substring(0, type[2].length) === type[2]) - localEnumName = json.substring(type[2].length); - let enumNumber = type[1][localEnumName]; - if (typeof enumNumber === "undefined" && ignoreUnknownFields) { - return false; - } - assert_1.assert(typeof enumNumber == "number", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type[0]} has no value for "${json}".`); - return enumNumber; - } - assert_1.assert(false, `Unable to parse field ${this.info.typeName}#${fieldName}, cannot parse enum value from ${typeof json}".`); + internalBinaryWrite(message, writer, options) { + if (message.ok !== false) + writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); + if (message.signedUploadUrl !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - scalar(json, type, longType, fieldName) { - let e; - try { - switch (type) { - // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". - // Either numbers or strings are accepted. Exponent notation is also accepted. - case reflection_info_1.ScalarType.DOUBLE: - case reflection_info_1.ScalarType.FLOAT: - if (json === null) - return 0; - if (json === "NaN") - return Number.NaN; - if (json === "Infinity") - return Number.POSITIVE_INFINITY; - if (json === "-Infinity") - return Number.NEGATIVE_INFINITY; - if (json === "") { - e = "empty string"; - break; - } - if (typeof json == "string" && json.trim().length !== json.length) { - e = "extra whitespace"; - break; - } - if (typeof json != "string" && typeof json != "number") { - break; - } - let float = Number(json); - if (Number.isNaN(float)) { - e = "not a number"; - break; - } - if (!Number.isFinite(float)) { - e = "too large or small"; - break; - } - if (type == reflection_info_1.ScalarType.FLOAT) - assert_1.assertFloat32(float); - return float; - // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - case reflection_info_1.ScalarType.UINT32: - if (json === null) - return 0; - let int32; - if (typeof json == "number") - int32 = json; - else if (json === "") - e = "empty string"; - else if (typeof json == "string") { - if (json.trim().length !== json.length) - e = "extra whitespace"; - else - int32 = Number(json); - } - if (int32 === void 0) - break; - if (type == reflection_info_1.ScalarType.UINT32) - assert_1.assertUInt32(int32); - else - assert_1.assertInt32(int32); - return int32; - // int64, fixed64, uint64: JSON value will be a decimal string. Either numbers or strings are accepted. 
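/*
 * Illustration for the 64-bit branches below, using the cache entry's
 * `size_bytes` field from this file as the example: values round-trip
 * through PbLong/PbULong, so both JSON spellings parse to the same result:
 *
 *   {"sizeBytes": "1048576"}  // canonical decimal string
 *   {"sizeBytes": 1048576}    // a plain number is accepted too
 */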
- case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - if (json === null) - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); - if (typeof json != "number" && typeof json != "string") - break; - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.from(json), longType); - case reflection_info_1.ScalarType.FIXED64: - case reflection_info_1.ScalarType.UINT64: - if (json === null) - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); - if (typeof json != "number" && typeof json != "string") - break; - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.from(json), longType); - // bool: - case reflection_info_1.ScalarType.BOOL: - if (json === null) - return false; - if (typeof json !== "boolean") - break; - return json; - // string: - case reflection_info_1.ScalarType.STRING: - if (json === null) - return ""; - if (typeof json !== "string") { - e = "extra whitespace"; - break; - } - try { - encodeURIComponent(json); - } catch (e2) { - e2 = "invalid UTF8"; - break; - } - return json; - // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. - // Either standard or URL-safe base64 encoding with/without paddings are accepted. - case reflection_info_1.ScalarType.BYTES: - if (json === null || json === "") - return new Uint8Array(0); - if (typeof json !== "string") - break; - return base64_1.base64decode(json); + }; + exports2.CreateCacheEntryResponse = new CreateCacheEntryResponse$Type(); + var FinalizeCacheEntryUploadRequest$Type = class extends runtime_5.MessageType { + static { + __name(this, "FinalizeCacheEntryUploadRequest$Type"); + } + constructor() { + super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [ + { no: 1, name: "metadata", kind: "message", T: /* @__PURE__ */ __name(() => cachemetadata_1.CacheMetadata, "T") }, + { + no: 2, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "size_bytes", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + }, + { + no: 4, + name: "version", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); + } + create(value) { + const message = { key: "", sizeBytes: "0", version: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* github.actions.results.entities.v1.CacheMetadata metadata */ + 1: + message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); + break; + case /* string key */ + 2: + message.key = reader.string(); + break; + case /* int64 size_bytes */ + 3: + message.sizeBytes = reader.int64().toString(); + break; + case /* string version */ + 4: + message.version = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } - } catch (error) { - e = error.message; } - this.assert(false, fieldName + (e ? " - " + e : ""), json); + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.metadata) + cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.key !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); + if (message.sizeBytes !== "0") + writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes); + if (message.version !== "") + writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } }; - exports2.ReflectionJsonReader = ReflectionJsonReader; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-writer.js -var require_reflection_json_writer = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-writer.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionJsonWriter = void 0; - var base64_1 = require_base642(); - var pb_long_1 = require_pb_long(); - var reflection_info_1 = require_reflection_info(); - var assert_1 = require_assert(); - var ReflectionJsonWriter = class { + exports2.FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type(); + var FinalizeCacheEntryUploadResponse$Type = class extends runtime_5.MessageType { static { - __name(this, "ReflectionJsonWriter"); - } - constructor(info) { - var _a; - this.fields = (_a = info.fields) !== null && _a !== void 0 ? _a : []; + __name(this, "FinalizeCacheEntryUploadResponse$Type"); } - /** - * Converts the message to a JSON object, based on the field descriptors. - */ - write(message, options) { - const json = {}, source = message; - for (const field of this.fields) { - if (!field.oneof) { - let jsonValue2 = this.field(field, source[field.localName], options); - if (jsonValue2 !== void 0) - json[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue2; - continue; + constructor() { + super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [ + { + no: 1, + name: "ok", + kind: "scalar", + T: 8 + /*ScalarType.BOOL*/ + }, + { + no: 2, + name: "entry_id", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ } - const group = source[field.oneof]; - if (group.oneofKind !== field.localName) - continue; - const opt = field.kind == "scalar" || field.kind == "enum" ? Object.assign(Object.assign({}, options), { emitDefaultValues: true }) : options; - let jsonValue = this.field(field, group[field.localName], opt); - assert_1.assert(jsonValue !== void 0); - json[options.useProtoFieldName ? 
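/*
 * Note on the key choice above: by default the writer emits the
 * lowerCamelCase `jsonName`; with `useProtoFieldName: true` it keeps the
 * original proto name instead, e.g. "sizeBytes" vs. "size_bytes".
 */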
field.name : field.jsonName] = jsonValue; - } - return json; + ]); } - field(field, value, options) { - let jsonValue = void 0; - if (field.kind == "map") { - assert_1.assert(typeof value == "object" && value !== null); - const jsonObj = {}; - switch (field.V.kind) { - case "scalar": - for (const [entryKey, entryValue] of Object.entries(value)) { - const val = this.scalar(field.V.T, entryValue, field.name, false, true); - assert_1.assert(val !== void 0); - jsonObj[entryKey.toString()] = val; - } - break; - case "message": - const messageType = field.V.T(); - for (const [entryKey, entryValue] of Object.entries(value)) { - const val = this.message(messageType, entryValue, field.name, options); - assert_1.assert(val !== void 0); - jsonObj[entryKey.toString()] = val; - } + create(value) { + const message = { ok: false, entryId: "0" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool ok */ + 1: + message.ok = reader.bool(); break; - case "enum": - const enumInfo = field.V.T(); - for (const [entryKey, entryValue] of Object.entries(value)) { - assert_1.assert(entryValue === void 0 || typeof entryValue == "number"); - const val = this.enum(enumInfo, entryValue, field.name, false, true, options.enumAsInteger); - assert_1.assert(val !== void 0); - jsonObj[entryKey.toString()] = val; - } + case /* int64 entry_id */ + 2: + message.entryId = reader.int64().toString(); break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } - if (options.emitDefaultValues || Object.keys(jsonObj).length > 0) - jsonValue = jsonObj; - } else if (field.repeat) { - assert_1.assert(Array.isArray(value)); - const jsonArr = []; - switch (field.kind) { - case "scalar": - for (let i = 0; i < value.length; i++) { - const val = this.scalar(field.T, value[i], field.name, field.opt, true); - assert_1.assert(val !== void 0); - jsonArr.push(val); - } - break; - case "enum": - const enumInfo = field.T(); - for (let i = 0; i < value.length; i++) { - assert_1.assert(value[i] === void 0 || typeof value[i] == "number"); - const val = this.enum(enumInfo, value[i], field.name, field.opt, true, options.enumAsInteger); - assert_1.assert(val !== void 0); - jsonArr.push(val); - } - break; - case "message": - const messageType = field.T(); - for (let i = 0; i < value.length; i++) { - const val = this.message(messageType, value[i], field.name, options); - assert_1.assert(val !== void 0); - jsonArr.push(val); - } - break; + } + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.ok !== false) + writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); + if (message.entryId !== "0") + writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type(); + var GetCacheEntryDownloadURLRequest$Type = class extends runtime_5.MessageType { + static { + __name(this, "GetCacheEntryDownloadURLRequest$Type"); + } + constructor() { + super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [ + { no: 1, name: "metadata", kind: "message", T: /* @__PURE__ */ __name(() => cachemetadata_1.CacheMetadata, "T") }, + { + no: 2, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "restore_keys", + kind: "scalar", + repeat: 2, + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 4, + name: "version", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ } - if (options.emitDefaultValues || jsonArr.length > 0 || options.emitDefaultValues) - jsonValue = jsonArr; - } else { - switch (field.kind) { - case "scalar": - jsonValue = this.scalar(field.T, value, field.name, field.opt, options.emitDefaultValues); + ]); + } + create(value) { + const message = { key: "", restoreKeys: [], version: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* github.actions.results.entities.v1.CacheMetadata metadata */ + 1: + message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; - case "enum": - jsonValue = this.enum(field.T(), value, field.name, field.opt, options.emitDefaultValues, options.enumAsInteger); + case /* string key */ + 2: + message.key = reader.string(); break; - case "message": - jsonValue = this.message(field.T(), value, field.name, options); + case /* repeated string restore_keys */ + 3: + message.restoreKeys.push(reader.string()); + break; + case /* string version */ + 4: + message.version = reader.string(); break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } - return jsonValue; - } - /** - * Returns `null` as the default for google.protobuf.NullValue. - */ - enum(type, value, fieldName, optional, emitDefaultValues, enumAsInteger) { - if (type[0] == "google.protobuf.NullValue") - return !emitDefaultValues && !optional ? void 0 : null; - if (value === void 0) { - assert_1.assert(optional); - return void 0; - } - if (value === 0 && !emitDefaultValues && !optional) - return void 0; - assert_1.assert(typeof value == "number"); - assert_1.assert(Number.isInteger(value)); - if (enumAsInteger || !type[1].hasOwnProperty(value)) - return value; - if (type[2]) - return type[2] + type[1][value]; - return type[1][value]; - } - message(type, value, fieldName, options) { - if (value === void 0) - return options.emitDefaultValues ? 
null : void 0; - return type.internalJsonWrite(value, options); + return message; } - scalar(type, value, fieldName, optional, emitDefaultValues) { - if (value === void 0) { - assert_1.assert(optional); - return void 0; - } - const ed = emitDefaultValues || optional; - switch (type) { - // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - if (value === 0) - return ed ? 0 : void 0; - assert_1.assertInt32(value); - return value; - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.UINT32: - if (value === 0) - return ed ? 0 : void 0; - assert_1.assertUInt32(value); - return value; - // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". - // Either numbers or strings are accepted. Exponent notation is also accepted. - case reflection_info_1.ScalarType.FLOAT: - assert_1.assertFloat32(value); - case reflection_info_1.ScalarType.DOUBLE: - if (value === 0) - return ed ? 0 : void 0; - assert_1.assert(typeof value == "number"); - if (Number.isNaN(value)) - return "NaN"; - if (value === Number.POSITIVE_INFINITY) - return "Infinity"; - if (value === Number.NEGATIVE_INFINITY) - return "-Infinity"; - return value; - // string: - case reflection_info_1.ScalarType.STRING: - if (value === "") - return ed ? "" : void 0; - assert_1.assert(typeof value == "string"); - return value; - // bool: - case reflection_info_1.ScalarType.BOOL: - if (value === false) - return ed ? false : void 0; - assert_1.assert(typeof value == "boolean"); - return value; - // JSON value will be a decimal string. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.UINT64: - case reflection_info_1.ScalarType.FIXED64: - assert_1.assert(typeof value == "number" || typeof value == "string" || typeof value == "bigint"); - let ulong = pb_long_1.PbULong.from(value); - if (ulong.isZero() && !ed) - return void 0; - return ulong.toString(); - // JSON value will be a decimal string. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - assert_1.assert(typeof value == "number" || typeof value == "string" || typeof value == "bigint"); - let long = pb_long_1.PbLong.from(value); - if (long.isZero() && !ed) - return void 0; - return long.toString(); - // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. - // Either standard or URL-safe base64 encoding with/without paddings are accepted. - case reflection_info_1.ScalarType.BYTES: - assert_1.assert(value instanceof Uint8Array); - if (!value.byteLength) - return ed ? 
"" : void 0; - return base64_1.base64encode(value); - } + internalBinaryWrite(message, writer, options) { + if (message.metadata) + cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.key !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); + for (let i = 0; i < message.restoreKeys.length; i++) + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]); + if (message.version !== "") + writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } }; - exports2.ReflectionJsonWriter = ReflectionJsonWriter; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-scalar-default.js -var require_reflection_scalar_default = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-scalar-default.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionScalarDefault = void 0; - var reflection_info_1 = require_reflection_info(); - var reflection_long_convert_1 = require_reflection_long_convert(); - var pb_long_1 = require_pb_long(); - function reflectionScalarDefault(type, longType = reflection_info_1.LongType.STRING) { - switch (type) { - case reflection_info_1.ScalarType.BOOL: - return false; - case reflection_info_1.ScalarType.UINT64: - case reflection_info_1.ScalarType.FIXED64: - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); - case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); - case reflection_info_1.ScalarType.DOUBLE: - case reflection_info_1.ScalarType.FLOAT: - return 0; - case reflection_info_1.ScalarType.BYTES: - return new Uint8Array(0); - case reflection_info_1.ScalarType.STRING: - return ""; - default: - return 0; - } - } - __name(reflectionScalarDefault, "reflectionScalarDefault"); - exports2.reflectionScalarDefault = reflectionScalarDefault; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-reader.js -var require_reflection_binary_reader = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-reader.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionBinaryReader = void 0; - var binary_format_contract_1 = require_binary_format_contract(); - var reflection_info_1 = require_reflection_info(); - var reflection_long_convert_1 = require_reflection_long_convert(); - var reflection_scalar_default_1 = require_reflection_scalar_default(); - var ReflectionBinaryReader = class { + exports2.GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type(); + var GetCacheEntryDownloadURLResponse$Type = class extends runtime_5.MessageType { static { - __name(this, "ReflectionBinaryReader"); + __name(this, "GetCacheEntryDownloadURLResponse$Type"); } - constructor(info) { - this.info = info; + constructor() { + super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [ + { + no: 1, + name: "ok", + kind: "scalar", + T: 8 + /*ScalarType.BOOL*/ + }, + { + no: 2, + name: "signed_download_url", + kind: 
"scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "matched_key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); } - prepare() { - var _a; - if (!this.fieldNoToField) { - const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; - this.fieldNoToField = new Map(fieldsInput.map((field) => [field.no, field])); - } + create(value) { + const message = { ok: false, signedDownloadUrl: "", matchedKey: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; } - /** - * Reads a message from binary format into the target message. - * - * Repeated fields are appended. Map entries are added, overwriting - * existing keys. - * - * If a message field is already present, it will be merged with the - * new data. - */ - read(reader, message, options, length) { - this.prepare(); - const end = length === void 0 ? reader.len : reader.pos + length; + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { - const [fieldNo, wireType] = reader.tag(), field = this.fieldNoToField.get(fieldNo); - if (!field) { - let u = options.readUnknownField; - if (u == "throw") - throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.info.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? binary_format_contract_1.UnknownFieldHandler.onRead : u)(this.info.typeName, message, fieldNo, wireType, d); - continue; - } - let target = message, repeated = field.repeat, localName = field.localName; - if (field.oneof) { - target = target[field.oneof]; - if (target.oneofKind !== localName) - target = message[field.oneof] = { - oneofKind: localName - }; - } - switch (field.kind) { - case "scalar": - case "enum": - let T = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; - let L = field.kind == "scalar" ? field.L : void 0; - if (repeated) { - let arr = target[localName]; - if (wireType == binary_format_contract_1.WireType.LengthDelimited && T != reflection_info_1.ScalarType.STRING && T != reflection_info_1.ScalarType.BYTES) { - let e = reader.uint32() + reader.pos; - while (reader.pos < e) - arr.push(this.scalar(reader, T, L)); - } else - arr.push(this.scalar(reader, T, L)); - } else - target[localName] = this.scalar(reader, T, L); + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool ok */ + 1: + message.ok = reader.bool(); break; - case "message": - if (repeated) { - let arr = target[localName]; - let msg = field.T().internalBinaryRead(reader, reader.uint32(), options); - arr.push(msg); - } else - target[localName] = field.T().internalBinaryRead(reader, reader.uint32(), options, target[localName]); + case /* string signed_download_url */ + 2: + message.signedDownloadUrl = reader.string(); break; - case "map": - let [mapKey, mapVal] = this.mapEntry(field, reader, options); - target[localName][mapKey] = mapVal; + case /* string matched_key */ + 3: + message.matchedKey = reader.string(); break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } + return message; } - /** - * Read a map field, expecting key field = 1, value field = 2 - */ - mapEntry(field, reader, options) { - let length = reader.uint32(); - let end = reader.pos + length; - let key = void 0; - let val = void 0; + internalBinaryWrite(message, writer, options) { + if (message.ok !== false) + writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); + if (message.signedDownloadUrl !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedDownloadUrl); + if (message.matchedKey !== "") + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.matchedKey); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type(); + var DeleteCacheEntryRequest$Type = class extends runtime_5.MessageType { + static { + __name(this, "DeleteCacheEntryRequest$Type"); + } + constructor() { + super("github.actions.results.api.v1.DeleteCacheEntryRequest", [ + { no: 1, name: "metadata", kind: "message", T: /* @__PURE__ */ __name(() => cachemetadata_1.CacheMetadata, "T") }, + { + no: 2, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); + } + create(value) { + const message = { key: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { - case 1: - if (field.K == reflection_info_1.ScalarType.BOOL) - key = reader.bool().toString(); - else - key = this.scalar(reader, field.K, reflection_info_1.LongType.STRING); + case /* github.actions.results.entities.v1.CacheMetadata metadata */ + 1: + message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; - case 2: - switch (field.V.kind) { - case "scalar": - val = this.scalar(reader, field.V.T, field.V.L); - break; - case "enum": - val = reader.int32(); - break; - case "message": - val = field.V.T().internalBinaryRead(reader, reader.uint32(), options); - break; - } + case /* string key */ + 2: + message.key = reader.string(); break; default: - throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) in map entry for ${this.info.typeName}#${field.name}`); + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } - if (key === void 0) { - let keyRaw = reflection_scalar_default_1.reflectionScalarDefault(field.K); - key = field.K == reflection_info_1.ScalarType.BOOL ? 
keyRaw.toString() : keyRaw; - } - if (val === void 0) - switch (field.V.kind) { - case "scalar": - val = reflection_scalar_default_1.reflectionScalarDefault(field.V.T, field.V.L); - break; - case "enum": - val = 0; + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.metadata) + cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.key !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.DeleteCacheEntryRequest = new DeleteCacheEntryRequest$Type(); + var DeleteCacheEntryResponse$Type = class extends runtime_5.MessageType { + static { + __name(this, "DeleteCacheEntryResponse$Type"); + } + constructor() { + super("github.actions.results.api.v1.DeleteCacheEntryResponse", [ + { + no: 1, + name: "ok", + kind: "scalar", + T: 8 + /*ScalarType.BOOL*/ + }, + { + no: 2, + name: "entry_id", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + } + ]); + } + create(value) { + const message = { ok: false, entryId: "0" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool ok */ + 1: + message.ok = reader.bool(); break; - case "message": - val = field.V.T().create(); + case /* int64 entry_id */ + 2: + message.entryId = reader.int64().toString(); break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } - return [key, val]; - } - scalar(reader, type, longType) { - switch (type) { - case reflection_info_1.ScalarType.INT32: - return reader.int32(); - case reflection_info_1.ScalarType.STRING: - return reader.string(); - case reflection_info_1.ScalarType.BOOL: - return reader.bool(); - case reflection_info_1.ScalarType.DOUBLE: - return reader.double(); - case reflection_info_1.ScalarType.FLOAT: - return reader.float(); - case reflection_info_1.ScalarType.INT64: - return reflection_long_convert_1.reflectionLongConvert(reader.int64(), longType); - case reflection_info_1.ScalarType.UINT64: - return reflection_long_convert_1.reflectionLongConvert(reader.uint64(), longType); - case reflection_info_1.ScalarType.FIXED64: - return reflection_long_convert_1.reflectionLongConvert(reader.fixed64(), longType); - case reflection_info_1.ScalarType.FIXED32: - return reader.fixed32(); - case reflection_info_1.ScalarType.BYTES: - return reader.bytes(); - case reflection_info_1.ScalarType.UINT32: - return reader.uint32(); - case reflection_info_1.ScalarType.SFIXED32: - return reader.sfixed32(); - case reflection_info_1.ScalarType.SFIXED64: - return reflection_long_convert_1.reflectionLongConvert(reader.sfixed64(), longType); - case reflection_info_1.ScalarType.SINT32: - return reader.sint32(); - case reflection_info_1.ScalarType.SINT64: - return reflection_long_convert_1.reflectionLongConvert(reader.sint64(), longType); } + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.ok !== false) + writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); + if (message.entryId !== "0") + writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } }; - exports2.ReflectionBinaryReader = ReflectionBinaryReader; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-writer.js -var require_reflection_binary_writer = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-writer.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionBinaryWriter = void 0; - var binary_format_contract_1 = require_binary_format_contract(); - var reflection_info_1 = require_reflection_info(); - var assert_1 = require_assert(); - var pb_long_1 = require_pb_long(); - var ReflectionBinaryWriter = class { + exports2.DeleteCacheEntryResponse = new DeleteCacheEntryResponse$Type(); + var ListCacheEntriesRequest$Type = class extends runtime_5.MessageType { static { - __name(this, "ReflectionBinaryWriter"); + __name(this, "ListCacheEntriesRequest$Type"); } - constructor(info) { - this.info = info; + constructor() { + super("github.actions.results.api.v1.ListCacheEntriesRequest", [ + { no: 1, name: "metadata", kind: "message", T: /* @__PURE__ */ __name(() => cachemetadata_1.CacheMetadata, "T") }, + { + no: 2, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "restore_keys", + kind: "scalar", + repeat: 2, + T: 9 + /*ScalarType.STRING*/ + } + ]); } - prepare() { - if (!this.fields) { - const fieldsInput = this.info.fields ? 
this.info.fields.concat() : []; - this.fields = fieldsInput.sort((a, b) => a.no - b.no); - } + create(value) { + const message = { key: "", restoreKeys: [] }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; } - /** - * Writes the message to binary format. - */ - write(message, writer, options) { - this.prepare(); - for (const field of this.fields) { - let value, emitDefault, repeated = field.repeat, localName = field.localName; - if (field.oneof) { - const group = message[field.oneof]; - if (group.oneofKind !== localName) - continue; - value = group[localName]; - emitDefault = true; - } else { - value = message[localName]; - emitDefault = false; - } - switch (field.kind) { - case "scalar": - case "enum": - let T = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; - if (repeated) { - assert_1.assert(Array.isArray(value)); - if (repeated == reflection_info_1.RepeatType.PACKED) - this.packed(writer, T, field.no, value); - else - for (const item of value) - this.scalar(writer, T, field.no, item, true); - } else if (value === void 0) - assert_1.assert(field.opt); - else - this.scalar(writer, T, field.no, value, emitDefault || field.opt); + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* github.actions.results.entities.v1.CacheMetadata metadata */ + 1: + message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; - case "message": - if (repeated) { - assert_1.assert(Array.isArray(value)); - for (const item of value) - this.message(writer, options, field.T(), field.no, item); - } else { - this.message(writer, options, field.T(), field.no, value); - } + case /* string key */ + 2: + message.key = reader.string(); break; - case "map": - assert_1.assert(typeof value == "object" && value !== null); - for (const [key, val] of Object.entries(value)) - this.mapEntry(writer, options, field, key, val); + case /* repeated string restore_keys */ + 3: + message.restoreKeys.push(reader.string()); break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.metadata) + cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.key !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); + for (let i = 0; i < message.restoreKeys.length; i++) + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]); let u = options.writeUnknownFields; if (u !== false) - (u === true ? 
binary_format_contract_1.UnknownFieldHandler.onWrite : u)(this.info.typeName, message, writer); - } - mapEntry(writer, options, field, key, value) { - writer.tag(field.no, binary_format_contract_1.WireType.LengthDelimited); - writer.fork(); - let keyValue = key; - switch (field.K) { - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.UINT32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - keyValue = Number.parseInt(key); - break; - case reflection_info_1.ScalarType.BOOL: - assert_1.assert(key == "true" || key == "false"); - keyValue = key == "true"; - break; - } - this.scalar(writer, field.K, 1, keyValue, true); - switch (field.V.kind) { - case "scalar": - this.scalar(writer, field.V.T, 2, value, true); - break; - case "enum": - this.scalar(writer, reflection_info_1.ScalarType.INT32, 2, value, true); - break; - case "message": - this.message(writer, options, field.V.T(), 2, value); - break; - } - writer.join(); + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - message(writer, options, handler, fieldNo, value) { - if (value === void 0) - return; - handler.internalBinaryWrite(value, writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited).fork(), options); - writer.join(); + }; + exports2.ListCacheEntriesRequest = new ListCacheEntriesRequest$Type(); + var ListCacheEntriesResponse$Type = class extends runtime_5.MessageType { + static { + __name(this, "ListCacheEntriesResponse$Type"); } - /** - * Write a single scalar value. - */ - scalar(writer, type, fieldNo, value, emitDefault) { - let [wireType, method, isDefault] = this.scalarInfo(type, value); - if (!isDefault || emitDefault) { - writer.tag(fieldNo, wireType); - writer[method](value); - } + constructor() { + super("github.actions.results.api.v1.ListCacheEntriesResponse", [ + { no: 1, name: "entries", kind: "message", repeat: 1, T: /* @__PURE__ */ __name(() => cacheentry_1.CacheEntry, "T") } + ]); } - /** - * Write an array of scalar values in packed format. - */ - packed(writer, type, fieldNo, value) { - if (!value.length) - return; - assert_1.assert(type !== reflection_info_1.ScalarType.BYTES && type !== reflection_info_1.ScalarType.STRING); - writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited); - writer.fork(); - let [, method] = this.scalarInfo(type); - for (let i = 0; i < value.length; i++) - writer[method](value[i]); - writer.join(); + create(value) { + const message = { entries: [] }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; } - /** - * Get information for writing a scalar value. - * - * Returns tuple: - * [0]: appropriate WireType - * [1]: name of the appropriate method of IBinaryWriter - * [2]: whether the given value is a default value - * - * If argument `value` is omitted, [2] is always false. 
- */ - scalarInfo(type, value) { - let t = binary_format_contract_1.WireType.Varint; - let m; - let i = value === void 0; - let d = value === 0; - switch (type) { - case reflection_info_1.ScalarType.INT32: - m = "int32"; - break; - case reflection_info_1.ScalarType.STRING: - d = i || !value.length; - t = binary_format_contract_1.WireType.LengthDelimited; - m = "string"; - break; - case reflection_info_1.ScalarType.BOOL: - d = value === false; - m = "bool"; - break; - case reflection_info_1.ScalarType.UINT32: - m = "uint32"; - break; - case reflection_info_1.ScalarType.DOUBLE: - t = binary_format_contract_1.WireType.Bit64; - m = "double"; - break; - case reflection_info_1.ScalarType.FLOAT: - t = binary_format_contract_1.WireType.Bit32; - m = "float"; - break; - case reflection_info_1.ScalarType.INT64: - d = i || pb_long_1.PbLong.from(value).isZero(); - m = "int64"; - break; - case reflection_info_1.ScalarType.UINT64: - d = i || pb_long_1.PbULong.from(value).isZero(); - m = "uint64"; - break; - case reflection_info_1.ScalarType.FIXED64: - d = i || pb_long_1.PbULong.from(value).isZero(); - t = binary_format_contract_1.WireType.Bit64; - m = "fixed64"; - break; - case reflection_info_1.ScalarType.BYTES: - d = i || !value.byteLength; - t = binary_format_contract_1.WireType.LengthDelimited; - m = "bytes"; - break; - case reflection_info_1.ScalarType.FIXED32: - t = binary_format_contract_1.WireType.Bit32; - m = "fixed32"; - break; - case reflection_info_1.ScalarType.SFIXED32: - t = binary_format_contract_1.WireType.Bit32; - m = "sfixed32"; - break; - case reflection_info_1.ScalarType.SFIXED64: - d = i || pb_long_1.PbLong.from(value).isZero(); - t = binary_format_contract_1.WireType.Bit64; - m = "sfixed64"; - break; - case reflection_info_1.ScalarType.SINT32: - m = "sint32"; - break; - case reflection_info_1.ScalarType.SINT64: - d = i || pb_long_1.PbLong.from(value).isZero(); - m = "sint64"; - break; + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated github.actions.results.entities.v1.CacheEntry entries */ + 1: + message.entries.push(cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } - return [t, m, i || d]; + return message; + } + internalBinaryWrite(message, writer, options) { + for (let i = 0; i < message.entries.length; i++) + cacheentry_1.CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } }; - exports2.ReflectionBinaryWriter = ReflectionBinaryWriter; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-create.js -var require_reflection_create = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-create.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionCreate = void 0; - var reflection_scalar_default_1 = require_reflection_scalar_default(); - var message_type_contract_1 = require_message_type_contract(); - function reflectionCreate(type) { - const msg = type.messagePrototype ? Object.create(type.messagePrototype) : Object.defineProperty({}, message_type_contract_1.MESSAGE_TYPE, { value: type }); - for (let field of type.fields) { - let name = field.localName; - if (field.opt) - continue; - if (field.oneof) - msg[field.oneof] = { oneofKind: void 0 }; - else if (field.repeat) - msg[name] = []; - else - switch (field.kind) { - case "scalar": - msg[name] = reflection_scalar_default_1.reflectionScalarDefault(field.T, field.L); + exports2.ListCacheEntriesResponse = new ListCacheEntriesResponse$Type(); + var LookupCacheEntryRequest$Type = class extends runtime_5.MessageType { + static { + __name(this, "LookupCacheEntryRequest$Type"); + } + constructor() { + super("github.actions.results.api.v1.LookupCacheEntryRequest", [ + { no: 1, name: "metadata", kind: "message", T: /* @__PURE__ */ __name(() => cachemetadata_1.CacheMetadata, "T") }, + { + no: 2, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "restore_keys", + kind: "scalar", + repeat: 2, + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 4, + name: "version", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); + } + create(value) { + const message = { key: "", restoreKeys: [], version: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* github.actions.results.entities.v1.CacheMetadata metadata */ + 1: + message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; - case "enum": - msg[name] = 0; + case /* string key */ + 2: + message.key = reader.string(); break; - case "map": - msg[name] = {}; + case /* repeated string restore_keys */ + 3: + message.restoreKeys.push(reader.string()); break; + case /* string version */ + 4: + message.version = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } + } + return message; } - return msg; - } - __name(reflectionCreate, "reflectionCreate"); - exports2.reflectionCreate = reflectionCreate; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-merge-partial.js -var require_reflection_merge_partial = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-merge-partial.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionMergePartial = void 0; - function reflectionMergePartial(info, target, source) { - let fieldValue, input = source, output; - for (let field of info.fields) { - let name = field.localName; - if (field.oneof) { - const group = input[field.oneof]; - if ((group === null || group === void 0 ? void 0 : group.oneofKind) == void 0) { - continue; - } - fieldValue = group[name]; - output = target[field.oneof]; - output.oneofKind = group.oneofKind; - if (fieldValue == void 0) { - delete output[name]; - continue; - } - } else { - fieldValue = input[name]; - output = target; - if (fieldValue == void 0) { - continue; + internalBinaryWrite(message, writer, options) { + if (message.metadata) + cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.key !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); + for (let i = 0; i < message.restoreKeys.length; i++) + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]); + if (message.version !== "") + writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.LookupCacheEntryRequest = new LookupCacheEntryRequest$Type(); + var LookupCacheEntryResponse$Type = class extends runtime_5.MessageType { + static { + __name(this, "LookupCacheEntryResponse$Type"); + } + constructor() { + super("github.actions.results.api.v1.LookupCacheEntryResponse", [ + { + no: 1, + name: "exists", + kind: "scalar", + T: 8 + /*ScalarType.BOOL*/ + }, + { no: 2, name: "entry", kind: "message", T: /* @__PURE__ */ __name(() => cacheentry_1.CacheEntry, "T") } + ]); + } + create(value) { + const message = { exists: false }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool exists */ + 1: + message.exists = reader.bool(); + break; + case /* github.actions.results.entities.v1.CacheEntry entry */ + 2: + message.entry = cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options, message.entry); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } - if (field.repeat) - output[name].length = fieldValue.length; - switch (field.kind) { - case "scalar": - case "enum": - if (field.repeat) - for (let i = 0; i < fieldValue.length; i++) - output[name][i] = fieldValue[i]; - else - output[name] = fieldValue; - break; - case "message": - let T = field.T(); - if (field.repeat) - for (let i = 0; i < fieldValue.length; i++) - output[name][i] = T.create(fieldValue[i]); - else if (output[name] === void 0) - output[name] = T.create(fieldValue); - else - T.mergePartial(output[name], fieldValue); - break; - case "map": - switch (field.V.kind) { - case "scalar": - case "enum": - Object.assign(output[name], fieldValue); - break; - case "message": - let T2 = field.V.T(); - for (let k of Object.keys(fieldValue)) - output[name][k] = T2.create(fieldValue[k]); - break; - } - break; - } + return message; } - } - __name(reflectionMergePartial, "reflectionMergePartial"); - exports2.reflectionMergePartial = reflectionMergePartial; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-equals.js -var require_reflection_equals = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-equals.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionEquals = void 0; - var reflection_info_1 = require_reflection_info(); - function reflectionEquals(info, a, b) { - if (a === b) - return true; - if (!a || !b) - return false; - for (let field of info.fields) { - let localName = field.localName; - let val_a = field.oneof ? a[field.oneof][localName] : a[localName]; - let val_b = field.oneof ? b[field.oneof][localName] : b[localName]; - switch (field.kind) { - case "enum": - case "scalar": - let t = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; - if (!(field.repeat ? repeatedPrimitiveEq(t, val_a, val_b) : primitiveEq(t, val_a, val_b))) - return false; - break; - case "map": - if (!(field.V.kind == "message" ? repeatedMsgEq(field.V.T(), objectValues(val_a), objectValues(val_b)) : repeatedPrimitiveEq(field.V.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.V.T, objectValues(val_a), objectValues(val_b)))) - return false; - break; - case "message": - let T = field.T(); - if (!(field.repeat ? repeatedMsgEq(T, val_a, val_b) : T.equals(val_a, val_b))) - return false; - break; - } + internalBinaryWrite(message, writer, options) { + if (message.exists !== false) + writer.tag(1, runtime_1.WireType.Varint).bool(message.exists); + if (message.entry) + cacheentry_1.CacheEntry.internalBinaryWrite(message.entry, writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - return true; - } - __name(reflectionEquals, "reflectionEquals"); - exports2.reflectionEquals = reflectionEquals; - var objectValues = Object.values; - function primitiveEq(type, a, b) { - if (a === b) - return true; - if (type !== reflection_info_1.ScalarType.BYTES) - return false; - let ba = a; - let bb = b; - if (ba.length !== bb.length) - return false; - for (let i = 0; i < ba.length; i++) - if (ba[i] != bb[i]) - return false; - return true; - } - __name(primitiveEq, "primitiveEq"); - function repeatedPrimitiveEq(type, a, b) { - if (a.length !== b.length) - return false; - for (let i = 0; i < a.length; i++) - if (!primitiveEq(type, a[i], b[i])) - return false; - return true; - } - __name(repeatedPrimitiveEq, "repeatedPrimitiveEq"); - function repeatedMsgEq(type, a, b) { - if (a.length !== b.length) - return false; - for (let i = 0; i < a.length; i++) - if (!type.equals(a[i], b[i])) - return false; - return true; - } - __name(repeatedMsgEq, "repeatedMsgEq"); + }; + exports2.LookupCacheEntryResponse = new LookupCacheEntryResponse$Type(); + exports2.CacheService = new runtime_rpc_1.ServiceType("github.actions.results.api.v1.CacheService", [ + { name: "CreateCacheEntry", options: {}, I: exports2.CreateCacheEntryRequest, O: exports2.CreateCacheEntryResponse }, + { name: "FinalizeCacheEntryUpload", options: {}, I: exports2.FinalizeCacheEntryUploadRequest, O: exports2.FinalizeCacheEntryUploadResponse }, + { name: "GetCacheEntryDownloadURL", options: {}, I: exports2.GetCacheEntryDownloadURLRequest, O: exports2.GetCacheEntryDownloadURLResponse }, + { name: "DeleteCacheEntry", options: {}, I: exports2.DeleteCacheEntryRequest, O: exports2.DeleteCacheEntryResponse }, + { name: "ListCacheEntries", options: {}, I: exports2.ListCacheEntriesRequest, O: exports2.ListCacheEntriesResponse }, + { name: "LookupCacheEntry", options: {}, I: exports2.LookupCacheEntryRequest, O: exports2.LookupCacheEntryResponse } + ]); } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/message-type.js -var require_message_type = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/message-type.js"(exports2) { +// ../node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp.js +var require_cache_twirp = __commonJS({ + "../node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp.js"(exports2) { "use strict"; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.MessageType = void 0; - var message_type_contract_1 = require_message_type_contract(); - var reflection_info_1 = require_reflection_info(); - var reflection_type_check_1 = require_reflection_type_check(); - var reflection_json_reader_1 = require_reflection_json_reader(); - var reflection_json_writer_1 = require_reflection_json_writer(); - var reflection_binary_reader_1 = require_reflection_binary_reader(); - var reflection_binary_writer_1 = require_reflection_binary_writer(); - var reflection_create_1 = require_reflection_create(); - var reflection_merge_partial_1 = require_reflection_merge_partial(); - var json_typings_1 = require_json_typings(); - var json_format_contract_1 = require_json_format_contract(); - var reflection_equals_1 = require_reflection_equals(); - var binary_writer_1 = require_binary_writer(); - var binary_reader_1 = require_binary_reader(); - var baseDescriptors = Object.getOwnPropertyDescriptors(Object.getPrototypeOf({})); - var MessageType = class { + exports2.createCacheServiceServer = exports2.CacheServiceMethodList = exports2.CacheServiceMethod = exports2.CacheServiceClientProtobuf = exports2.CacheServiceClientJSON = void 0; + var twirp_ts_1 = require_twirp(); + var cache_1 = require_cache2(); + var CacheServiceClientJSON = class { static { - __name(this, "MessageType"); + __name(this, "CacheServiceClientJSON"); } - constructor(name, fields, options) { - this.defaultCheckDepth = 16; - this.typeName = name; - this.fields = fields.map(reflection_info_1.normalizeFieldInfo); - this.options = options !== null && options !== void 0 ? options : {}; - this.messagePrototype = Object.create(null, Object.assign(Object.assign({}, baseDescriptors), { [message_type_contract_1.MESSAGE_TYPE]: { value: this } })); - this.refTypeCheck = new reflection_type_check_1.ReflectionTypeCheck(this); - this.refJsonReader = new reflection_json_reader_1.ReflectionJsonReader(this); - this.refJsonWriter = new reflection_json_writer_1.ReflectionJsonWriter(this); - this.refBinReader = new reflection_binary_reader_1.ReflectionBinaryReader(this); - this.refBinWriter = new reflection_binary_writer_1.ReflectionBinaryWriter(this); + constructor(rpc) { + this.rpc = rpc; + this.CreateCacheEntry.bind(this); + this.FinalizeCacheEntryUpload.bind(this); + this.GetCacheEntryDownloadURL.bind(this); + this.DeleteCacheEntry.bind(this); + this.ListCacheEntries.bind(this); + this.LookupCacheEntry.bind(this); } - create(value) { - let message = reflection_create_1.reflectionCreate(this); - if (value !== void 0) { - reflection_merge_partial_1.reflectionMergePartial(this, message, value); - } - return message; + CreateCacheEntry(request) { + const data = cache_1.CreateCacheEntryRequest.toJson(request, { + useProtoFieldName: true, + emitDefaultValues: false + }); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/json", data); + return promise.then((data2) => cache_1.CreateCacheEntryResponse.fromJson(data2, { + ignoreUnknownFields: true + })); } - /** - * Clone the message. - * - * Unknown fields are discarded. 
- */ - clone(message) { - let copy = this.create(); - reflection_merge_partial_1.reflectionMergePartial(this, copy, message); - return copy; + FinalizeCacheEntryUpload(request) { + const data = cache_1.FinalizeCacheEntryUploadRequest.toJson(request, { + useProtoFieldName: true, + emitDefaultValues: false + }); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/json", data); + return promise.then((data2) => cache_1.FinalizeCacheEntryUploadResponse.fromJson(data2, { + ignoreUnknownFields: true + })); } - /** - * Determines whether two message of the same type have the same field values. - * Checks for deep equality, traversing repeated fields, oneof groups, maps - * and messages recursively. - * Will also return true if both messages are `undefined`. - */ - equals(a, b) { - return reflection_equals_1.reflectionEquals(this, a, b); + GetCacheEntryDownloadURL(request) { + const data = cache_1.GetCacheEntryDownloadURLRequest.toJson(request, { + useProtoFieldName: true, + emitDefaultValues: false + }); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/json", data); + return promise.then((data2) => cache_1.GetCacheEntryDownloadURLResponse.fromJson(data2, { + ignoreUnknownFields: true + })); } - /** - * Is the given value assignable to our message type - * and contains no [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? - */ - is(arg, depth = this.defaultCheckDepth) { - return this.refTypeCheck.is(arg, depth, false); + DeleteCacheEntry(request) { + const data = cache_1.DeleteCacheEntryRequest.toJson(request, { + useProtoFieldName: true, + emitDefaultValues: false + }); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "DeleteCacheEntry", "application/json", data); + return promise.then((data2) => cache_1.DeleteCacheEntryResponse.fromJson(data2, { + ignoreUnknownFields: true + })); } - /** - * Is the given value assignable to our message type, - * regardless of [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? 
- */ - isAssignable(arg, depth = this.defaultCheckDepth) { - return this.refTypeCheck.is(arg, depth, true); + ListCacheEntries(request) { + const data = cache_1.ListCacheEntriesRequest.toJson(request, { + useProtoFieldName: true, + emitDefaultValues: false + }); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "ListCacheEntries", "application/json", data); + return promise.then((data2) => cache_1.ListCacheEntriesResponse.fromJson(data2, { + ignoreUnknownFields: true + })); + } + LookupCacheEntry(request) { + const data = cache_1.LookupCacheEntryRequest.toJson(request, { + useProtoFieldName: true, + emitDefaultValues: false + }); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "LookupCacheEntry", "application/json", data); + return promise.then((data2) => cache_1.LookupCacheEntryResponse.fromJson(data2, { + ignoreUnknownFields: true + })); + } + }; + exports2.CacheServiceClientJSON = CacheServiceClientJSON; + var CacheServiceClientProtobuf = class { + static { + __name(this, "CacheServiceClientProtobuf"); + } + constructor(rpc) { + this.rpc = rpc; + this.CreateCacheEntry.bind(this); + this.FinalizeCacheEntryUpload.bind(this); + this.GetCacheEntryDownloadURL.bind(this); + this.DeleteCacheEntry.bind(this); + this.ListCacheEntries.bind(this); + this.LookupCacheEntry.bind(this); } - /** - * Copy partial data into the target message. - */ - mergePartial(target, source) { - reflection_merge_partial_1.reflectionMergePartial(this, target, source); + CreateCacheEntry(request) { + const data = cache_1.CreateCacheEntryRequest.toBinary(request); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/protobuf", data); + return promise.then((data2) => cache_1.CreateCacheEntryResponse.fromBinary(data2)); } - /** - * Create a new message from binary format. - */ - fromBinary(data, options) { - let opt = binary_reader_1.binaryReadOptions(options); - return this.internalBinaryRead(opt.readerFactory(data), data.byteLength, opt); + FinalizeCacheEntryUpload(request) { + const data = cache_1.FinalizeCacheEntryUploadRequest.toBinary(request); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/protobuf", data); + return promise.then((data2) => cache_1.FinalizeCacheEntryUploadResponse.fromBinary(data2)); } - /** - * Read a new message from a JSON value. - */ - fromJson(json, options) { - return this.internalJsonRead(json, json_format_contract_1.jsonReadOptions(options)); + GetCacheEntryDownloadURL(request) { + const data = cache_1.GetCacheEntryDownloadURLRequest.toBinary(request); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/protobuf", data); + return promise.then((data2) => cache_1.GetCacheEntryDownloadURLResponse.fromBinary(data2)); } - /** - * Read a new message from a JSON string. - * This is equivalent to `T.fromJson(JSON.parse(json))`. - */ - fromJsonString(json, options) { - let value = JSON.parse(json); - return this.fromJson(value, options); + DeleteCacheEntry(request) { + const data = cache_1.DeleteCacheEntryRequest.toBinary(request); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "DeleteCacheEntry", "application/protobuf", data); + return promise.then((data2) => cache_1.DeleteCacheEntryResponse.fromBinary(data2)); } - /** - * Write the message to canonical JSON value. 
- */ - toJson(message, options) { - return this.internalJsonWrite(message, json_format_contract_1.jsonWriteOptions(options)); + ListCacheEntries(request) { + const data = cache_1.ListCacheEntriesRequest.toBinary(request); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "ListCacheEntries", "application/protobuf", data); + return promise.then((data2) => cache_1.ListCacheEntriesResponse.fromBinary(data2)); } - /** - * Convert the message to canonical JSON string. - * This is equivalent to `JSON.stringify(T.toJson(t))` - */ - toJsonString(message, options) { - var _a; - let value = this.toJson(message, options); - return JSON.stringify(value, null, (_a = options === null || options === void 0 ? void 0 : options.prettySpaces) !== null && _a !== void 0 ? _a : 0); + LookupCacheEntry(request) { + const data = cache_1.LookupCacheEntryRequest.toBinary(request); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "LookupCacheEntry", "application/protobuf", data); + return promise.then((data2) => cache_1.LookupCacheEntryResponse.fromBinary(data2)); } - /** - * Write the message to binary format. - */ - toBinary(message, options) { - let opt = binary_writer_1.binaryWriteOptions(options); - return this.internalBinaryWrite(message, opt.writerFactory(), opt).finish(); + }; + exports2.CacheServiceClientProtobuf = CacheServiceClientProtobuf; + var CacheServiceMethod; + (function(CacheServiceMethod2) { + CacheServiceMethod2["CreateCacheEntry"] = "CreateCacheEntry"; + CacheServiceMethod2["FinalizeCacheEntryUpload"] = "FinalizeCacheEntryUpload"; + CacheServiceMethod2["GetCacheEntryDownloadURL"] = "GetCacheEntryDownloadURL"; + CacheServiceMethod2["DeleteCacheEntry"] = "DeleteCacheEntry"; + CacheServiceMethod2["ListCacheEntries"] = "ListCacheEntries"; + CacheServiceMethod2["LookupCacheEntry"] = "LookupCacheEntry"; + })(CacheServiceMethod || (exports2.CacheServiceMethod = CacheServiceMethod = {})); + exports2.CacheServiceMethodList = [ + CacheServiceMethod.CreateCacheEntry, + CacheServiceMethod.FinalizeCacheEntryUpload, + CacheServiceMethod.GetCacheEntryDownloadURL, + CacheServiceMethod.DeleteCacheEntry, + CacheServiceMethod.ListCacheEntries, + CacheServiceMethod.LookupCacheEntry + ]; + function createCacheServiceServer(service) { + return new twirp_ts_1.TwirpServer({ + service, + packageName: "github.actions.results.api.v1", + serviceName: "CacheService", + methodList: exports2.CacheServiceMethodList, + matchRoute: matchCacheServiceRoute + }); + } + __name(createCacheServiceServer, "createCacheServiceServer"); + exports2.createCacheServiceServer = createCacheServiceServer; + function matchCacheServiceRoute(method, events) { + switch (method) { + case "CreateCacheEntry": + return (ctx, service, data, interceptors) => __awaiter3(this, void 0, void 0, function* () { + ctx = Object.assign(Object.assign({}, ctx), { methodName: "CreateCacheEntry" }); + yield events.onMatch(ctx); + return handleCacheServiceCreateCacheEntryRequest(ctx, service, data, interceptors); + }); + case "FinalizeCacheEntryUpload": + return (ctx, service, data, interceptors) => __awaiter3(this, void 0, void 0, function* () { + ctx = Object.assign(Object.assign({}, ctx), { methodName: "FinalizeCacheEntryUpload" }); + yield events.onMatch(ctx); + return handleCacheServiceFinalizeCacheEntryUploadRequest(ctx, service, data, interceptors); + }); + case "GetCacheEntryDownloadURL": + return (ctx, service, data, interceptors) => __awaiter3(this, void 0, void 0, function* () { + ctx = 
Object.assign(Object.assign({}, ctx), { methodName: "GetCacheEntryDownloadURL" }); + yield events.onMatch(ctx); + return handleCacheServiceGetCacheEntryDownloadURLRequest(ctx, service, data, interceptors); + }); + case "DeleteCacheEntry": + return (ctx, service, data, interceptors) => __awaiter3(this, void 0, void 0, function* () { + ctx = Object.assign(Object.assign({}, ctx), { methodName: "DeleteCacheEntry" }); + yield events.onMatch(ctx); + return handleCacheServiceDeleteCacheEntryRequest(ctx, service, data, interceptors); + }); + case "ListCacheEntries": + return (ctx, service, data, interceptors) => __awaiter3(this, void 0, void 0, function* () { + ctx = Object.assign(Object.assign({}, ctx), { methodName: "ListCacheEntries" }); + yield events.onMatch(ctx); + return handleCacheServiceListCacheEntriesRequest(ctx, service, data, interceptors); + }); + case "LookupCacheEntry": + return (ctx, service, data, interceptors) => __awaiter3(this, void 0, void 0, function* () { + ctx = Object.assign(Object.assign({}, ctx), { methodName: "LookupCacheEntry" }); + yield events.onMatch(ctx); + return handleCacheServiceLookupCacheEntryRequest(ctx, service, data, interceptors); + }); + default: + events.onNotFound(); + const msg = `no handler found`; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); } - /** - * This is an internal method. If you just want to read a message from - * JSON, use `fromJson()` or `fromJsonString()`. - * - * Reads JSON value and merges the fields into the target - * according to protobuf rules. If the target is omitted, - * a new instance is created first. - */ - internalJsonRead(json, options, target) { - if (json !== null && typeof json == "object" && !Array.isArray(json)) { - let message = target !== null && target !== void 0 ? target : this.create(); - this.refJsonReader.read(json, message, options); - return message; - } - throw new Error(`Unable to parse message ${this.typeName} from JSON ${json_typings_1.typeofJsonValue(json)}.`); + } + __name(matchCacheServiceRoute, "matchCacheServiceRoute"); + function handleCacheServiceCreateCacheEntryRequest(ctx, service, data, interceptors) { + switch (ctx.contentType) { + case twirp_ts_1.TwirpContentType.JSON: + return handleCacheServiceCreateCacheEntryJSON(ctx, service, data, interceptors); + case twirp_ts_1.TwirpContentType.Protobuf: + return handleCacheServiceCreateCacheEntryProtobuf(ctx, service, data, interceptors); + default: + const msg = "unexpected Content-Type"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); } - /** - * This is an internal method. If you just want to write a message - * to JSON, use `toJson()` or `toJsonString(). - * - * Writes JSON value and returns it. - */ - internalJsonWrite(message, options) { - return this.refJsonWriter.write(message, options); + } + __name(handleCacheServiceCreateCacheEntryRequest, "handleCacheServiceCreateCacheEntryRequest"); + function handleCacheServiceFinalizeCacheEntryUploadRequest(ctx, service, data, interceptors) { + switch (ctx.contentType) { + case twirp_ts_1.TwirpContentType.JSON: + return handleCacheServiceFinalizeCacheEntryUploadJSON(ctx, service, data, interceptors); + case twirp_ts_1.TwirpContentType.Protobuf: + return handleCacheServiceFinalizeCacheEntryUploadProtobuf(ctx, service, data, interceptors); + default: + const msg = "unexpected Content-Type"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); } - /** - * This is an internal method. 
If you just want to write a message - * in binary format, use `toBinary()`. - * - * Serializes the message in binary format and appends it to the given - * writer. Returns passed writer. - */ - internalBinaryWrite(message, writer, options) { - this.refBinWriter.write(message, writer, options); - return writer; + } + __name(handleCacheServiceFinalizeCacheEntryUploadRequest, "handleCacheServiceFinalizeCacheEntryUploadRequest"); + function handleCacheServiceGetCacheEntryDownloadURLRequest(ctx, service, data, interceptors) { + switch (ctx.contentType) { + case twirp_ts_1.TwirpContentType.JSON: + return handleCacheServiceGetCacheEntryDownloadURLJSON(ctx, service, data, interceptors); + case twirp_ts_1.TwirpContentType.Protobuf: + return handleCacheServiceGetCacheEntryDownloadURLProtobuf(ctx, service, data, interceptors); + default: + const msg = "unexpected Content-Type"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); } - /** - * This is an internal method. If you just want to read a message from - * binary data, use `fromBinary()`. - * - * Reads data from binary format and merges the fields into - * the target according to protobuf rules. If the target is - * omitted, a new instance is created first. - */ - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(); - this.refBinReader.read(reader, message, options, length); - return message; + } + __name(handleCacheServiceGetCacheEntryDownloadURLRequest, "handleCacheServiceGetCacheEntryDownloadURLRequest"); + function handleCacheServiceDeleteCacheEntryRequest(ctx, service, data, interceptors) { + switch (ctx.contentType) { + case twirp_ts_1.TwirpContentType.JSON: + return handleCacheServiceDeleteCacheEntryJSON(ctx, service, data, interceptors); + case twirp_ts_1.TwirpContentType.Protobuf: + return handleCacheServiceDeleteCacheEntryProtobuf(ctx, service, data, interceptors); + default: + const msg = "unexpected Content-Type"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); } - }; - exports2.MessageType = MessageType; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-contains-message-type.js -var require_reflection_contains_message_type = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-contains-message-type.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.containsMessageType = void 0; - var message_type_contract_1 = require_message_type_contract(); - function containsMessageType(msg) { - return msg[message_type_contract_1.MESSAGE_TYPE] != null; } - __name(containsMessageType, "containsMessageType"); - exports2.containsMessageType = containsMessageType; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/enum-object.js -var require_enum_object = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/enum-object.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.listEnumNumbers = exports2.listEnumNames = exports2.listEnumValues = exports2.isEnumObject = void 0; - function isEnumObject(arg) { - if (typeof arg != "object" || arg === null) { - return false; + __name(handleCacheServiceDeleteCacheEntryRequest, "handleCacheServiceDeleteCacheEntryRequest"); + function handleCacheServiceListCacheEntriesRequest(ctx, service, data, interceptors) { + switch (ctx.contentType) { + case twirp_ts_1.TwirpContentType.JSON: + return 
handleCacheServiceListCacheEntriesJSON(ctx, service, data, interceptors); + case twirp_ts_1.TwirpContentType.Protobuf: + return handleCacheServiceListCacheEntriesProtobuf(ctx, service, data, interceptors); + default: + const msg = "unexpected Content-Type"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); } - if (!arg.hasOwnProperty(0)) { - return false; + } + __name(handleCacheServiceListCacheEntriesRequest, "handleCacheServiceListCacheEntriesRequest"); + function handleCacheServiceLookupCacheEntryRequest(ctx, service, data, interceptors) { + switch (ctx.contentType) { + case twirp_ts_1.TwirpContentType.JSON: + return handleCacheServiceLookupCacheEntryJSON(ctx, service, data, interceptors); + case twirp_ts_1.TwirpContentType.Protobuf: + return handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, interceptors); + default: + const msg = "unexpected Content-Type"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); } - for (let k of Object.keys(arg)) { - let num = parseInt(k); - if (!Number.isNaN(num)) { - let nam = arg[num]; - if (nam === void 0) - return false; - if (arg[nam] !== num) - return false; + } + __name(handleCacheServiceLookupCacheEntryRequest, "handleCacheServiceLookupCacheEntryRequest"); + function handleCacheServiceCreateCacheEntryJSON(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + const body = JSON.parse(data.toString() || "{}"); + request = cache_1.CreateCacheEntryRequest.fromJson(body, { + ignoreUnknownFields: true + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.CreateCacheEntry(ctx2, inputReq); + }); + } else { + response = yield service.CreateCacheEntry(ctx, request); + } + return JSON.stringify(cache_1.CreateCacheEntryResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false + })); + }); + } + __name(handleCacheServiceCreateCacheEntryJSON, "handleCacheServiceCreateCacheEntryJSON"); + function handleCacheServiceFinalizeCacheEntryUploadJSON(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + const body = JSON.parse(data.toString() || "{}"); + request = cache_1.FinalizeCacheEntryUploadRequest.fromJson(body, { + ignoreUnknownFields: true + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.FinalizeCacheEntryUpload(ctx2, inputReq); + }); + } else { + response = yield service.FinalizeCacheEntryUpload(ctx, request); + } + return JSON.stringify(cache_1.FinalizeCacheEntryUploadResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false + })); + }); + } + __name(handleCacheServiceFinalizeCacheEntryUploadJSON, "handleCacheServiceFinalizeCacheEntryUploadJSON"); + function 
handleCacheServiceGetCacheEntryDownloadURLJSON(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + const body = JSON.parse(data.toString() || "{}"); + request = cache_1.GetCacheEntryDownloadURLRequest.fromJson(body, { + ignoreUnknownFields: true + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.GetCacheEntryDownloadURL(ctx2, inputReq); + }); + } else { + response = yield service.GetCacheEntryDownloadURL(ctx, request); + } + return JSON.stringify(cache_1.GetCacheEntryDownloadURLResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false + })); + }); + } + __name(handleCacheServiceGetCacheEntryDownloadURLJSON, "handleCacheServiceGetCacheEntryDownloadURLJSON"); + function handleCacheServiceDeleteCacheEntryJSON(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + const body = JSON.parse(data.toString() || "{}"); + request = cache_1.DeleteCacheEntryRequest.fromJson(body, { + ignoreUnknownFields: true + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.DeleteCacheEntry(ctx2, inputReq); + }); + } else { + response = yield service.DeleteCacheEntry(ctx, request); + } + return JSON.stringify(cache_1.DeleteCacheEntryResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false + })); + }); + } + __name(handleCacheServiceDeleteCacheEntryJSON, "handleCacheServiceDeleteCacheEntryJSON"); + function handleCacheServiceListCacheEntriesJSON(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + const body = JSON.parse(data.toString() || "{}"); + request = cache_1.ListCacheEntriesRequest.fromJson(body, { + ignoreUnknownFields: true + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.ListCacheEntries(ctx2, inputReq); + }); + } else { + response = yield service.ListCacheEntries(ctx, request); + } + return JSON.stringify(cache_1.ListCacheEntriesResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false + })); + }); + } + __name(handleCacheServiceListCacheEntriesJSON, "handleCacheServiceListCacheEntriesJSON"); + function handleCacheServiceLookupCacheEntryJSON(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + const body = JSON.parse(data.toString() || "{}"); 
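+        // Note (editorial, describing the generated pattern visible here): the
+        // request body is parsed leniently (an empty body falls back to "{}"),
+        // and fromJson runs with ignoreUnknownFields so older payloads remain
+        // decodable against newer service definitions. Every JSON handler in
+        // this generated service follows the same shape: decode, run the
+        // optional interceptor chain, then serialize the response with
+        // useProtoFieldName so field names match the .proto definition.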
+ request = cache_1.LookupCacheEntryRequest.fromJson(body, { + ignoreUnknownFields: true + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.LookupCacheEntry(ctx2, inputReq); + }); + } else { + response = yield service.LookupCacheEntry(ctx, request); + } + return JSON.stringify(cache_1.LookupCacheEntryResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false + })); + }); + } + __name(handleCacheServiceLookupCacheEntryJSON, "handleCacheServiceLookupCacheEntryJSON"); + function handleCacheServiceCreateCacheEntryProtobuf(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + request = cache_1.CreateCacheEntryRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.CreateCacheEntry(ctx2, inputReq); + }); + } else { + response = yield service.CreateCacheEntry(ctx, request); + } + return Buffer.from(cache_1.CreateCacheEntryResponse.toBinary(response)); + }); + } + __name(handleCacheServiceCreateCacheEntryProtobuf, "handleCacheServiceCreateCacheEntryProtobuf"); + function handleCacheServiceFinalizeCacheEntryUploadProtobuf(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + request = cache_1.FinalizeCacheEntryUploadRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.FinalizeCacheEntryUpload(ctx2, inputReq); + }); } else { - let num2 = arg[k]; - if (num2 === void 0) - return false; - if (typeof num2 !== "number") - return false; - if (arg[num2] === void 0) - return false; + response = yield service.FinalizeCacheEntryUpload(ctx, request); } - } - return true; + return Buffer.from(cache_1.FinalizeCacheEntryUploadResponse.toBinary(response)); + }); } - __name(isEnumObject, "isEnumObject"); - exports2.isEnumObject = isEnumObject; - function listEnumValues(enumObject) { - if (!isEnumObject(enumObject)) - throw new Error("not a typescript enum object"); - let values = []; - for (let [name, number] of Object.entries(enumObject)) - if (typeof number == "number") - values.push({ name, number }); - return values; + __name(handleCacheServiceFinalizeCacheEntryUploadProtobuf, "handleCacheServiceFinalizeCacheEntryUploadProtobuf"); + function handleCacheServiceGetCacheEntryDownloadURLProtobuf(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + 
try { + request = cache_1.GetCacheEntryDownloadURLRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.GetCacheEntryDownloadURL(ctx2, inputReq); + }); + } else { + response = yield service.GetCacheEntryDownloadURL(ctx, request); + } + return Buffer.from(cache_1.GetCacheEntryDownloadURLResponse.toBinary(response)); + }); } - __name(listEnumValues, "listEnumValues"); - exports2.listEnumValues = listEnumValues; - function listEnumNames(enumObject) { - return listEnumValues(enumObject).map((val) => val.name); + __name(handleCacheServiceGetCacheEntryDownloadURLProtobuf, "handleCacheServiceGetCacheEntryDownloadURLProtobuf"); + function handleCacheServiceDeleteCacheEntryProtobuf(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + request = cache_1.DeleteCacheEntryRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.DeleteCacheEntry(ctx2, inputReq); + }); + } else { + response = yield service.DeleteCacheEntry(ctx, request); + } + return Buffer.from(cache_1.DeleteCacheEntryResponse.toBinary(response)); + }); } - __name(listEnumNames, "listEnumNames"); - exports2.listEnumNames = listEnumNames; - function listEnumNumbers(enumObject) { - return listEnumValues(enumObject).map((val) => val.number).filter((num, index, arr) => arr.indexOf(num) == index); + __name(handleCacheServiceDeleteCacheEntryProtobuf, "handleCacheServiceDeleteCacheEntryProtobuf"); + function handleCacheServiceListCacheEntriesProtobuf(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + request = cache_1.ListCacheEntriesRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.ListCacheEntries(ctx2, inputReq); + }); + } else { + response = yield service.ListCacheEntries(ctx, request); + } + return Buffer.from(cache_1.ListCacheEntriesResponse.toBinary(response)); + }); } - __name(listEnumNumbers, "listEnumNumbers"); - exports2.listEnumNumbers = listEnumNumbers; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/index.js -var require_commonjs7 = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - var json_typings_1 = require_json_typings(); - Object.defineProperty(exports2, "typeofJsonValue", { enumerable: 
true, get: /* @__PURE__ */ __name(function() { - return json_typings_1.typeofJsonValue; - }, "get") }); - Object.defineProperty(exports2, "isJsonObject", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return json_typings_1.isJsonObject; - }, "get") }); - var base64_1 = require_base642(); - Object.defineProperty(exports2, "base64decode", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return base64_1.base64decode; - }, "get") }); - Object.defineProperty(exports2, "base64encode", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return base64_1.base64encode; - }, "get") }); - var protobufjs_utf8_1 = require_protobufjs_utf8(); - Object.defineProperty(exports2, "utf8read", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return protobufjs_utf8_1.utf8read; - }, "get") }); - var binary_format_contract_1 = require_binary_format_contract(); - Object.defineProperty(exports2, "WireType", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return binary_format_contract_1.WireType; - }, "get") }); - Object.defineProperty(exports2, "mergeBinaryOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return binary_format_contract_1.mergeBinaryOptions; - }, "get") }); - Object.defineProperty(exports2, "UnknownFieldHandler", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return binary_format_contract_1.UnknownFieldHandler; - }, "get") }); - var binary_reader_1 = require_binary_reader(); - Object.defineProperty(exports2, "BinaryReader", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return binary_reader_1.BinaryReader; - }, "get") }); - Object.defineProperty(exports2, "binaryReadOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return binary_reader_1.binaryReadOptions; - }, "get") }); - var binary_writer_1 = require_binary_writer(); - Object.defineProperty(exports2, "BinaryWriter", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return binary_writer_1.BinaryWriter; - }, "get") }); - Object.defineProperty(exports2, "binaryWriteOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return binary_writer_1.binaryWriteOptions; - }, "get") }); - var pb_long_1 = require_pb_long(); - Object.defineProperty(exports2, "PbLong", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return pb_long_1.PbLong; - }, "get") }); - Object.defineProperty(exports2, "PbULong", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return pb_long_1.PbULong; - }, "get") }); - var json_format_contract_1 = require_json_format_contract(); - Object.defineProperty(exports2, "jsonReadOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return json_format_contract_1.jsonReadOptions; - }, "get") }); - Object.defineProperty(exports2, "jsonWriteOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return json_format_contract_1.jsonWriteOptions; - }, "get") }); - Object.defineProperty(exports2, "mergeJsonOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return json_format_contract_1.mergeJsonOptions; - }, "get") }); - var message_type_contract_1 = require_message_type_contract(); - Object.defineProperty(exports2, "MESSAGE_TYPE", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return message_type_contract_1.MESSAGE_TYPE; - }, "get") }); - var message_type_1 = require_message_type(); - Object.defineProperty(exports2, "MessageType", { enumerable: true, get: /* @__PURE__ */ 
__name(function() { - return message_type_1.MessageType; - }, "get") }); - var reflection_info_1 = require_reflection_info(); - Object.defineProperty(exports2, "ScalarType", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.ScalarType; - }, "get") }); - Object.defineProperty(exports2, "LongType", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.LongType; - }, "get") }); - Object.defineProperty(exports2, "RepeatType", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.RepeatType; - }, "get") }); - Object.defineProperty(exports2, "normalizeFieldInfo", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.normalizeFieldInfo; - }, "get") }); - Object.defineProperty(exports2, "readFieldOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.readFieldOptions; - }, "get") }); - Object.defineProperty(exports2, "readFieldOption", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.readFieldOption; - }, "get") }); - Object.defineProperty(exports2, "readMessageOption", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.readMessageOption; - }, "get") }); - var reflection_type_check_1 = require_reflection_type_check(); - Object.defineProperty(exports2, "ReflectionTypeCheck", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_type_check_1.ReflectionTypeCheck; - }, "get") }); - var reflection_create_1 = require_reflection_create(); - Object.defineProperty(exports2, "reflectionCreate", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_create_1.reflectionCreate; - }, "get") }); - var reflection_scalar_default_1 = require_reflection_scalar_default(); - Object.defineProperty(exports2, "reflectionScalarDefault", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_scalar_default_1.reflectionScalarDefault; - }, "get") }); - var reflection_merge_partial_1 = require_reflection_merge_partial(); - Object.defineProperty(exports2, "reflectionMergePartial", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_merge_partial_1.reflectionMergePartial; - }, "get") }); - var reflection_equals_1 = require_reflection_equals(); - Object.defineProperty(exports2, "reflectionEquals", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_equals_1.reflectionEquals; - }, "get") }); - var reflection_binary_reader_1 = require_reflection_binary_reader(); - Object.defineProperty(exports2, "ReflectionBinaryReader", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_binary_reader_1.ReflectionBinaryReader; - }, "get") }); - var reflection_binary_writer_1 = require_reflection_binary_writer(); - Object.defineProperty(exports2, "ReflectionBinaryWriter", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_binary_writer_1.ReflectionBinaryWriter; - }, "get") }); - var reflection_json_reader_1 = require_reflection_json_reader(); - Object.defineProperty(exports2, "ReflectionJsonReader", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_json_reader_1.ReflectionJsonReader; - }, "get") }); - var reflection_json_writer_1 = require_reflection_json_writer(); - Object.defineProperty(exports2, "ReflectionJsonWriter", { enumerable: true, get: /* @__PURE__ */ 
__name(function() { - return reflection_json_writer_1.ReflectionJsonWriter; - }, "get") }); - var reflection_contains_message_type_1 = require_reflection_contains_message_type(); - Object.defineProperty(exports2, "containsMessageType", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_contains_message_type_1.containsMessageType; - }, "get") }); - var oneof_1 = require_oneof(); - Object.defineProperty(exports2, "isOneofGroup", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return oneof_1.isOneofGroup; - }, "get") }); - Object.defineProperty(exports2, "setOneofValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return oneof_1.setOneofValue; - }, "get") }); - Object.defineProperty(exports2, "getOneofValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return oneof_1.getOneofValue; - }, "get") }); - Object.defineProperty(exports2, "clearOneofValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return oneof_1.clearOneofValue; - }, "get") }); - Object.defineProperty(exports2, "getSelectedOneofValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return oneof_1.getSelectedOneofValue; - }, "get") }); - var enum_object_1 = require_enum_object(); - Object.defineProperty(exports2, "listEnumValues", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return enum_object_1.listEnumValues; - }, "get") }); - Object.defineProperty(exports2, "listEnumNames", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return enum_object_1.listEnumNames; - }, "get") }); - Object.defineProperty(exports2, "listEnumNumbers", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return enum_object_1.listEnumNumbers; - }, "get") }); - Object.defineProperty(exports2, "isEnumObject", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return enum_object_1.isEnumObject; - }, "get") }); - var lower_camel_case_1 = require_lower_camel_case(); - Object.defineProperty(exports2, "lowerCamelCase", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return lower_camel_case_1.lowerCamelCase; - }, "get") }); - var assert_1 = require_assert(); - Object.defineProperty(exports2, "assert", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return assert_1.assert; - }, "get") }); - Object.defineProperty(exports2, "assertNever", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return assert_1.assertNever; - }, "get") }); - Object.defineProperty(exports2, "assertInt32", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return assert_1.assertInt32; - }, "get") }); - Object.defineProperty(exports2, "assertUInt32", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return assert_1.assertUInt32; - }, "get") }); - Object.defineProperty(exports2, "assertFloat32", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return assert_1.assertFloat32; - }, "get") }); + __name(handleCacheServiceListCacheEntriesProtobuf, "handleCacheServiceListCacheEntriesProtobuf"); + function handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + request = cache_1.LookupCacheEntryRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if 
(interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.LookupCacheEntry(ctx2, inputReq); + }); + } else { + response = yield service.LookupCacheEntry(ctx, request); + } + return Buffer.from(cache_1.LookupCacheEntryResponse.toBinary(response)); + }); + } + __name(handleCacheServiceLookupCacheEntryProtobuf, "handleCacheServiceLookupCacheEntryProtobuf"); } }); -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/reflection-info.js -var require_reflection_info2 = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/reflection-info.js"(exports2) { +// ../node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.js +var require_cacheTwirpClient = __commonJS({ + "../node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.js"(exports2) { "use strict"; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.readServiceOption = exports2.readMethodOption = exports2.readMethodOptions = exports2.normalizeMethodInfo = void 0; - var runtime_1 = require_commonjs7(); - function normalizeMethodInfo(method, service) { - var _a, _b, _c; - let m = method; - m.service = service; - m.localName = (_a = m.localName) !== null && _a !== void 0 ? _a : runtime_1.lowerCamelCase(m.name); - m.serverStreaming = !!m.serverStreaming; - m.clientStreaming = !!m.clientStreaming; - m.options = (_b = m.options) !== null && _b !== void 0 ? _b : {}; - m.idempotency = (_c = m.idempotency) !== null && _c !== void 0 ? _c : void 0; - return m; - } - __name(normalizeMethodInfo, "normalizeMethodInfo"); - exports2.normalizeMethodInfo = normalizeMethodInfo; - function readMethodOptions(service, methodName, extensionName, extensionType) { - var _a; - const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; - return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : void 0; - } - __name(readMethodOptions, "readMethodOptions"); - exports2.readMethodOptions = readMethodOptions; - function readMethodOption(service, methodName, extensionName, extensionType) { - var _a; - const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? 
void 0 : _a.options; - if (!options) { - return void 0; + exports2.internalCacheTwirpClient = void 0; + var core_1 = require_core(); + var user_agent_1 = require_user_agent(); + var errors_1 = require_errors2(); + var config_1 = require_config(); + var cacheUtils_1 = require_cacheUtils(); + var auth_1 = require_auth(); + var http_client_1 = require_lib(); + var cache_twirp_1 = require_cache_twirp(); + var CacheServiceClient = class { + static { + __name(this, "CacheServiceClient"); } - const optionVal = options[extensionName]; - if (optionVal === void 0) { - return optionVal; + constructor(userAgent, maxAttempts, baseRetryIntervalMilliseconds, retryMultiplier) { + this.maxAttempts = 5; + this.baseRetryIntervalMilliseconds = 3e3; + this.retryMultiplier = 1.5; + const token = (0, cacheUtils_1.getRuntimeToken)(); + this.baseUrl = (0, config_1.getCacheServiceURL)(); + if (maxAttempts) { + this.maxAttempts = maxAttempts; + } + if (baseRetryIntervalMilliseconds) { + this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds; + } + if (retryMultiplier) { + this.retryMultiplier = retryMultiplier; + } + this.httpClient = new http_client_1.HttpClient(userAgent, [ + new auth_1.BearerCredentialHandler(token) + ]); } - return extensionType ? extensionType.fromJson(optionVal) : optionVal; - } - __name(readMethodOption, "readMethodOption"); - exports2.readMethodOption = readMethodOption; - function readServiceOption(service, extensionName, extensionType) { - const options = service.options; - if (!options) { - return void 0; + // This function satisfies the Rpc interface. It is compatible with the JSON + // JSON generated client. + request(service, method, contentType, data) { + return __awaiter3(this, void 0, void 0, function* () { + const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href; + (0, core_1.debug)(`[Request] ${method} ${url}`); + const headers = { + "Content-Type": contentType + }; + try { + const { body } = yield this.retryableRequest(() => __awaiter3(this, void 0, void 0, function* () { + return this.httpClient.post(url, JSON.stringify(data), headers); + })); + return body; + } catch (error) { + throw new Error(`Failed to ${method}: ${error.message}`); + } + }); + } + retryableRequest(operation) { + return __awaiter3(this, void 0, void 0, function* () { + let attempt = 0; + let errorMessage = ""; + let rawBody = ""; + while (attempt < this.maxAttempts) { + let isRetryable = false; + try { + const response = yield operation(); + const statusCode = response.message.statusCode; + rawBody = yield response.readBody(); + (0, core_1.debug)(`[Response] - ${response.message.statusCode}`); + (0, core_1.debug)(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`); + const body = JSON.parse(rawBody); + (0, core_1.debug)(`Body: ${JSON.stringify(body, null, 2)}`); + if (this.isSuccessStatusCode(statusCode)) { + return { response, body }; + } + isRetryable = this.isRetryableHttpStatusCode(statusCode); + errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`; + if (body.msg) { + if (errors_1.UsageError.isUsageErrorMessage(body.msg)) { + throw new errors_1.UsageError(); + } + errorMessage = `${errorMessage}: ${body.msg}`; + } + } catch (error) { + if (error instanceof SyntaxError) { + (0, core_1.debug)(`Raw Body: ${rawBody}`); + } + if (error instanceof errors_1.UsageError) { + throw error; + } + if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? 
void 0 : error.code)) { + throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code); + } + isRetryable = true; + errorMessage = error.message; + } + if (!isRetryable) { + throw new Error(`Received non-retryable error: ${errorMessage}`); + } + if (attempt + 1 === this.maxAttempts) { + throw new Error(`Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`); + } + const retryTimeMilliseconds = this.getExponentialRetryTimeMilliseconds(attempt); + (0, core_1.info)(`Attempt ${attempt + 1} of ${this.maxAttempts} failed with error: ${errorMessage}. Retrying request in ${retryTimeMilliseconds} ms...`); + yield this.sleep(retryTimeMilliseconds); + attempt++; + } + throw new Error(`Request failed`); + }); + } + isSuccessStatusCode(statusCode) { + if (!statusCode) + return false; + return statusCode >= 200 && statusCode < 300; + } + isRetryableHttpStatusCode(statusCode) { + if (!statusCode) + return false; + const retryableStatusCodes = [ + http_client_1.HttpCodes.BadGateway, + http_client_1.HttpCodes.GatewayTimeout, + http_client_1.HttpCodes.InternalServerError, + http_client_1.HttpCodes.ServiceUnavailable, + http_client_1.HttpCodes.TooManyRequests + ]; + return retryableStatusCodes.includes(statusCode); + } + sleep(milliseconds) { + return __awaiter3(this, void 0, void 0, function* () { + return new Promise((resolve) => setTimeout(resolve, milliseconds)); + }); } - const optionVal = options[extensionName]; - if (optionVal === void 0) { - return optionVal; + getExponentialRetryTimeMilliseconds(attempt) { + if (attempt < 0) { + throw new Error("attempt should be a positive integer"); + } + if (attempt === 0) { + return this.baseRetryIntervalMilliseconds; + } + const minTime = this.baseRetryIntervalMilliseconds * Math.pow(this.retryMultiplier, attempt); + const maxTime = minTime * this.retryMultiplier; + return Math.trunc(Math.random() * (maxTime - minTime) + minTime); } - return extensionType ? extensionType.fromJson(optionVal) : optionVal; + }; + function internalCacheTwirpClient(options) { + const client = new CacheServiceClient((0, user_agent_1.getUserAgentString)(), options === null || options === void 0 ? void 0 : options.maxAttempts, options === null || options === void 0 ? void 0 : options.retryIntervalMs, options === null || options === void 0 ? void 0 : options.retryMultiplier); + return new cache_twirp_1.CacheServiceClientJSON(client); } - __name(readServiceOption, "readServiceOption"); - exports2.readServiceOption = readServiceOption; + __name(internalCacheTwirpClient, "internalCacheTwirpClient"); + exports2.internalCacheTwirpClient = internalCacheTwirpClient; } }); -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/service-type.js -var require_service_type = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/service-type.js"(exports2) { +// ../node_modules/@actions/cache/lib/internal/tar.js +var require_tar = __commonJS({ + "../node_modules/@actions/cache/lib/internal/tar.js"(exports2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ServiceType = void 0; - var reflection_info_1 = require_reflection_info2(); - var ServiceType = class { - static { - __name(this, "ServiceType"); + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; } - constructor(typeName, methods, options) { - this.typeName = typeName; - this.methods = methods.map((i) => reflection_info_1.normalizeMethodInfo(i, this)); - this.options = options !== null && options !== void 0 ? options : {}; + Object.defineProperty(o, k2, desc); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); } + __setModuleDefault3(result, mod); + return result; }; - exports2.ServiceType = ServiceType; - } -}); - -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-error.js -var require_rpc_error = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-error.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.RpcError = void 0; - var RpcError = class extends Error { - static { - __name(this, "RpcError"); - } - constructor(message, code = "UNKNOWN", meta) { - super(message); - this.name = "RpcError"; - Object.setPrototypeOf(this, new.target.prototype); - this.code = code; - this.meta = meta !== null && meta !== void 0 ? meta : {}; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); } - toString() { - const l = [this.name + ": " + this.message]; - if (this.code) { - l.push(""); - l.push("Code: " + this.code); - } - if (this.serviceName && this.methodName) { - l.push("Method: " + this.serviceName + "/" + this.methodName); + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } } - let m = Object.entries(this.meta); - if (m.length) { - l.push(""); - l.push("Meta:"); - for (let [k, v] of m) { - l.push(` ${k}: ${v}`); + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); } } - return l.join("\n"); - } + __name(rejected, "rejected"); + function step(result) { + result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; - exports2.RpcError = RpcError; - } -}); - -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-options.js -var require_rpc_options = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-options.js"(exports2) { - "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.mergeRpcOptions = void 0; - var runtime_1 = require_commonjs7(); - function mergeRpcOptions(defaults, options) { - if (!options) - return defaults; - let o = {}; - copy(defaults, o); - copy(options, o); - for (let key of Object.keys(options)) { - let val = options[key]; - switch (key) { - case "jsonOptions": - o.jsonOptions = runtime_1.mergeJsonOptions(defaults.jsonOptions, o.jsonOptions); + exports2.createTar = exports2.extractTar = exports2.listTar = void 0; + var exec_1 = require_exec(); + var io2 = __importStar3(require_io()); + var fs_1 = require("fs"); + var path2 = __importStar3(require("path")); + var utils = __importStar3(require_cacheUtils()); + var constants_1 = require_constants7(); + var IS_WINDOWS = process.platform === "win32"; + function getTarPath() { + return __awaiter3(this, void 0, void 0, function* () { + switch (process.platform) { + case "win32": { + const gnuTar = yield utils.getGnuTarPathOnWindows(); + const systemTar = constants_1.SystemTarPathOnWindows; + if (gnuTar) { + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } else if ((0, fs_1.existsSync)(systemTar)) { + return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; + } break; - case "binaryOptions": - o.binaryOptions = runtime_1.mergeBinaryOptions(defaults.binaryOptions, o.binaryOptions); + } + case "darwin": { + const gnuTar = yield io2.which("gtar", false); + if (gnuTar) { + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } else { + return { + path: yield io2.which("tar", true), + type: constants_1.ArchiveToolType.BSD + }; + } + } + default: break; - case "meta": - o.meta = {}; - copy(defaults.meta, o.meta); - copy(options.meta, o.meta); + } + return { + path: yield io2.which("tar", true), + type: constants_1.ArchiveToolType.GNU + }; + }); + } + __name(getTarPath, "getTarPath"); + function getTarArgs(tarPath, compressionMethod, type, archivePath = "") { + return __awaiter3(this, void 0, void 0, function* () { + const args = [`"${tarPath.path}"`]; + const cacheFileName = utils.getCacheFileName(compressionMethod); + const tarFile = "cache.tar"; + const workingDirectory = getWorkingDirectory(); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; + switch (type) { + case "create": + args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); break; - case "interceptors": - o.interceptors = defaults.interceptors ? defaults.interceptors.concat(val) : val.concat(); + case "extract": + args.push("-xf", BSD_TAR_ZSTD ? 
tarFile : archivePath.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path2.sep}`, "g"), "/")); + break; + case "list": + args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "-P"); break; } - } - return o; + if (tarPath.type === constants_1.ArchiveToolType.GNU) { + switch (process.platform) { + case "win32": + args.push("--force-local"); + break; + case "darwin": + args.push("--delay-directory-restore"); + break; + } + } + return args; + }); } - __name(mergeRpcOptions, "mergeRpcOptions"); - exports2.mergeRpcOptions = mergeRpcOptions; - function copy(a, into) { - if (!a) - return; - let c = into; - for (let [k, v] of Object.entries(a)) { - if (v instanceof Date) - c[k] = new Date(v.getTime()); - else if (Array.isArray(v)) - c[k] = v.concat(); - else - c[k] = v; - } + __name(getTarArgs, "getTarArgs"); + function getCommands(compressionMethod, type, archivePath = "") { + return __awaiter3(this, void 0, void 0, function* () { + let args; + const tarPath = yield getTarPath(); + const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); + const compressionArgs = type !== "create" ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) : yield getCompressionProgram(tarPath, compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; + if (BSD_TAR_ZSTD && type !== "create") { + args = [[...compressionArgs].join(" "), [...tarArgs].join(" ")]; + } else { + args = [[...tarArgs].join(" "), [...compressionArgs].join(" ")]; + } + if (BSD_TAR_ZSTD) { + return args; + } + return [args.join(" ")]; + }); } - __name(copy, "copy"); - } -}); - -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/deferred.js -var require_deferred = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/deferred.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Deferred = exports2.DeferredState = void 0; - var DeferredState; - (function(DeferredState2) { - DeferredState2[DeferredState2["PENDING"] = 0] = "PENDING"; - DeferredState2[DeferredState2["REJECTED"] = 1] = "REJECTED"; - DeferredState2[DeferredState2["RESOLVED"] = 2] = "RESOLVED"; - })(DeferredState = exports2.DeferredState || (exports2.DeferredState = {})); - var Deferred = class { - static { - __name(this, "Deferred"); - } - /** - * @param preventUnhandledRejectionWarning - prevents the warning - * "Unhandled Promise rejection" by adding a noop rejection handler. - * Working with calls returned from the runtime-rpc package in an - * async function usually means awaiting one call property after - * the other. This means that the "status" is not being awaited when - * an earlier await for the "headers" is rejected. This causes the - * "unhandled promise reject" warning. A more correct behaviour for - * calls might be to become aware whether at least one of the - * promises is handled and swallow the rejection warning for the - * others. 
- */ - constructor(preventUnhandledRejectionWarning = true) { - this._state = DeferredState.PENDING; - this._promise = new Promise((resolve, reject) => { - this._resolve = resolve; - this._reject = reject; - }); - if (preventUnhandledRejectionWarning) { - this._promise.catch((_2) => { - }); + __name(getCommands, "getCommands"); + function getWorkingDirectory() { + var _a; + return (_a = process.env["GITHUB_WORKSPACE"]) !== null && _a !== void 0 ? _a : process.cwd(); + } + __name(getWorkingDirectory, "getWorkingDirectory"); + function getDecompressionProgram(tarPath, compressionMethod, archivePath) { + return __awaiter3(this, void 0, void 0, function* () { + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD ? [ + "zstd -d --long=30 --force -o", + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path2.sep}`, "g"), "/") + ] : [ + "--use-compress-program", + IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD ? [ + "zstd -d --force -o", + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path2.sep}`, "g"), "/") + ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -d"' : "unzstd"]; + default: + return ["-z"]; } - } - /** - * Get the current state of the promise. - */ - get state() { - return this._state; - } - /** - * Get the deferred promise. - */ - get promise() { - return this._promise; - } - /** - * Resolve the promise. Throws if the promise is already resolved or rejected. - */ - resolve(value) { - if (this.state !== DeferredState.PENDING) - throw new Error(`cannot resolve ${DeferredState[this.state].toLowerCase()}`); - this._resolve(value); - this._state = DeferredState.RESOLVED; - } - /** - * Reject the promise. Throws if the promise is already resolved or rejected. - */ - reject(reason) { - if (this.state !== DeferredState.PENDING) - throw new Error(`cannot reject ${DeferredState[this.state].toLowerCase()}`); - this._reject(reason); - this._state = DeferredState.REJECTED; - } - /** - * Resolve the promise. Ignore if not pending. - */ - resolvePending(val) { - if (this._state === DeferredState.PENDING) - this.resolve(val); - } - /** - * Reject the promise. Ignore if not pending. 
- */ - rejectPending(reason) { - if (this._state === DeferredState.PENDING) - this.reject(reason); - } - }; - exports2.Deferred = Deferred; - } -}); - -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-output-stream.js -var require_rpc_output_stream = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-output-stream.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.RpcOutputStreamController = void 0; - var deferred_1 = require_deferred(); - var runtime_1 = require_commonjs7(); - var RpcOutputStreamController = class { - static { - __name(this, "RpcOutputStreamController"); - } - constructor() { - this._lis = { - nxt: [], - msg: [], - err: [], - cmp: [] - }; - this._closed = false; - } - // --- RpcOutputStream callback API - onNext(callback) { - return this.addLis(callback, this._lis.nxt); - } - onMessage(callback) { - return this.addLis(callback, this._lis.msg); - } - onError(callback) { - return this.addLis(callback, this._lis.err); - } - onComplete(callback) { - return this.addLis(callback, this._lis.cmp); - } - addLis(callback, list) { - list.push(callback); - return () => { - let i = list.indexOf(callback); - if (i >= 0) - list.splice(i, 1); - }; - } - // remove all listeners - clearLis() { - for (let l of Object.values(this._lis)) - l.splice(0, l.length); - } - // --- Controller API - /** - * Is this stream already closed by a completion or error? - */ - get closed() { - return this._closed !== false; - } - /** - * Emit message, close with error, or close successfully, but only one - * at a time. - * Can be used to wrap a stream by using the other stream's `onNext`. - */ - notifyNext(message, error, complete) { - runtime_1.assert((message ? 1 : 0) + (error ? 1 : 0) + (complete ? 1 : 0) <= 1, "only one emission at a time"); - if (message) - this.notifyMessage(message); - if (error) - this.notifyError(error); - if (complete) - this.notifyComplete(); - } - /** - * Emits a new message. Throws if stream is closed. - * - * Triggers onNext and onMessage callbacks. - */ - notifyMessage(message) { - runtime_1.assert(!this.closed, "stream is closed"); - this.pushIt({ value: message, done: false }); - this._lis.msg.forEach((l) => l(message)); - this._lis.nxt.forEach((l) => l(message, void 0, false)); - } - /** - * Closes the stream with an error. Throws if stream is closed. - * - * Triggers onNext and onError callbacks. - */ - notifyError(error) { - runtime_1.assert(!this.closed, "stream is closed"); - this._closed = error; - this.pushIt(error); - this._lis.err.forEach((l) => l(error)); - this._lis.nxt.forEach((l) => l(void 0, error, false)); - this.clearLis(); - } - /** - * Closes the stream successfully. Throws if stream is closed. - * - * Triggers onNext and onComplete callbacks. - */ - notifyComplete() { - runtime_1.assert(!this.closed, "stream is closed"); - this._closed = true; - this.pushIt({ value: null, done: true }); - this._lis.cmp.forEach((l) => l()); - this._lis.nxt.forEach((l) => l(void 0, void 0, true)); - this.clearLis(); - } - /** - * Creates an async iterator (that can be used with `for await {...}`) - * to consume the stream. - * - * Some things to note: - * - If an error occurs, the `for await` will throw it. - * - If an error occurred before the `for await` was started, `for await` - * will re-throw it. - * - If the stream is already complete, the `for await` will be empty. 
- * - If your `for await` consumes slower than the stream produces, - * for example because you are relaying messages in a slow operation, - * messages are queued. - */ - [Symbol.asyncIterator]() { - if (!this._itState) { - this._itState = { q: [] }; + }); + } + __name(getDecompressionProgram, "getDecompressionProgram"); + function getCompressionProgram(tarPath, compressionMethod) { + return __awaiter3(this, void 0, void 0, function* () { + const cacheFileName = utils.getCacheFileName(compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD ? [ + "zstd -T0 --long=30 --force -o", + cacheFileName.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), + constants_1.TarFilename + ] : [ + "--use-compress-program", + IS_WINDOWS ? '"zstd -T0 --long=30"' : "zstdmt --long=30" + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD ? [ + "zstd -T0 --force -o", + cacheFileName.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), + constants_1.TarFilename + ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -T0"' : "zstdmt"]; + default: + return ["-z"]; } - if (this._closed === true) - this.pushIt({ value: null, done: true }); - else if (this._closed !== false) - this.pushIt(this._closed); - return { - next: /* @__PURE__ */ __name(() => { - let state = this._itState; - runtime_1.assert(state, "bad state"); - runtime_1.assert(!state.p, "iterator contract broken"); - let first = state.q.shift(); - if (first) - return "value" in first ? Promise.resolve(first) : Promise.reject(first); - state.p = new deferred_1.Deferred(); - return state.p.promise; - }, "next") - }; - } - // "push" a new iterator result. - // this either resolves a pending promise, or enqueues the result. - pushIt(result) { - let state = this._itState; - if (!state) - return; - if (state.p) { - const p = state.p; - runtime_1.assert(p.state == deferred_1.DeferredState.PENDING, "iterator contract broken"); - "value" in result ? p.resolve(result) : p.reject(result); - delete state.p; - } else { - state.q.push(result); + }); + } + __name(getCompressionProgram, "getCompressionProgram"); + function execCommands(commands, cwd) { + return __awaiter3(this, void 0, void 0, function* () { + for (const command of commands) { + try { + yield (0, exec_1.exec)(command, void 0, { + cwd, + env: Object.assign(Object.assign({}, process.env), { MSYS: "winsymlinks:nativestrict" }) + }); + } catch (error) { + throw new Error(`${command.split(" ")[0]} failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); + } } - } - }; - exports2.RpcOutputStreamController = RpcOutputStreamController; + }); + } + __name(execCommands, "execCommands"); + function listTar(archivePath, compressionMethod) { + return __awaiter3(this, void 0, void 0, function* () { + const commands = yield getCommands(compressionMethod, "list", archivePath); + yield execCommands(commands); + }); + } + __name(listTar, "listTar"); + exports2.listTar = listTar; + function extractTar(archivePath, compressionMethod) { + return __awaiter3(this, void 0, void 0, function* () { + const workingDirectory = getWorkingDirectory(); + yield io2.mkdirP(workingDirectory); + const commands = yield getCommands(compressionMethod, "extract", archivePath); + yield execCommands(commands); + }); + } + __name(extractTar, "extractTar"); + exports2.extractTar = extractTar; + function createTar(archiveFolder, sourceDirectories, compressionMethod) { + return __awaiter3(this, void 0, void 0, function* () { + (0, fs_1.writeFileSync)(path2.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); + const commands = yield getCommands(compressionMethod, "create"); + yield execCommands(commands, archiveFolder); + }); + } + __name(createTar, "createTar"); + exports2.createTar = createTar; } }); -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/unary-call.js -var require_unary_call = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/unary-call.js"(exports2) { +// ../node_modules/@actions/cache/lib/cache.js +var require_cache3 = __commonJS({ + "../node_modules/@actions/cache/lib/cache.js"(exports2) { "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; + } + Object.defineProperty(o, k2, desc); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + } + __setModuleDefault3(result, mod); + return result; + }; var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function(resolve) { @@ -96021,203 +95082,434 @@ var require_unary_call = __commonJS({ }); }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.UnaryCall = void 0; - var UnaryCall = class { + exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.ReserveCacheError = exports2.ValidationError = void 0; + var core2 = __importStar3(require_core()); + var path2 = __importStar3(require("path")); + var utils = __importStar3(require_cacheUtils()); + var cacheHttpClient = __importStar3(require_cacheHttpClient()); + var cacheTwirpClient = __importStar3(require_cacheTwirpClient()); + var config_1 = require_config(); + var tar_1 = require_tar(); + var constants_1 = require_constants7(); + var ValidationError = class _ValidationError extends Error { static { - __name(this, "UnaryCall"); + __name(this, "ValidationError"); } - constructor(method, requestHeaders, request, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.request = request; - this.headers = headers; - this.response = response; - this.status = status; - this.trailers = trailers; + constructor(message) { + super(message); + this.name = "ValidationError"; + Object.setPrototypeOf(this, _ValidationError.prototype); } - /** - * If you are only interested in the final outcome of this call, - * you can await it to receive a `FinishedUnaryCall`. - */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + }; + exports2.ValidationError = ValidationError; + var ReserveCacheError = class _ReserveCacheError extends Error { + static { + __name(this, "ReserveCacheError"); } - promiseFinished() { - return __awaiter3(this, void 0, void 0, function* () { - let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - request: this.request, - headers, - response, - status, - trailers - }; - }); + constructor(message) { + super(message); + this.name = "ReserveCacheError"; + Object.setPrototypeOf(this, _ReserveCacheError.prototype); } }; - exports2.UnaryCall = UnaryCall; - } -}); - -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-streaming-call.js -var require_server_streaming_call = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-streaming-call.js"(exports2) { - "use strict"; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? 
value : new P(function(resolve) { - resolve(value); - }); + exports2.ReserveCacheError = ReserveCacheError; + function checkPaths(paths) { + if (!paths || paths.length === 0) { + throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { + } + __name(checkPaths, "checkPaths"); + function checkKey(key) { + if (key.length > 512) { + throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); + } + const regex = /^[^,]*$/; + if (!regex.test(key)) { + throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); + } + } + __name(checkKey, "checkKey"); + function isFeatureAvailable() { + return !!process.env["ACTIONS_CACHE_URL"]; + } + __name(isFeatureAvailable, "isFeatureAvailable"); + exports2.isFeatureAvailable = isFeatureAvailable; + function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { + return __awaiter3(this, void 0, void 0, function* () { + const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); + core2.debug(`Cache service version: ${cacheServiceVersion}`); + checkPaths(paths); + switch (cacheServiceVersion) { + case "v2": + return yield restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); + case "v1": + default: + return yield restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); + } + }); + } + __name(restoreCache, "restoreCache"); + exports2.restoreCache = restoreCache; + function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { + return __awaiter3(this, void 0, void 0, function* () { + restoreKeys = restoreKeys || []; + const keys = [primaryKey, ...restoreKeys]; + core2.debug("Resolved Keys:"); + core2.debug(JSON.stringify(keys)); + if (keys.length > 10) { + throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); + } + for (const key of keys) { + checkKey(key); + } + const compressionMethod = yield utils.getCompressionMethod(); + let archivePath = ""; + try { + const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod, + enableCrossOsArchive + }); + if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { + return void 0; + } + if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { + core2.info("Lookup only - skipping download"); + return cacheEntry.cacheKey; + } + archivePath = path2.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + core2.debug(`Archive Path: ${archivePath}`); + yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); + if (core2.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core2.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + yield (0, tar_1.extractTar)(archivePath, compressionMethod); + core2.info("Cache restored successfully"); + return cacheEntry.cacheKey; + } catch (error) { + const typedError = error; + if (typedError.name === ValidationError.name) { + throw error; + } else { + core2.warning(`Failed to restore: ${error.message}`); + } + } finally { try { - step(generator.next(value)); - } catch (e) { - reject(e); + yield utils.unlinkFile(archivePath); + } catch (error) { + core2.debug(`Failed to delete archive: ${error}`); } } - __name(fulfilled, "fulfilled"); - function rejected(value) { + return void 0; + }); + } + __name(restoreCacheV1, "restoreCacheV1"); + function restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { + return __awaiter3(this, void 0, void 0, function* () { + options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); + restoreKeys = restoreKeys || []; + const keys = [primaryKey, ...restoreKeys]; + core2.debug("Resolved Keys:"); + core2.debug(JSON.stringify(keys)); + if (keys.length > 10) { + throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); + } + for (const key of keys) { + checkKey(key); + } + let archivePath = ""; + try { + const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); + const compressionMethod = yield utils.getCompressionMethod(); + const request = { + key: primaryKey, + restoreKeys, + version: utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive) + }; + const response = yield twirpClient.GetCacheEntryDownloadURL(request); + if (!response.ok) { + core2.warning(`Cache not found for keys: ${keys.join(", ")}`); + return void 0; + } + core2.info(`Cache hit for: ${request.key}`); + if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { + core2.info("Lookup only - skipping download"); + return response.matchedKey; + } + archivePath = path2.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + core2.debug(`Archive path: ${archivePath}`); + core2.debug(`Starting download of archive to: ${archivePath}`); + yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core2.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core2.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + yield (0, tar_1.extractTar)(archivePath, compressionMethod); + core2.info("Cache restored successfully"); + return response.matchedKey; + } catch (error) { + const typedError = error; + if (typedError.name === ValidationError.name) { + throw error; + } else { + core2.warning(`Failed to restore: ${error.message}`); + } + } finally { try { - step(generator["throw"](value)); - } catch (e) { - reject(e); + if (archivePath) { + yield utils.unlinkFile(archivePath); + } + } catch (error) { + core2.debug(`Failed to delete archive: ${error}`); } } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + return void 0; + }); + } + __name(restoreCacheV2, "restoreCacheV2"); + function saveCache(paths, key, options, enableCrossOsArchive = false) { + return __awaiter3(this, void 0, void 0, function* () { + const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); + core2.debug(`Cache service version: ${cacheServiceVersion}`); + checkPaths(paths); + checkKey(key); + switch (cacheServiceVersion) { + case "v2": + return yield saveCacheV2(paths, key, options, enableCrossOsArchive); + case "v1": + default: + return yield saveCacheV1(paths, key, options, enableCrossOsArchive); } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ServerStreamingCall = void 0; - var ServerStreamingCall = class { - static { - __name(this, "ServerStreamingCall"); - } - constructor(method, requestHeaders, request, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.request = request; - this.headers = headers; - this.responses = response; - this.status = status; - this.trailers = trailers; - } - /** - * Instead of awaiting the response status and trailers, you can - * just as well await this call itself to receive the server outcome. - * You should first setup some listeners to the `request` to - * see the actual messages the server replied with. - */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? 
Promise.resolve(onrejected(reason)) : Promise.reject(reason)); - } - promiseFinished() { - return __awaiter3(this, void 0, void 0, function* () { - let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - request: this.request, - headers, - status, - trailers - }; - }); - } - }; - exports2.ServerStreamingCall = ServerStreamingCall; - } -}); - -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/client-streaming-call.js -var require_client_streaming_call = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/client-streaming-call.js"(exports2) { - "use strict"; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { + } + __name(saveCache, "saveCache"); + exports2.saveCache = saveCache; + function saveCacheV1(paths, key, options, enableCrossOsArchive = false) { + var _a, _b, _c, _d, _e; + return __awaiter3(this, void 0, void 0, function* () { + const compressionMethod = yield utils.getCompressionMethod(); + let cacheId = -1; + const cachePaths = yield utils.resolvePaths(paths); + core2.debug("Cache Paths:"); + core2.debug(`${JSON.stringify(cachePaths)}`); + if (cachePaths.length === 0) { + throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); + } + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path2.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + core2.debug(`Archive Path: ${archivePath}`); + try { + yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); + if (core2.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const fileSizeLimit = 10 * 1024 * 1024 * 1024; + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core2.debug(`File Size: ${archiveFileSize}`); + if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { + throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); + } + core2.debug("Reserving Cache"); + const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { + compressionMethod, + enableCrossOsArchive, + cacheSize: archiveFileSize + }); + if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { + cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId; + } else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) { + throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? 
_d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); + } else { + throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); + } + core2.debug(`Saving Cache (ID: ${cacheId})`); + yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); + } catch (error) { + const typedError = error; + if (typedError.name === ValidationError.name) { + throw error; + } else if (typedError.name === ReserveCacheError.name) { + core2.info(`Failed to save: ${typedError.message}`); + } else { + core2.warning(`Failed to save: ${typedError.message}`); + } + } finally { try { - step(generator.next(value)); - } catch (e) { - reject(e); + yield utils.unlinkFile(archivePath); + } catch (error) { + core2.debug(`Failed to delete archive: ${error}`); + } + } + return cacheId; + }); + } + __name(saveCacheV1, "saveCacheV1"); + function saveCacheV2(paths, key, options, enableCrossOsArchive = false) { + return __awaiter3(this, void 0, void 0, function* () { + options = Object.assign(Object.assign({}, options), { uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8, useAzureSdk: true }); + const compressionMethod = yield utils.getCompressionMethod(); + const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); + let cacheId = -1; + const cachePaths = yield utils.resolvePaths(paths); + core2.debug("Cache Paths:"); + core2.debug(`${JSON.stringify(cachePaths)}`); + if (cachePaths.length === 0) { + throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); + } + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path2.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + core2.debug(`Archive Path: ${archivePath}`); + try { + yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); + if (core2.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core2.debug(`File Size: ${archiveFileSize}`); + if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) { + throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); + } + options.archiveSizeBytes = archiveFileSize; + core2.debug("Reserving Cache"); + const version3 = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); + const request = { + key, + version: version3 + }; + const response = yield twirpClient.CreateCacheEntry(request); + if (!response.ok) { + throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { + core2.debug(`Attempting to upload cache located at: ${archivePath}`); + yield cacheHttpClient.saveCache(cacheId, archivePath, response.signedUploadUrl, options); + const finalizeRequest = { + key, + version: version3, + sizeBytes: `${archiveFileSize}` + }; + const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); + core2.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + if (!finalizeResponse.ok) { + throw new 
Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); + } + cacheId = parseInt(finalizeResponse.entryId); + } catch (error) { + const typedError = error; + if (typedError.name === ValidationError.name) { + throw error; + } else if (typedError.name === ReserveCacheError.name) { + core2.info(`Failed to save: ${typedError.message}`); + } else { + core2.warning(`Failed to save: ${typedError.message}`); + } + } finally { try { - step(generator["throw"](value)); - } catch (e) { - reject(e); + yield utils.unlinkFile(archivePath); + } catch (error) { + core2.debug(`Failed to delete archive: ${error}`); } } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); + return cacheId; }); - }; + } + __name(saveCacheV2, "saveCacheV2"); + } +}); + +// ../node_modules/@actions/github/lib/context.js +var require_context2 = __commonJS({ + "../node_modules/@actions/github/lib/context.js"(exports2) { + "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ClientStreamingCall = void 0; - var ClientStreamingCall = class { + exports2.Context = void 0; + var fs_1 = require("fs"); + var os_1 = require("os"); + var Context = class { static { - __name(this, "ClientStreamingCall"); - } - constructor(method, requestHeaders, request, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.requests = request; - this.headers = headers; - this.response = response; - this.status = status; - this.trailers = trailers; + __name(this, "Context"); } /** - * Instead of awaiting the response status and trailers, you can - * just as well await this call itself to receive the server outcome. - * Note that it may still be valid to send more request messages. + * Hydrate the context from the environment */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + constructor() { + var _a, _b, _c; + this.payload = {}; + if (process.env.GITHUB_EVENT_PATH) { + if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { + this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); + } else { + const path2 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path2} does not exist${os_1.EOL}`); + } + } + this.eventName = process.env.GITHUB_EVENT_NAME; + this.sha = process.env.GITHUB_SHA; + this.ref = process.env.GITHUB_REF; + this.workflow = process.env.GITHUB_WORKFLOW; + this.action = process.env.GITHUB_ACTION; + this.actor = process.env.GITHUB_ACTOR; + this.job = process.env.GITHUB_JOB; + this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); + this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); + this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; + this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; + this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? 
_c : `https://api.github.com/graphql`; } - promiseFinished() { - return __awaiter3(this, void 0, void 0, function* () { - let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); + get issue() { + const payload = this.payload; + return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); + } + get repo() { + if (process.env.GITHUB_REPOSITORY) { + const [owner, repo] = process.env.GITHUB_REPOSITORY.split("/"); + return { owner, repo }; + } + if (this.payload.repository) { return { - method: this.method, - requestHeaders: this.requestHeaders, - headers, - response, - status, - trailers + owner: this.payload.repository.owner.login, + repo: this.payload.repository.name }; - }); + } + throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); } }; - exports2.ClientStreamingCall = ClientStreamingCall; + exports2.Context = Context; } }); -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/duplex-streaming-call.js -var require_duplex_streaming_call = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/duplex-streaming-call.js"(exports2) { +// ../node_modules/@actions/github/lib/internal/utils.js +var require_utils5 = __commonJS({ + "../node_modules/@actions/github/lib/internal/utils.js"(exports2) { "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; + } + Object.defineProperty(o, k2, desc); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + } + __setModuleDefault3(result, mod); + return result; + }; var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { @@ -96250,2672 +95542,3930 @@ var require_duplex_streaming_call = __commonJS({ }); }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.DuplexStreamingCall = void 0; - var DuplexStreamingCall = class { - static { - __name(this, "DuplexStreamingCall"); - } - constructor(method, requestHeaders, request, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.requests = request; - this.headers = headers; - this.responses = response; - this.status = status; - this.trailers = trailers; - } - /** - * Instead of awaiting the response status and trailers, you can - * just as well await this call itself to receive the server outcome. - * Note that it may still be valid to send more request messages. - */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then((value) => onfulfilled ? 
Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); - } - promiseFinished() { - return __awaiter3(this, void 0, void 0, function* () { - let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - headers, - status, - trailers - }; - }); + exports2.getApiBaseUrl = exports2.getProxyFetch = exports2.getProxyAgentDispatcher = exports2.getProxyAgent = exports2.getAuthString = void 0; + var httpClient = __importStar3(require_lib()); + var undici_1 = require_undici(); + function getAuthString(token, options) { + if (!token && !options.auth) { + throw new Error("Parameter token or opts.auth is required"); + } else if (token && options.auth) { + throw new Error("Parameters token and opts.auth may not both be specified"); } - }; - exports2.DuplexStreamingCall = DuplexStreamingCall; + return typeof options.auth === "string" ? options.auth : `token ${token}`; + } + __name(getAuthString, "getAuthString"); + exports2.getAuthString = getAuthString; + function getProxyAgent(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgent(destinationUrl); + } + __name(getProxyAgent, "getProxyAgent"); + exports2.getProxyAgent = getProxyAgent; + function getProxyAgentDispatcher(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgentDispatcher(destinationUrl); + } + __name(getProxyAgentDispatcher, "getProxyAgentDispatcher"); + exports2.getProxyAgentDispatcher = getProxyAgentDispatcher; + function getProxyFetch(destinationUrl) { + const httpDispatcher = getProxyAgentDispatcher(destinationUrl); + const proxyFetch = /* @__PURE__ */ __name((url, opts) => __awaiter3(this, void 0, void 0, function* () { + return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher })); + }), "proxyFetch"); + return proxyFetch; + } + __name(getProxyFetch, "getProxyFetch"); + exports2.getProxyFetch = getProxyFetch; + function getApiBaseUrl() { + return process.env["GITHUB_API_URL"] || "https://api.github.com"; + } + __name(getApiBaseUrl, "getApiBaseUrl"); + exports2.getApiBaseUrl = getApiBaseUrl; } }); -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/test-transport.js -var require_test_transport = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/test-transport.js"(exports2) { +// ../node_modules/@actions/github/node_modules/universal-user-agent/dist-node/index.js +var require_dist_node = __commonJS({ + "../node_modules/@actions/github/node_modules/universal-user-agent/dist-node/index.js"(exports2) { "use strict"; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.TestTransport = void 0; - var rpc_error_1 = require_rpc_error(); - var runtime_1 = require_commonjs7(); - var rpc_output_stream_1 = require_rpc_output_stream(); - var rpc_options_1 = require_rpc_options(); - var unary_call_1 = require_unary_call(); - var server_streaming_call_1 = require_server_streaming_call(); - var client_streaming_call_1 = require_client_streaming_call(); - var duplex_streaming_call_1 = require_duplex_streaming_call(); - var TestTransport = class _TestTransport { - static { - __name(this, "TestTransport"); + function getUserAgent() { + if (typeof navigator === "object" && "userAgent" in navigator) { + return navigator.userAgent; } - /** - * Initialize with mock data. Omitted fields have default value. - */ - constructor(data) { - this.suppressUncaughtRejections = true; - this.headerDelay = 10; - this.responseDelay = 50; - this.betweenResponseDelay = 10; - this.afterResponseDelay = 10; - this.data = data !== null && data !== void 0 ? data : {}; + if (typeof process === "object" && process.version !== void 0) { + return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; } - /** - * Sent message(s) during the last operation. - */ - get sentMessages() { - if (this.lastInput instanceof TestInputStream) { - return this.lastInput.sent; - } else if (typeof this.lastInput == "object") { - return [this.lastInput.single]; - } - return []; + return ""; + } + __name(getUserAgent, "getUserAgent"); + exports2.getUserAgent = getUserAgent; + } +}); + +// ../node_modules/@actions/github/node_modules/before-after-hook/lib/register.js +var require_register = __commonJS({ + "../node_modules/@actions/github/node_modules/before-after-hook/lib/register.js"(exports2, module2) { + module2.exports = register; + function register(state, name, method, options) { + if (typeof method !== "function") { + throw new Error("method for before hook must be a function"); } - /** - * Sending message(s) completed? - */ - get sendComplete() { - if (this.lastInput instanceof TestInputStream) { - return this.lastInput.completed; - } else if (typeof this.lastInput == "object") { - return true; - } - return false; + if (!options) { + options = {}; } - // Creates a promise for response headers from the mock data. - promiseHeaders() { - var _a; - const headers = (_a = this.data.headers) !== null && _a !== void 0 ? _a : _TestTransport.defaultHeaders; - return headers instanceof rpc_error_1.RpcError ? Promise.reject(headers) : Promise.resolve(headers); + if (Array.isArray(name)) { + return name.reverse().reduce(function(callback, name2) { + return register.bind(null, state, name2, callback, options); + }, method)(); } - // Creates a promise for a single, valid, message from the mock data. 
- promiseSingleResponse(method) { - if (this.data.response instanceof rpc_error_1.RpcError) { - return Promise.reject(this.data.response); - } - let r; - if (Array.isArray(this.data.response)) { - runtime_1.assert(this.data.response.length > 0); - r = this.data.response[0]; - } else if (this.data.response !== void 0) { - r = this.data.response; - } else { - r = method.O.create(); + return Promise.resolve().then(function() { + if (!state.registry[name]) { + return method(options); } - runtime_1.assert(method.O.is(r)); - return Promise.resolve(r); - } - /** - * Pushes response messages from the mock data to the output stream. - * If an error response, status or trailers are mocked, the stream is - * closed with the respective error. - * Otherwise, stream is completed successfully. - * - * The returned promise resolves when the stream is closed. It should - * not reject. If it does, code is broken. - */ - streamResponses(method, stream, abort) { - return __awaiter3(this, void 0, void 0, function* () { - const messages = []; - if (this.data.response === void 0) { - messages.push(method.O.create()); - } else if (Array.isArray(this.data.response)) { - for (let msg of this.data.response) { - runtime_1.assert(method.O.is(msg)); - messages.push(msg); - } - } else if (!(this.data.response instanceof rpc_error_1.RpcError)) { - runtime_1.assert(method.O.is(this.data.response)); - messages.push(this.data.response); - } - try { - yield delay(this.responseDelay, abort)(void 0); - } catch (error) { - stream.notifyError(error); - return; - } - if (this.data.response instanceof rpc_error_1.RpcError) { - stream.notifyError(this.data.response); - return; - } - for (let msg of messages) { - stream.notifyMessage(msg); - try { - yield delay(this.betweenResponseDelay, abort)(void 0); - } catch (error) { - stream.notifyError(error); - return; - } - } - if (this.data.status instanceof rpc_error_1.RpcError) { - stream.notifyError(this.data.status); - return; - } - if (this.data.trailers instanceof rpc_error_1.RpcError) { - stream.notifyError(this.data.trailers); - return; - } - stream.notifyComplete(); - }); + return state.registry[name].reduce(function(method2, registered) { + return registered.hook.bind(null, method2, options); + }, method)(); + }); + } + __name(register, "register"); + } +}); + +// ../node_modules/@actions/github/node_modules/before-after-hook/lib/add.js +var require_add = __commonJS({ + "../node_modules/@actions/github/node_modules/before-after-hook/lib/add.js"(exports2, module2) { + module2.exports = addHook; + function addHook(state, kind, name, hook) { + var orig = hook; + if (!state.registry[name]) { + state.registry[name] = []; } - // Creates a promise for response status from the mock data. - promiseStatus() { - var _a; - const status = (_a = this.data.status) !== null && _a !== void 0 ? _a : _TestTransport.defaultStatus; - return status instanceof rpc_error_1.RpcError ? Promise.reject(status) : Promise.resolve(status); + if (kind === "before") { + hook = /* @__PURE__ */ __name(function(method, options) { + return Promise.resolve().then(orig.bind(null, options)).then(method.bind(null, options)); + }, "hook"); } - // Creates a promise for response trailers from the mock data. - promiseTrailers() { - var _a; - const trailers = (_a = this.data.trailers) !== null && _a !== void 0 ? _a : _TestTransport.defaultTrailers; - return trailers instanceof rpc_error_1.RpcError ? 
Promise.reject(trailers) : Promise.resolve(trailers); + if (kind === "after") { + hook = /* @__PURE__ */ __name(function(method, options) { + var result; + return Promise.resolve().then(method.bind(null, options)).then(function(result_) { + result = result_; + return orig(result, options); + }).then(function() { + return result; + }); + }, "hook"); } - maybeSuppressUncaught(...promise) { - if (this.suppressUncaughtRejections) { - for (let p of promise) { - p.catch(() => { - }); - } - } + if (kind === "error") { + hook = /* @__PURE__ */ __name(function(method, options) { + return Promise.resolve().then(method.bind(null, options)).catch(function(error) { + return orig(error, options); + }); + }, "hook"); } - mergeOptions(options) { - return rpc_options_1.mergeRpcOptions({}, options); + state.registry[name].push({ + hook, + orig + }); + } + __name(addHook, "addHook"); + } +}); + +// ../node_modules/@actions/github/node_modules/before-after-hook/lib/remove.js +var require_remove = __commonJS({ + "../node_modules/@actions/github/node_modules/before-after-hook/lib/remove.js"(exports2, module2) { + module2.exports = removeHook; + function removeHook(state, name, method) { + if (!state.registry[name]) { + return; } - unary(method, input, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), responsePromise = headersPromise.catch((_2) => { - }).then(delay(this.responseDelay, options.abort)).then((_2) => this.promiseSingleResponse(method)), statusPromise = responsePromise.catch((_2) => { - }).then(delay(this.afterResponseDelay, options.abort)).then((_2) => this.promiseStatus()), trailersPromise = responsePromise.catch((_2) => { - }).then(delay(this.afterResponseDelay, options.abort)).then((_2) => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = { single: input }; - return new unary_call_1.UnaryCall(method, requestHeaders, input, headersPromise, responsePromise, statusPromise, trailersPromise); + var index = state.registry[name].map(function(registered) { + return registered.orig; + }).indexOf(method); + if (index === -1) { + return; } - serverStreaming(method, input, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? 
_a : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise.then(delay(this.responseDelay, options.abort)).catch(() => { - }).then(() => this.streamResponses(method, outputStream, options.abort)).then(delay(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise.then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise.then(() => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = { single: input }; - return new server_streaming_call_1.ServerStreamingCall(method, requestHeaders, input, headersPromise, outputStream, statusPromise, trailersPromise); + state.registry[name].splice(index, 1); + } + __name(removeHook, "removeHook"); + } +}); + +// ../node_modules/@actions/github/node_modules/before-after-hook/index.js +var require_before_after_hook = __commonJS({ + "../node_modules/@actions/github/node_modules/before-after-hook/index.js"(exports2, module2) { + var register = require_register(); + var addHook = require_add(); + var removeHook = require_remove(); + var bind = Function.bind; + var bindable = bind.bind(bind); + function bindApi(hook, state, name) { + var removeHookRef = bindable(removeHook, null).apply( + null, + name ? [state, name] : [state] + ); + hook.api = { remove: removeHookRef }; + hook.remove = removeHookRef; + ["before", "error", "after", "wrap"].forEach(function(kind) { + var args = name ? [state, kind, name] : [state, kind]; + hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args); + }); + } + __name(bindApi, "bindApi"); + function HookSingular() { + var singularHookName = "h"; + var singularHookState = { + registry: {} + }; + var singularHook = register.bind(null, singularHookState, singularHookName); + bindApi(singularHook, singularHookState, singularHookName); + return singularHook; + } + __name(HookSingular, "HookSingular"); + function HookCollection() { + var state = { + registry: {} + }; + var hook = register.bind(null, state); + bindApi(hook, state); + return hook; + } + __name(HookCollection, "HookCollection"); + var collectionHookDeprecationMessageDisplayed = false; + function Hook() { + if (!collectionHookDeprecationMessageDisplayed) { + console.warn( + '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4' + ); + collectionHookDeprecationMessageDisplayed = true; } - clientStreaming(method, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? 
_a : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), responsePromise = headersPromise.catch((_2) => { - }).then(delay(this.responseDelay, options.abort)).then((_2) => this.promiseSingleResponse(method)), statusPromise = responsePromise.catch((_2) => { - }).then(delay(this.afterResponseDelay, options.abort)).then((_2) => this.promiseStatus()), trailersPromise = responsePromise.catch((_2) => { - }).then(delay(this.afterResponseDelay, options.abort)).then((_2) => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = new TestInputStream(this.data, options.abort); - return new client_streaming_call_1.ClientStreamingCall(method, requestHeaders, this.lastInput, headersPromise, responsePromise, statusPromise, trailersPromise); + return HookCollection(); + } + __name(Hook, "Hook"); + Hook.Singular = HookSingular.bind(); + Hook.Collection = HookCollection.bind(); + module2.exports = Hook; + module2.exports.Hook = Hook; + module2.exports.Singular = Hook.Singular; + module2.exports.Collection = Hook.Collection; + } +}); + +// ../node_modules/@actions/github/node_modules/@octokit/endpoint/dist-node/index.js +var require_dist_node2 = __commonJS({ + "../node_modules/@actions/github/node_modules/@octokit/endpoint/dist-node/index.js"(exports2, module2) { + "use strict"; + var __defProp2 = Object.defineProperty; + var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; + var __getOwnPropNames2 = Object.getOwnPropertyNames; + var __hasOwnProp2 = Object.prototype.hasOwnProperty; + var __export2 = /* @__PURE__ */ __name((target, all) => { + for (var name in all) + __defProp2(target, name, { get: all[name], enumerable: true }); + }, "__export"); + var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames2(from)) + if (!__hasOwnProp2.call(to, key) && key !== except) + __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); } - duplex(method, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? 
_a : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise.then(delay(this.responseDelay, options.abort)).catch(() => { - }).then(() => this.streamResponses(method, outputStream, options.abort)).then(delay(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise.then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise.then(() => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = new TestInputStream(this.data, options.abort); - return new duplex_streaming_call_1.DuplexStreamingCall(method, requestHeaders, this.lastInput, headersPromise, outputStream, statusPromise, trailersPromise); + return to; + }, "__copyProps"); + var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); + var dist_src_exports = {}; + __export2(dist_src_exports, { + endpoint: /* @__PURE__ */ __name(() => endpoint, "endpoint") + }); + module2.exports = __toCommonJS2(dist_src_exports); + var import_universal_user_agent = require_dist_node(); + var VERSION3 = "9.0.5"; + var userAgent = `octokit-endpoint.js/${VERSION3} ${(0, import_universal_user_agent.getUserAgent)()}`; + var DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "" } }; - exports2.TestTransport = TestTransport; - TestTransport.defaultHeaders = { - responseHeader: "test" - }; - TestTransport.defaultStatus = { - code: "OK", - detail: "all good" - }; - TestTransport.defaultTrailers = { - responseTrailer: "test" - }; - function delay(ms, abort) { - return (v) => new Promise((resolve, reject) => { - if (abort === null || abort === void 0 ? 
void 0 : abort.aborted) { - reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); + function lowercaseKeys(object) { + if (!object) { + return {}; + } + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); + } + __name(lowercaseKeys, "lowercaseKeys"); + function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); + } + __name(isPlainObject, "isPlainObject"); + function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach((key) => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); } else { - const id = setTimeout(() => resolve(v), ms); - if (abort) { - abort.addEventListener("abort", (ev) => { - clearTimeout(id); - reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); - }); - } + Object.assign(result, { [key]: options[key] }); } }); + return result; } - __name(delay, "delay"); - var TestInputStream = class { - static { - __name(this, "TestInputStream"); - } - constructor(data, abort) { - this._completed = false; - this._sent = []; - this.data = data; - this.abort = abort; - } - get sent() { - return this._sent; - } - get completed() { - return this._completed; - } - send(message) { - if (this.data.inputMessage instanceof rpc_error_1.RpcError) { - return Promise.reject(this.data.inputMessage); + __name(mergeDeep, "mergeDeep"); + function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === void 0) { + delete obj[key]; } - const delayMs = this.data.inputMessage === void 0 ? 10 : this.data.inputMessage; - return Promise.resolve(void 0).then(() => { - this._sent.push(message); - }).then(delay(delayMs, this.abort)); } - complete() { - if (this.data.inputComplete instanceof rpc_error_1.RpcError) { - return Promise.reject(this.data.inputComplete); - } - const delayMs = this.data.inputComplete === void 0 ? 10 : this.data.inputComplete; - return Promise.resolve(void 0).then(() => { - this._completed = true; - }).then(delay(delayMs, this.abort)); + return obj; + } + __name(removeUndefinedProperties, "removeUndefinedProperties"); + function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? 
{ method, url } : { url: method }, options); + } else { + options = Object.assign({}, route); } - }; - } -}); - -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-interceptor.js -var require_rpc_interceptor = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-interceptor.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.stackDuplexStreamingInterceptors = exports2.stackClientStreamingInterceptors = exports2.stackServerStreamingInterceptors = exports2.stackUnaryInterceptors = exports2.stackIntercept = void 0; - var runtime_1 = require_commonjs7(); - function stackIntercept(kind, transport, method, options, input) { - var _a, _b, _c, _d; - if (kind == "unary") { - let tail = /* @__PURE__ */ __name((mtd, inp, opt) => transport.unary(mtd, inp, opt), "tail"); - for (const curr of ((_a = options.interceptors) !== null && _a !== void 0 ? _a : []).filter((i) => i.interceptUnary).reverse()) { - const next = tail; - tail = /* @__PURE__ */ __name((mtd, inp, opt) => curr.interceptUnary(next, mtd, inp, opt), "tail"); + options.headers = lowercaseKeys(options.headers); + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); + if (options.url === "/graphql") { + if (defaults && defaults.mediaType.previews?.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( + (preview) => !mergedOptions.mediaType.previews.includes(preview) + ).concat(mergedOptions.mediaType.previews); } - return tail(method, input, options); + mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); } - if (kind == "serverStreaming") { - let tail = /* @__PURE__ */ __name((mtd, inp, opt) => transport.serverStreaming(mtd, inp, opt), "tail"); - for (const curr of ((_b = options.interceptors) !== null && _b !== void 0 ? _b : []).filter((i) => i.interceptServerStreaming).reverse()) { - const next = tail; - tail = /* @__PURE__ */ __name((mtd, inp, opt) => curr.interceptServerStreaming(next, mtd, inp, opt), "tail"); + return mergedOptions; + } + __name(merge, "merge"); + function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? "&" : "?"; + const names = Object.keys(parameters); + if (names.length === 0) { + return url; + } + return url + separator + names.map((name) => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); } - return tail(method, input, options); + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); + } + __name(addQueryParameters, "addQueryParameters"); + var urlVariableRegex = /\{[^}]+\}/g; + function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); + } + __name(removeNonChars, "removeNonChars"); + function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + if (!matches) { + return []; } - if (kind == "clientStreaming") { - let tail = /* @__PURE__ */ __name((mtd, opt) => transport.clientStreaming(mtd, opt), "tail"); - for (const curr of ((_c = options.interceptors) !== null && _c !== void 0 ? 
_c : []).filter((i) => i.interceptClientStreaming).reverse()) { - const next = tail; - tail = /* @__PURE__ */ __name((mtd, opt) => curr.interceptClientStreaming(next, mtd, opt), "tail"); + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); + } + __name(extractUrlVariableNames, "extractUrlVariableNames"); + function omit(object, keysToOmit) { + const result = { __proto__: null }; + for (const key of Object.keys(object)) { + if (keysToOmit.indexOf(key) === -1) { + result[key] = object[key]; } - return tail(method, options); } - if (kind == "duplex") { - let tail = /* @__PURE__ */ __name((mtd, opt) => transport.duplex(mtd, opt), "tail"); - for (const curr of ((_d = options.interceptors) !== null && _d !== void 0 ? _d : []).filter((i) => i.interceptDuplex).reverse()) { - const next = tail; - tail = /* @__PURE__ */ __name((mtd, opt) => curr.interceptDuplex(next, mtd, opt), "tail"); + return result; + } + __name(omit, "omit"); + function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); } - return tail(method, options); + return part; + }).join(""); + } + __name(encodeReserved, "encodeReserved"); + function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); + } + __name(encodeUnreserved, "encodeUnreserved"); + function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; } - runtime_1.assertNever(kind); } - __name(stackIntercept, "stackIntercept"); - exports2.stackIntercept = stackIntercept; - function stackUnaryInterceptors(transport, method, input, options) { - return stackIntercept("unary", transport, method, options, input); + __name(encodeValue, "encodeValue"); + function isDefined(value) { + return value !== void 0 && value !== null; } - __name(stackUnaryInterceptors, "stackUnaryInterceptors"); - exports2.stackUnaryInterceptors = stackUnaryInterceptors; - function stackServerStreamingInterceptors(transport, method, input, options) { - return stackIntercept("serverStreaming", transport, method, options, input); + __name(isDefined, "isDefined"); + function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; } - __name(stackServerStreamingInterceptors, "stackServerStreamingInterceptors"); - exports2.stackServerStreamingInterceptors = stackServerStreamingInterceptors; - function stackClientStreamingInterceptors(transport, method, options) { - return stackIntercept("clientStreaming", transport, method, options); + __name(isKeyOperator, "isKeyOperator"); + function getValues(context2, operator, key, modifier) { + var value = context2[key], result = []; + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + result.push( + encodeValue(operator, value, isKeyOperator(operator) ? key : "") + ); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + result.push( + encodeValue(operator, value2, isKeyOperator(operator) ? 
key : "") + ); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + tmp.push(encodeValue(operator, value2)); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + return result; } - __name(stackClientStreamingInterceptors, "stackClientStreamingInterceptors"); - exports2.stackClientStreamingInterceptors = stackClientStreamingInterceptors; - function stackDuplexStreamingInterceptors(transport, method, options) { - return stackIntercept("duplex", transport, method, options); + __name(getValues, "getValues"); + function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; } - __name(stackDuplexStreamingInterceptors, "stackDuplexStreamingInterceptors"); - exports2.stackDuplexStreamingInterceptors = stackDuplexStreamingInterceptors; - } -}); - -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-call-context.js -var require_server_call_context = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-call-context.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ServerCallContextController = void 0; - var ServerCallContextController = class { - static { - __name(this, "ServerCallContextController"); + __name(parseUrl, "parseUrl"); + function expand(template, context2) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + template = template.replace( + /\{([^\{\}]+)\}|([^\{\}]+)/g, + function(_2, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + expression.split(/,/g).forEach(function(variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context2, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator !== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + return (values.length !== 0 ? 
operator : "") + values.join(separator); + } else { + return values.join(","); + } + } else { + return encodeReserved(literal); + } + } + ); + if (template === "/") { + return template; + } else { + return template.replace(/\/$/, ""); } - constructor(method, headers, deadline, sendResponseHeadersFn, defaultStatus = { code: "OK", detail: "" }) { - this._cancelled = false; - this._listeners = []; - this.method = method; - this.headers = headers; - this.deadline = deadline; - this.trailers = {}; - this._sendRH = sendResponseHeadersFn; - this.status = defaultStatus; + } + __name(expand, "expand"); + function parse2(options) { + let method = options.method.toUpperCase(); + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType" + ]); + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + if (!/^http/.test(url)) { + url = options.baseUrl + url; } - /** - * Set the call cancelled. - * - * Invokes all callbacks registered with onCancel() and - * sets `cancelled = true`. - */ - notifyCancelled() { - if (!this._cancelled) { - this._cancelled = true; - for (let l of this._listeners) { - l(); + const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + if (!isBinaryRequest) { + if (options.mediaType.format) { + headers.accept = headers.accept.split(/,/).map( + (format) => format.replace( + /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, + `application/vnd$1$2.${options.mediaType.format}` + ) + ).join(","); + } + if (url.endsWith("/graphql")) { + if (options.mediaType.previews?.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { + const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); } } } - /** - * Send response headers. - */ - sendResponseHeaders(data) { - this._sendRH(data); + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } + } } - /** - * Is the call cancelled? - * - * When the client closes the connection before the server - * is done, the call is cancelled. - * - * If you want to cancel a request on the server, throw a - * RpcError with the CANCELLED status code. - */ - get cancelled() { - return this._cancelled; + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; } - /** - * Add a callback for cancellation. 
- */ - onCancel(callback) { - const l = this._listeners; - l.push(callback); - return () => { - let i = l.indexOf(callback); - if (i >= 0) - l.splice(i, 1); - }; + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; } - }; - exports2.ServerCallContextController = ServerCallContextController; - } -}); - -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js -var require_commonjs8 = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - var service_type_1 = require_service_type(); - Object.defineProperty(exports2, "ServiceType", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return service_type_1.ServiceType; - }, "get") }); - var reflection_info_1 = require_reflection_info2(); - Object.defineProperty(exports2, "readMethodOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.readMethodOptions; - }, "get") }); - Object.defineProperty(exports2, "readMethodOption", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.readMethodOption; - }, "get") }); - Object.defineProperty(exports2, "readServiceOption", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.readServiceOption; - }, "get") }); - var rpc_error_1 = require_rpc_error(); - Object.defineProperty(exports2, "RpcError", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return rpc_error_1.RpcError; - }, "get") }); - var rpc_options_1 = require_rpc_options(); - Object.defineProperty(exports2, "mergeRpcOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return rpc_options_1.mergeRpcOptions; - }, "get") }); - var rpc_output_stream_1 = require_rpc_output_stream(); - Object.defineProperty(exports2, "RpcOutputStreamController", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return rpc_output_stream_1.RpcOutputStreamController; - }, "get") }); - var test_transport_1 = require_test_transport(); - Object.defineProperty(exports2, "TestTransport", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return test_transport_1.TestTransport; - }, "get") }); - var deferred_1 = require_deferred(); - Object.defineProperty(exports2, "Deferred", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return deferred_1.Deferred; - }, "get") }); - Object.defineProperty(exports2, "DeferredState", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return deferred_1.DeferredState; - }, "get") }); - var duplex_streaming_call_1 = require_duplex_streaming_call(); - Object.defineProperty(exports2, "DuplexStreamingCall", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return duplex_streaming_call_1.DuplexStreamingCall; - }, "get") }); - var client_streaming_call_1 = require_client_streaming_call(); - Object.defineProperty(exports2, "ClientStreamingCall", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return client_streaming_call_1.ClientStreamingCall; - }, "get") }); - var server_streaming_call_1 = require_server_streaming_call(); - Object.defineProperty(exports2, "ServerStreamingCall", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return server_streaming_call_1.ServerStreamingCall; - }, "get") }); - var unary_call_1 = require_unary_call(); - Object.defineProperty(exports2, "UnaryCall", { enumerable: true, get: /* @__PURE__ */ __name(function() { - 
return unary_call_1.UnaryCall; - }, "get") }); - var rpc_interceptor_1 = require_rpc_interceptor(); - Object.defineProperty(exports2, "stackIntercept", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return rpc_interceptor_1.stackIntercept; - }, "get") }); - Object.defineProperty(exports2, "stackDuplexStreamingInterceptors", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return rpc_interceptor_1.stackDuplexStreamingInterceptors; - }, "get") }); - Object.defineProperty(exports2, "stackClientStreamingInterceptors", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return rpc_interceptor_1.stackClientStreamingInterceptors; - }, "get") }); - Object.defineProperty(exports2, "stackServerStreamingInterceptors", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return rpc_interceptor_1.stackServerStreamingInterceptors; - }, "get") }); - Object.defineProperty(exports2, "stackUnaryInterceptors", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return rpc_interceptor_1.stackUnaryInterceptors; - }, "get") }); - var server_call_context_1 = require_server_call_context(); - Object.defineProperty(exports2, "ServerCallContextController", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return server_call_context_1.ServerCallContextController; - }, "get") }); + return Object.assign( + { method, url, headers }, + typeof body !== "undefined" ? { body } : null, + options.request ? { request: options.request } : null + ); + } + __name(parse2, "parse"); + function endpointWithDefaults(defaults, route, options) { + return parse2(merge(defaults, route, options)); + } + __name(endpointWithDefaults, "endpointWithDefaults"); + function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS2 = merge(oldDefaults, newDefaults); + const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); + return Object.assign(endpoint2, { + DEFAULTS: DEFAULTS2, + defaults: withDefaults.bind(null, DEFAULTS2), + merge: merge.bind(null, DEFAULTS2), + parse: parse2 + }); + } + __name(withDefaults, "withDefaults"); + var endpoint = withDefaults(null, DEFAULTS); } }); -// ../node_modules/@actions/cache/lib/generated/google/protobuf/timestamp.js -var require_timestamp = __commonJS({ - "../node_modules/@actions/cache/lib/generated/google/protobuf/timestamp.js"(exports2) { +// ../node_modules/deprecation/dist-node/index.js +var require_dist_node3 = __commonJS({ + "../node_modules/deprecation/dist-node/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Timestamp = void 0; - var runtime_1 = require_commonjs7(); - var runtime_2 = require_commonjs7(); - var runtime_3 = require_commonjs7(); - var runtime_4 = require_commonjs7(); - var runtime_5 = require_commonjs7(); - var runtime_6 = require_commonjs7(); - var runtime_7 = require_commonjs7(); - var Timestamp$Type = class extends runtime_7.MessageType { + var Deprecation = class extends Error { static { - __name(this, "Timestamp$Type"); - } - constructor() { - super("google.protobuf.Timestamp", [ - { - no: 1, - name: "seconds", - kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - }, - { - no: 2, - name: "nanos", - kind: "scalar", - T: 5 - /*ScalarType.INT32*/ - } - ]); - } - /** - * Creates a new `Timestamp` for the current time. 
- */ - now() { - const msg = this.create(); - const ms = Date.now(); - msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1e3)).toString(); - msg.nanos = ms % 1e3 * 1e6; - return msg; - } - /** - * Converts a `Timestamp` to a JavaScript Date. - */ - toDate(message) { - return new Date(runtime_6.PbLong.from(message.seconds).toNumber() * 1e3 + Math.ceil(message.nanos / 1e6)); - } - /** - * Converts a JavaScript Date to a `Timestamp`. - */ - fromDate(date) { - const msg = this.create(); - const ms = date.getTime(); - msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1e3)).toString(); - msg.nanos = ms % 1e3 * 1e6; - return msg; - } - /** - * In JSON format, the `Timestamp` type is encoded as a string - * in the RFC 3339 format. - */ - internalJsonWrite(message, options) { - let ms = runtime_6.PbLong.from(message.seconds).toNumber() * 1e3; - if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z")) - throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive."); - if (message.nanos < 0) - throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative."); - let z = "Z"; - if (message.nanos > 0) { - let nanosStr = (message.nanos + 1e9).toString().substring(1); - if (nanosStr.substring(3) === "000000") - z = "." + nanosStr.substring(0, 3) + "Z"; - else if (nanosStr.substring(6) === "000") - z = "." + nanosStr.substring(0, 6) + "Z"; - else - z = "." + nanosStr + "Z"; - } - return new Date(ms).toISOString().replace(".000Z", z); - } - /** - * In JSON format, the `Timestamp` type is encoded as a string - * in the RFC 3339 format. - */ - internalJsonRead(json, options, target) { - if (typeof json !== "string") - throw new Error("Unable to parse Timestamp from JSON " + (0, runtime_5.typeofJsonValue)(json) + "."); - let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/); - if (!matches) - throw new Error("Unable to parse Timestamp from JSON. Invalid format."); - let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z")); - if (Number.isNaN(ms)) - throw new Error("Unable to parse Timestamp from JSON. Invalid value."); - if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z")) - throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive."); - if (!target) - target = this.create(); - target.seconds = runtime_6.PbLong.from(ms / 1e3).toString(); - target.nanos = 0; - if (matches[7]) - target.nanos = parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1e9; - return target; - } - create(value) { - const message = { seconds: "0", nanos: 0 }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; + __name(this, "Deprecation"); } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* int64 seconds */ - 1: - message.seconds = reader.int64().toString(); - break; - case /* int32 nanos */ - 2: - message.nanos = reader.int32(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } + constructor(message) { + super(message); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.seconds !== "0") - writer.tag(1, runtime_1.WireType.Varint).int64(message.seconds); - if (message.nanos !== 0) - writer.tag(2, runtime_1.WireType.Varint).int32(message.nanos); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + this.name = "Deprecation"; } }; - exports2.Timestamp = new Timestamp$Type(); + exports2.Deprecation = Deprecation; } }); -// ../node_modules/@actions/cache/lib/generated/results/entities/v1/cacheentry.js -var require_cacheentry = __commonJS({ - "../node_modules/@actions/cache/lib/generated/results/entities/v1/cacheentry.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.CacheEntry = void 0; - var runtime_1 = require_commonjs7(); - var runtime_2 = require_commonjs7(); - var runtime_3 = require_commonjs7(); - var runtime_4 = require_commonjs7(); - var runtime_5 = require_commonjs7(); - var timestamp_1 = require_timestamp(); - var CacheEntry$Type = class extends runtime_5.MessageType { - static { - __name(this, "CacheEntry$Type"); - } - constructor() { - super("github.actions.results.entities.v1.CacheEntry", [ - { - no: 1, - name: "key", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 2, - name: "hash", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 3, - name: "size_bytes", - kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - }, - { - no: 4, - name: "scope", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 5, - name: "version", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { no: 6, name: "created_at", kind: "message", T: /* @__PURE__ */ __name(() => timestamp_1.Timestamp, "T") }, - { no: 7, name: "last_accessed_at", kind: "message", T: /* @__PURE__ */ __name(() => timestamp_1.Timestamp, "T") }, - { no: 8, name: "expires_at", kind: "message", T: /* @__PURE__ */ __name(() => timestamp_1.Timestamp, "T") } - ]); - } - create(value) { - const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* string key */ - 1: - message.key = reader.string(); - break; - case /* string hash */ - 2: - message.hash = reader.string(); - break; - case /* int64 size_bytes */ - 3: - message.sizeBytes = reader.int64().toString(); - break; - case /* string scope */ - 4: - message.scope = reader.string(); - break; - case /* string version */ - 5: - message.version = reader.string(); - break; - case /* google.protobuf.Timestamp created_at */ - 6: - message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt); - break; - case /* google.protobuf.Timestamp last_accessed_at */ - 7: - message.lastAccessedAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt); - break; - case /* google.protobuf.Timestamp expires_at */ - 8: - message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } +// ../node_modules/wrappy/wrappy.js +var require_wrappy = __commonJS({ + "../node_modules/wrappy/wrappy.js"(exports2, module2) { + module2.exports = wrappy; + function wrappy(fn, cb) { + if (fn && cb) return wrappy(fn)(cb); + if (typeof fn !== "function") + throw new TypeError("need wrapper function"); + Object.keys(fn).forEach(function(k) { + wrapper[k] = fn[k]; + }); + return wrapper; + function wrapper() { + var args = new Array(arguments.length); + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i]; } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.key !== "") - writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.key); - if (message.hash !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.hash); - if (message.sizeBytes !== "0") - writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes); - if (message.scope !== "") - writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope); - if (message.version !== "") - writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version); - if (message.createdAt) - timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join(); - if (message.lastAccessedAt) - timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join(); - if (message.expiresAt) - timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + var ret = fn.apply(this, args); + var cb2 = args[args.length - 1]; + if (typeof ret === "function" && ret !== cb2) { + Object.keys(cb2).forEach(function(k) { + ret[k] = cb2[k]; + }); + } + return ret; } - }; - exports2.CacheEntry = new CacheEntry$Type(); + __name(wrapper, "wrapper"); + } + __name(wrappy, "wrappy"); } }); -// ../node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.js -var require_cachescope = __commonJS({ - "../node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.js"(exports2) { +// ../node_modules/once/once.js +var require_once = __commonJS({ + "../node_modules/once/once.js"(exports2, module2) { + var wrappy = require_wrappy(); + module2.exports = wrappy(once); + module2.exports.strict = wrappy(onceStrict); + once.proto = once(function() { + Object.defineProperty(Function.prototype, "once", { + value: /* @__PURE__ */ __name(function() { + return once(this); + }, "value"), + configurable: true + }); + Object.defineProperty(Function.prototype, "onceStrict", { + value: /* @__PURE__ */ __name(function() { + return onceStrict(this); + }, "value"), + configurable: true + }); + }); + function once(fn) { + var f = /* @__PURE__ */ __name(function() { + if (f.called) return f.value; + f.called = true; + return f.value = fn.apply(this, arguments); + }, "f"); + f.called = false; + return f; + } + __name(once, "once"); + function onceStrict(fn) { + var f = /* @__PURE__ */ __name(function() { + if (f.called) + throw new Error(f.onceError); + f.called = true; + return f.value = fn.apply(this, arguments); + }, "f"); + var name = fn.name || "Function wrapped with `once`"; + f.onceError = name + " shouldn't be called more than once"; + f.called = false; + return f; + } + __name(onceStrict, "onceStrict"); + } +}); + +// ../node_modules/@octokit/request-error/dist-node/index.js +var require_dist_node4 = __commonJS({ + "../node_modules/@octokit/request-error/dist-node/index.js"(exports2, module2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.CacheScope = void 0; - var runtime_1 = require_commonjs7(); - var runtime_2 = require_commonjs7(); - var runtime_3 = require_commonjs7(); - var runtime_4 = require_commonjs7(); - var runtime_5 = require_commonjs7(); - var CacheScope$Type = class extends runtime_5.MessageType { + var __create2 = Object.create; + var __defProp2 = Object.defineProperty; + var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; + var __getOwnPropNames2 = Object.getOwnPropertyNames; + var __getProtoOf2 = Object.getPrototypeOf; + var __hasOwnProp2 = Object.prototype.hasOwnProperty; + var __export2 = /* @__PURE__ */ __name((target, all) => { + for (var name in all) + __defProp2(target, name, { get: all[name], enumerable: true }); + }, "__export"); + var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames2(from)) + if (!__hasOwnProp2.call(to, key) && key !== except) + __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); + } + return to; + }, "__copyProps"); + var __toESM2 = /* @__PURE__ */ __name((mod, isNodeMode, target) => (target = mod != null ? 
__create2(__getProtoOf2(mod)) : {}, __copyProps2( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp2(target, "default", { value: mod, enumerable: true }) : target, + mod + )), "__toESM"); + var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); + var dist_src_exports = {}; + __export2(dist_src_exports, { + RequestError: /* @__PURE__ */ __name(() => RequestError, "RequestError") + }); + module2.exports = __toCommonJS2(dist_src_exports); + var import_deprecation = require_dist_node3(); + var import_once = __toESM2(require_once()); + var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); + var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); + var RequestError = class extends Error { static { - __name(this, "CacheScope$Type"); + __name(this, "RequestError"); } - constructor() { - super("github.actions.results.entities.v1.CacheScope", [ - { - no: 1, - name: "scope", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 2, - name: "permission", - kind: "scalar", - T: 3 - /*ScalarType.INT64*/ + constructor(message, statusCode, options) { + super(message); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "HttpError"; + this.status = statusCode; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace( + / .*$/, + " [REDACTED]" + ) + }); + } + requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + Object.defineProperty(this, "code", { + get() { + logOnceCode( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.code` is deprecated, use `error.status`." + ) + ); + return statusCode; } - ]); - } - create(value) { - const message = { scope: "", permission: "0" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* string scope */ - 1: - message.scope = reader.string(); - break; - case /* int64 permission */ - 2: - message.permission = reader.int64().toString(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`." + ) + ); + return headers || {}; } - } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.scope !== "") - writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.scope); - if (message.permission !== "0") - writer.tag(2, runtime_1.WireType.Varint).int64(message.permission); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + }); } }; - exports2.CacheScope = new CacheScope$Type(); } }); -// ../node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.js -var require_cachemetadata = __commonJS({ - "../node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.js"(exports2) { +// ../node_modules/@actions/github/node_modules/@octokit/request/dist-node/index.js +var require_dist_node5 = __commonJS({ + "../node_modules/@actions/github/node_modules/@octokit/request/dist-node/index.js"(exports2, module2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.CacheMetadata = void 0; - var runtime_1 = require_commonjs7(); - var runtime_2 = require_commonjs7(); - var runtime_3 = require_commonjs7(); - var runtime_4 = require_commonjs7(); - var runtime_5 = require_commonjs7(); - var cachescope_1 = require_cachescope(); - var CacheMetadata$Type = class extends runtime_5.MessageType { - static { - __name(this, "CacheMetadata$Type"); + var __defProp2 = Object.defineProperty; + var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; + var __getOwnPropNames2 = Object.getOwnPropertyNames; + var __hasOwnProp2 = Object.prototype.hasOwnProperty; + var __export2 = /* @__PURE__ */ __name((target, all) => { + for (var name in all) + __defProp2(target, name, { get: all[name], enumerable: true }); + }, "__export"); + var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames2(from)) + if (!__hasOwnProp2.call(to, key) && key !== except) + __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); } - constructor() { - super("github.actions.results.entities.v1.CacheMetadata", [ - { - no: 1, - name: "repository_id", - kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - }, - { no: 2, name: "scope", kind: "message", repeat: 1, T: /* @__PURE__ */ __name(() => cachescope_1.CacheScope, "T") } - ]); + return to; + }, "__copyProps"); + var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); + var dist_src_exports = {}; + __export2(dist_src_exports, { + request: /* @__PURE__ */ __name(() => request, "request") + }); + module2.exports = __toCommonJS2(dist_src_exports); + var import_endpoint = require_dist_node2(); + var import_universal_user_agent = require_dist_node(); + var VERSION3 = "8.4.0"; + function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if 
(proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); + } + __name(isPlainObject, "isPlainObject"); + var import_request_error = require_dist_node4(); + function getBufferResponse(response) { + return response.arrayBuffer(); + } + __name(getBufferResponse, "getBufferResponse"); + function fetchWrapper(requestOptions) { + var _a, _b, _c, _d; + const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; + const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false; + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); } - create(value) { - const message = { repositoryId: "0", scope: [] }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; + let headers = {}; + let status; + let url; + let { fetch: fetch2 } = globalThis; + if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) { + fetch2 = requestOptions.request.fetch; } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* int64 repository_id */ - 1: - message.repositoryId = reader.int64().toString(); - break; - case /* repeated github.actions.results.entities.v1.CacheScope scope */ - 2: - message.scope.push(cachescope_1.CacheScope.internalBinaryRead(reader, reader.uint32(), options)); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + if (!fetch2) { + throw new Error( + "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" + ); + } + return fetch2(requestOptions.url, { + method: requestOptions.method, + body: requestOptions.body, + redirect: (_c = requestOptions.request) == null ? void 0 : _c.redirect, + headers: requestOptions.headers, + signal: (_d = requestOptions.request) == null ? void 0 : _d.signal, + // duplex must be set if request.body is ReadableStream or Async Iterables. + // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. + ...requestOptions.body && { duplex: "half" } + }).then(async (response) => { + url = response.url; + status = response.status; + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn( + `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. 
See ${deprecationLink}` : ""}` + ); + } + if (status === 204 || status === 205) { + return; + } + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; } + throw new import_request_error.RequestError(response.statusText, status, { + response: { + url, + status, + headers, + data: void 0 + }, + request: requestOptions + }); } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.repositoryId !== "0") - writer.tag(1, runtime_1.WireType.Varint).int64(message.repositoryId); - for (let i = 0; i < message.scope.length; i++) - cachescope_1.CacheScope.internalBinaryWrite(message.scope[i], writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + if (status === 304) { + throw new import_request_error.RequestError("Not modified", status, { + response: { + url, + status, + headers, + data: await getResponseData(response) + }, + request: requestOptions + }); + } + if (status >= 400) { + const data = await getResponseData(response); + const error = new import_request_error.RequestError(toErrorMessage(data), status, { + response: { + url, + status, + headers, + data + }, + request: requestOptions + }); + throw error; + } + return parseSuccessResponseBody ? await getResponseData(response) : response.body; + }).then((data) => { + return { + status, + url, + headers, + data + }; + }).catch((error) => { + if (error instanceof import_request_error.RequestError) + throw error; + else if (error.name === "AbortError") + throw error; + let message = error.message; + if (error.name === "TypeError" && "cause" in error) { + if (error.cause instanceof Error) { + message = error.cause.message; + } else if (typeof error.cause === "string") { + message = error.cause; + } + } + throw new import_request_error.RequestError(message, 500, { + request: requestOptions + }); + }); + } + __name(fetchWrapper, "fetchWrapper"); + async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json().catch(() => response.text()).catch(() => ""); } - }; - exports2.CacheMetadata = new CacheMetadata$Type(); - } -}); - -// ../node_modules/@actions/cache/lib/generated/results/api/v1/cache.js -var require_cache2 = __commonJS({ - "../node_modules/@actions/cache/lib/generated/results/api/v1/cache.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.CacheService = exports2.LookupCacheEntryResponse = exports2.LookupCacheEntryRequest = exports2.ListCacheEntriesResponse = exports2.ListCacheEntriesRequest = exports2.DeleteCacheEntryResponse = exports2.DeleteCacheEntryRequest = exports2.GetCacheEntryDownloadURLResponse = exports2.GetCacheEntryDownloadURLRequest = exports2.FinalizeCacheEntryUploadResponse = exports2.FinalizeCacheEntryUploadRequest = exports2.CreateCacheEntryResponse = exports2.CreateCacheEntryRequest = void 0; - var runtime_rpc_1 = require_commonjs8(); - var runtime_1 = require_commonjs7(); - var runtime_2 = require_commonjs7(); - var runtime_3 = require_commonjs7(); - var runtime_4 = require_commonjs7(); - var runtime_5 = require_commonjs7(); - var cacheentry_1 = require_cacheentry(); - var cachemetadata_1 = require_cachemetadata(); - var CreateCacheEntryRequest$Type = class extends runtime_5.MessageType { - static { - __name(this, 
"CreateCacheEntryRequest$Type"); + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); } - constructor() { - super("github.actions.results.api.v1.CreateCacheEntryRequest", [ - { no: 1, name: "metadata", kind: "message", T: /* @__PURE__ */ __name(() => cachemetadata_1.CacheMetadata, "T") }, - { - no: 2, - name: "key", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 3, - name: "version", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - } - ]); + return getBufferResponse(response); + } + __name(getResponseData, "getResponseData"); + function toErrorMessage(data) { + if (typeof data === "string") + return data; + let suffix; + if ("documentation_url" in data) { + suffix = ` - ${data.documentation_url}`; + } else { + suffix = ""; } - create(value) { - const message = { key: "", version: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}${suffix}`; + } + return `${data.message}${suffix}`; } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* github.actions.results.entities.v1.CacheMetadata metadata */ - 1: - message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); - break; - case /* string key */ - 2: - message.key = reader.string(); - break; - case /* string version */ - 3: - message.version = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } + return `Unknown error: ${JSON.stringify(data)}`; + } + __name(toErrorMessage, "toErrorMessage"); + function withDefaults(oldEndpoint, newDefaults) { + const endpoint2 = oldEndpoint.defaults(newDefaults); + const newApi = /* @__PURE__ */ __name(function(route, parameters) { + const endpointOptions = endpoint2.merge(route, parameters); + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint2.parse(endpointOptions)); } - return message; + const request2 = /* @__PURE__ */ __name((route2, parameters2) => { + return fetchWrapper( + endpoint2.parse(endpoint2.merge(route2, parameters2)) + ); + }, "request2"); + Object.assign(request2, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); + return endpointOptions.request.hook(request2, endpointOptions); + }, "newApi"); + return Object.assign(newApi, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); + } + __name(withDefaults, "withDefaults"); + var request = withDefaults(import_endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION3} ${(0, import_universal_user_agent.getUserAgent)()}` } - internalBinaryWrite(message, writer, options) { - if (message.metadata) - cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); - if (message.key !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); - if (message.version !== "") - writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.version); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + }); + } +}); + +// ../node_modules/@actions/github/node_modules/@octokit/graphql/dist-node/index.js +var require_dist_node6 = __commonJS({ + "../node_modules/@actions/github/node_modules/@octokit/graphql/dist-node/index.js"(exports2, module2) { + "use strict"; + var __defProp2 = Object.defineProperty; + var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; + var __getOwnPropNames2 = Object.getOwnPropertyNames; + var __hasOwnProp2 = Object.prototype.hasOwnProperty; + var __export2 = /* @__PURE__ */ __name((target, all) => { + for (var name in all) + __defProp2(target, name, { get: all[name], enumerable: true }); + }, "__export"); + var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames2(from)) + if (!__hasOwnProp2.call(to, key) && key !== except) + __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); } - }; - exports2.CreateCacheEntryRequest = new CreateCacheEntryRequest$Type(); - var CreateCacheEntryResponse$Type = class extends runtime_5.MessageType { + return to; + }, "__copyProps"); + var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); + var dist_src_exports = {}; + __export2(dist_src_exports, { + GraphqlResponseError: /* @__PURE__ */ __name(() => GraphqlResponseError, "GraphqlResponseError"), + graphql: /* @__PURE__ */ __name(() => graphql2, "graphql"), + withCustomRequest: /* @__PURE__ */ __name(() => withCustomRequest, "withCustomRequest") + }); + module2.exports = 
__toCommonJS2(dist_src_exports); + var import_request3 = require_dist_node5(); + var import_universal_user_agent = require_dist_node(); + var VERSION3 = "7.1.0"; + var import_request2 = require_dist_node5(); + var import_request = require_dist_node5(); + function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors: +` + data.errors.map((e) => ` - ${e.message}`).join("\n"); + } + __name(_buildMessageForResponseErrors, "_buildMessageForResponseErrors"); + var GraphqlResponseError = class extends Error { static { - __name(this, "CreateCacheEntryResponse$Type"); - } - constructor() { - super("github.actions.results.api.v1.CreateCacheEntryResponse", [ - { - no: 1, - name: "ok", - kind: "scalar", - T: 8 - /*ScalarType.BOOL*/ - }, - { - no: 2, - name: "signed_upload_url", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - } - ]); - } - create(value) { - const message = { ok: false, signedUploadUrl: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; + __name(this, "GraphqlResponseError"); } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* bool ok */ - 1: - message.ok = reader.bool(); - break; - case /* string signed_upload_url */ - 2: - message.signedUploadUrl = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } + constructor(request2, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request2; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; + this.errors = response.errors; + this.data = response.data; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.ok !== false) - writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); - if (message.signedUploadUrl !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; } }; - exports2.CreateCacheEntryResponse = new CreateCacheEntryResponse$Type(); - var FinalizeCacheEntryUploadRequest$Type = class extends runtime_5.MessageType { - static { - __name(this, "FinalizeCacheEntryUploadRequest$Type"); - } - constructor() { - super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [ - { no: 1, name: "metadata", kind: "message", T: /* @__PURE__ */ __name(() => cachemetadata_1.CacheMetadata, "T") }, - { - no: 2, - name: "key", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 3, - name: "size_bytes", - kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - }, - { - no: 4, - name: "version", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - } - ]); + var NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType" + ]; + var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; + var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; + function graphql(request2, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject( + new Error(`[@octokit/graphql] "query" cannot be used as variable name`) + ); + } + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject( + new Error( + `[@octokit/graphql] "${key}" cannot be used as variable name` + ) + ); + } } - create(value) { - const message = { key: "", sizeBytes: "0", version: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; + const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query; + const requestOptions = Object.keys( + parsedOptions + ).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + if (!result.variables) { + result.variables = {}; + } + result.variables[key] = parsedOptions[key]; + return result; + }, {}); + const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* github.actions.results.entities.v1.CacheMetadata metadata */ - 1: - message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); - break; - case /* string key */ - 2: - message.key = reader.string(); - break; - case /* int64 size_bytes */ - 3: - message.sizeBytes = reader.int64().toString(); - break; - case /* string version */ - 4: - message.version = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + return request2(requestOptions).then((response) => { + if (response.data.errors) { + const headers = {}; + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; } + throw new GraphqlResponseError( + requestOptions, + headers, + response.data + ); } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.metadata) - cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); - if (message.key !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); - if (message.sizeBytes !== "0") - writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes); - if (message.version !== "") - writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + return response.data.data; + }); + } + __name(graphql, "graphql"); + function withDefaults(request2, newDefaults) { + const newRequest = request2.defaults(newDefaults); + const newApi = /* @__PURE__ */ __name((query, options) => { + return graphql(newRequest, query, options); + }, "newApi"); + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: newRequest.endpoint + }); + } + __name(withDefaults, "withDefaults"); + var graphql2 = withDefaults(import_request3.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION3} ${(0, import_universal_user_agent.getUserAgent)()}` + }, + method: "POST", + url: "/graphql" + }); + function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); + } + __name(withCustomRequest, "withCustomRequest"); + } +}); + +// ../node_modules/@actions/github/node_modules/@octokit/auth-token/dist-node/index.js +var require_dist_node7 = __commonJS({ + "../node_modules/@actions/github/node_modules/@octokit/auth-token/dist-node/index.js"(exports2, module2) { + "use strict"; + var __defProp2 = Object.defineProperty; + var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; + var __getOwnPropNames2 = Object.getOwnPropertyNames; + var __hasOwnProp2 = Object.prototype.hasOwnProperty; + var __export2 = /* @__PURE__ */ __name((target, all) => { + for (var name in all) + __defProp2(target, name, { get: all[name], enumerable: true }); + }, "__export"); + var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames2(from)) + if (!__hasOwnProp2.call(to, key) && key !== except) + __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); } - }; - exports2.FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type(); - var FinalizeCacheEntryUploadResponse$Type = class extends runtime_5.MessageType { - static { - __name(this, "FinalizeCacheEntryUploadResponse$Type"); + return to; + }, "__copyProps"); + var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); + var dist_src_exports = {}; + __export2(dist_src_exports, { + createTokenAuth: /* @__PURE__ */ __name(() => createTokenAuth, "createTokenAuth") + }); 
+ module2.exports = __toCommonJS2(dist_src_exports); + var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; + var REGEX_IS_INSTALLATION = /^ghs_/; + var REGEX_IS_USER_TO_SERVER = /^ghu_/; + async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth"; + return { + type: "token", + token, + tokenType + }; + } + __name(auth, "auth"); + function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; } - constructor() { - super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [ - { - no: 1, - name: "ok", - kind: "scalar", - T: 8 - /*ScalarType.BOOL*/ - }, - { - no: 2, - name: "entry_id", - kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - } - ]); + return `token ${token}`; + } + __name(withAuthorizationPrefix, "withAuthorizationPrefix"); + async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); + } + __name(hook, "hook"); + var createTokenAuth = /* @__PURE__ */ __name(function createTokenAuth2(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); } - create(value) { - const message = { ok: false, entryId: "0" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; + if (typeof token !== "string") { + throw new Error( + "[@octokit/auth-token] Token passed to createTokenAuth is not a string" + ); } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* bool ok */ - 1: - message.ok = reader.bool(); - break; - case /* int64 entry_id */ - 2: - message.entryId = reader.int64().toString(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); + }, "createTokenAuth2"); + } +}); + +// ../node_modules/@actions/github/node_modules/@octokit/core/dist-node/index.js +var require_dist_node8 = __commonJS({ + "../node_modules/@actions/github/node_modules/@octokit/core/dist-node/index.js"(exports2, module2) { + "use strict"; + var __defProp2 = Object.defineProperty; + var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; + var __getOwnPropNames2 = Object.getOwnPropertyNames; + var __hasOwnProp2 = Object.prototype.hasOwnProperty; + var __export2 = /* @__PURE__ */ __name((target, all) => { + for (var name in all) + __defProp2(target, name, { get: all[name], enumerable: true }); + }, "__export"); + var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames2(from)) + if (!__hasOwnProp2.call(to, key) && key !== except) + __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); } - internalBinaryWrite(message, writer, options) { - if (message.ok !== false) - writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); - if (message.entryId !== "0") - writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + return to; + }, "__copyProps"); + var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); + var dist_src_exports = {}; + __export2(dist_src_exports, { + Octokit: /* @__PURE__ */ __name(() => Octokit, "Octokit") + }); + module2.exports = __toCommonJS2(dist_src_exports); + var import_universal_user_agent = require_dist_node(); + var import_before_after_hook = require_before_after_hook(); + var import_request = require_dist_node5(); + var import_graphql = require_dist_node6(); + var import_auth_token = require_dist_node7(); + var VERSION3 = "5.2.0"; + var noop = /* @__PURE__ */ __name(() => { + }, "noop"); + var consoleWarn = console.warn.bind(console); + var consoleError = console.error.bind(console); + var userAgentTrail = `octokit-core.js/${VERSION3} ${(0, import_universal_user_agent.getUserAgent)()}`; + var Octokit = class { + static { + __name(this, "Octokit"); } - }; - exports2.FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type(); - var GetCacheEntryDownloadURLRequest$Type = class extends runtime_5.MessageType { static { - __name(this, "GetCacheEntryDownloadURLRequest$Type"); + this.VERSION = VERSION3; } - constructor() { - super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [ - { no: 1, name: "metadata", kind: "message", T: /* @__PURE__ */ __name(() => cachemetadata_1.CacheMetadata, "T") }, - { - no: 2, - name: "key", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 3, - name: "restore_keys", - kind: "scalar", - repeat: 2, - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 4, - name: "version", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + static { + __name(this, "OctokitWithDefaults"); } - ]); 
- } - create(value) { - const message = { key: "", restoreKeys: [], version: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* github.actions.results.entities.v1.CacheMetadata metadata */ - 1: - message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); - break; - case /* string key */ - 2: - message.key = reader.string(); - break; - case /* repeated string restore_keys */ - 3: - message.restoreKeys.push(reader.string()); - break; - case /* string version */ - 4: - message.version = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + super( + Object.assign( + {}, + defaults, + options, + options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null + ) + ); } - } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.metadata) - cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); - if (message.key !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); - for (let i = 0; i < message.restoreKeys.length; i++) - writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]); - if (message.version !== "") - writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + }; + return OctokitWithDefaults; } - }; - exports2.GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type(); - var GetCacheEntryDownloadURLResponse$Type = class extends runtime_5.MessageType { static { - __name(this, "GetCacheEntryDownloadURLResponse$Type"); + this.plugins = []; } - constructor() { - super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [ - { - no: 1, - name: "ok", - kind: "scalar", - T: 8 - /*ScalarType.BOOL*/ - }, - { - no: 2, - name: "signed_download_url", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 3, - name: "matched_key", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
+ */ + static plugin(...newPlugins) { + const currentPlugins = this.plugins; + const NewOctokit = class extends this { + static { + __name(this, "NewOctokit"); } - ]); - } - create(value) { - const message = { ok: false, signedDownloadUrl: "", matchedKey: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; + static { + this.plugins = currentPlugins.concat( + newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) + ); + } + }; + return NewOctokit; } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* bool ok */ - 1: - message.ok = reader.bool(); - break; - case /* string signed_download_url */ - 2: - message.signedDownloadUrl = reader.string(); - break; - case /* string matched_key */ - 3: - message.matchedKey = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + constructor(options = {}) { + const hook = new import_before_after_hook.Collection(); + const requestDefaults = { + baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" } + }; + requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail; + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + this.request = import_request.request.defaults(requestDefaults); + this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); + this.log = Object.assign( + { + debug: noop, + info: noop, + warn: consoleWarn, + error: consoleError + }, + options.log + ); + this.hook = hook; + if (!options.authStrategy) { + if (!options.auth) { + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + const auth = (0, import_auth_token.createTokenAuth)(options.auth); + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy( + Object.assign( + { + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. 
+ octokit: this, + octokitOptions: otherOptions + }, + options.auth + ) + ); + hook.wrap("request", auth.hook); + this.auth = auth; + } + const classConstructor = this.constructor; + for (let i = 0; i < classConstructor.plugins.length; ++i) { + Object.assign(this, classConstructor.plugins[i](this, options)); } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.ok !== false) - writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); - if (message.signedDownloadUrl !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedDownloadUrl); - if (message.matchedKey !== "") - writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.matchedKey); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; } }; - exports2.GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type(); - var DeleteCacheEntryRequest$Type = class extends runtime_5.MessageType { - static { - __name(this, "DeleteCacheEntryRequest$Type"); + } +}); + +// ../node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js +var require_dist_node9 = __commonJS({ + "../node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js"(exports2, module2) { + "use strict"; + var __defProp2 = Object.defineProperty; + var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; + var __getOwnPropNames2 = Object.getOwnPropertyNames; + var __hasOwnProp2 = Object.prototype.hasOwnProperty; + var __export2 = /* @__PURE__ */ __name((target, all) => { + for (var name in all) + __defProp2(target, name, { get: all[name], enumerable: true }); + }, "__export"); + var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames2(from)) + if (!__hasOwnProp2.call(to, key) && key !== except) + __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); } - constructor() { - super("github.actions.results.api.v1.DeleteCacheEntryRequest", [ - { no: 1, name: "metadata", kind: "message", T: /* @__PURE__ */ __name(() => cachemetadata_1.CacheMetadata, "T") }, + return to; + }, "__copyProps"); + var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); + var dist_src_exports = {}; + __export2(dist_src_exports, { + legacyRestEndpointMethods: /* @__PURE__ */ __name(() => legacyRestEndpointMethods, "legacyRestEndpointMethods"), + restEndpointMethods: /* @__PURE__ */ __name(() => restEndpointMethods, "restEndpointMethods") + }); + module2.exports = __toCommonJS2(dist_src_exports); + var VERSION3 = "10.4.1"; + var Endpoints = { + actions: { + addCustomLabelsToSelfHostedRunnerForOrg: [ + "POST /orgs/{org}/actions/runners/{runner_id}/labels" + ], + addCustomLabelsToSelfHostedRunnerForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" + ], + cancelWorkflowRun: [ + "POST 
/repos/{owner}/{repo}/actions/runs/{run_id}/cancel" + ], + createEnvironmentVariable: [ + "POST /repositories/{repository_id}/environments/{environment_name}/variables" + ], + createOrUpdateEnvironmentSecret: [ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + createOrgVariable: ["POST /orgs/{org}/actions/variables"], + createRegistrationTokenForOrg: [ + "POST /orgs/{org}/actions/runners/registration-token" + ], + createRegistrationTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/registration-token" + ], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/remove-token" + ], + createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], + createWorkflowDispatch: [ + "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" + ], + deleteActionsCacheById: [ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" + ], + deleteActionsCacheByKey: [ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" + ], + deleteArtifact: [ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" + ], + deleteEnvironmentSecret: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + deleteEnvironmentVariable: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + deleteRepoVariable: [ + "DELETE /repos/{owner}/{repo}/actions/variables/{name}" + ], + deleteSelfHostedRunnerFromOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}" + ], + deleteSelfHostedRunnerFromRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: [ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + disableSelectedRepositoryGithubActionsOrganization: [ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + disableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" + ], + downloadArtifact: [ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" + ], + downloadJobLogsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" + ], + downloadWorkflowRunAttemptLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" + ], + downloadWorkflowRunLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + enableSelectedRepositoryGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + enableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" + ], + forceCancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" + ], + generateRunnerJitconfigForOrg: [ + "POST /orgs/{org}/actions/runners/generate-jitconfig" + ], + generateRunnerJitconfigForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" + ], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], + 
getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: [ + "GET /orgs/{org}/actions/cache/usage-by-repository" + ], + getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], + getAllowedActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/selected-actions" + ], + getAllowedActionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getCustomOidcSubClaimForRepo: [ + "GET /repos/{owner}/{repo}/actions/oidc/customization/sub" + ], + getEnvironmentPublicKey: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" + ], + getEnvironmentSecret: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + getEnvironmentVariable: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + getGithubActionsDefaultWorkflowPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions/workflow" + ], + getGithubActionsDefaultWorkflowPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/workflow" + ], + getGithubActionsPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions" + ], + getGithubActionsPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions" + ], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], + getPendingDeploymentsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + getRepoPermissions: [ + "GET /repos/{owner}/{repo}/actions/permissions", + {}, + { renamed: ["actions", "getGithubActionsPermissionsRepository"] } + ], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], + getReviewsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" + ], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowAccessToRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/access" + ], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" + ], + getWorkflowRunUsage: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" + ], + getWorkflowUsage: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" + ], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets" + ], + listEnvironmentVariables: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables" + ], + listJobsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" + ], + listJobsForWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" + ], + listLabelsForSelfHostedRunnerForOrg: [ + "GET 
/orgs/{org}/actions/runners/{runner_id}/labels" + ], + listLabelsForSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listOrgVariables: ["GET /orgs/{org}/actions/variables"], + listRepoOrganizationSecrets: [ + "GET /repos/{owner}/{repo}/actions/organization-secrets" + ], + listRepoOrganizationVariables: [ + "GET /repos/{owner}/{repo}/actions/organization-variables" + ], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/downloads" + ], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + listSelectedReposForOrgVariable: [ + "GET /orgs/{org}/actions/variables/{name}/repositories" + ], + listSelectedRepositoriesEnabledGithubActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/repositories" + ], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + ], + listWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" + ], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunJobForWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" + ], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + reRunWorkflowFailedJobs: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" + ], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" + ], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + removeCustomLabelFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" + ], + removeCustomLabelFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgVariable: [ + "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + reviewCustomGatesForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" + ], + reviewPendingDeploymentsForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + setAllowedActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/selected-actions" + ], + setAllowedActionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + setCustomLabelsForSelfHostedRunnerForOrg: [ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels" + ], + setCustomLabelsForSelfHostedRunnerForRepo: [ + "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + setCustomOidcSubClaimForRepo: [ + "PUT /repos/{owner}/{repo}/actions/oidc/customization/sub" + ], + setGithubActionsDefaultWorkflowPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/workflow" + ], + setGithubActionsDefaultWorkflowPermissionsRepository: [ + "PUT 
/repos/{owner}/{repo}/actions/permissions/workflow" + ], + setGithubActionsPermissionsOrganization: [ + "PUT /orgs/{org}/actions/permissions" + ], + setGithubActionsPermissionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories" + ], + setSelectedRepositoriesEnabledGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories" + ], + setWorkflowAccessToRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/access" + ], + updateEnvironmentVariable: [ + "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], + updateRepoVariable: [ + "PATCH /repos/{owner}/{repo}/actions/variables/{name}" + ] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: [ + "DELETE /notifications/threads/{thread_id}/subscription" + ], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: [ + "GET /notifications/threads/{thread_id}/subscription" + ], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: [ + "GET /users/{username}/events/orgs/{org}" + ], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: [ + "GET /users/{username}/received_events/public" + ], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/notifications" + ], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsDone: ["DELETE /notifications/threads/{thread_id}"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: [ + "PUT /notifications/threads/{thread_id}/subscription" + ], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] } + ], + addRepoToInstallationForAuthenticatedUser: [ + "PUT /user/installations/{installation_id}/repositories/{repository_id}" + ], + checkToken: ["POST 
/applications/{client_id}/token"], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: [ + "POST /app/installations/{installation_id}/access_tokens" + ], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}"], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app"], + getBySlug: ["GET /apps/{app_slug}"], + getInstallation: ["GET /app/installations/{installation_id}"], + getOrgInstallation: ["GET /orgs/{org}/installation"], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], + getSubscriptionPlanForAccount: [ + "GET /marketplace_listing/accounts/{account_id}" + ], + getSubscriptionPlanForAccountStubbed: [ + "GET /marketplace_listing/stubbed/accounts/{account_id}" + ], + getUserInstallation: ["GET /users/{username}/installation"], + getWebhookConfigForApp: ["GET /app/hook/config"], + getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: [ + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" + ], + listInstallationReposForAuthenticatedUser: [ + "GET /user/installations/{installation_id}/repositories" + ], + listInstallationRequestsForAuthenticatedApp: [ + "GET /app/installation-requests" + ], + listInstallations: ["GET /app/installations"], + listInstallationsForAuthenticatedUser: ["GET /user/installations"], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories"], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: [ + "GET /user/marketplace_purchases/stubbed" + ], + listWebhookDeliveries: ["GET /app/hook/deliveries"], + redeliverWebhookDelivery: [ + "POST /app/hook/deliveries/{delivery_id}/attempts" + ], + removeRepoFromInstallation: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}", + {}, + { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } + ], + removeRepoFromInstallationForAuthenticatedUser: [ + "DELETE /user/installations/{installation_id}/repositories/{repository_id}" + ], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + scopeToken: ["POST /applications/{client_id}/token/scoped"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: [ + "DELETE /app/installations/{installation_id}/suspended" + ], + updateWebhookConfigForApp: ["PATCH /app/hook/config"] + }, + billing: { + getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], + getGithubActionsBillingUser: [ + "GET /users/{username}/settings/billing/actions" + ], + getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], + getGithubPackagesBillingUser: [ + "GET /users/{username}/settings/billing/packages" + ], + getSharedStorageBillingOrg: [ + "GET /orgs/{org}/settings/billing/shared-storage" + ], + getSharedStorageBillingUser: [ + "GET /users/{username}/settings/billing/shared-storage" + ] + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs"], + createSuite: ["POST /repos/{owner}/{repo}/check-suites"], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], + getSuite: ["GET 
/repos/{owner}/{repo}/check-suites/{check_suite_id}"], + listAnnotations: [ + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" + ], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], + listForSuite: [ + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" + ], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], + rerequestRun: [ + "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" + ], + rerequestSuite: [ + "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" + ], + setSuitesPreferences: [ + "PATCH /repos/{owner}/{repo}/check-suites/preferences" + ], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] + }, + codeScanning: { + deleteAnalysis: [ + "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" + ], + getAlert: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", + {}, + { renamedParameters: { alert_id: "alert_number" } } + ], + getAnalysis: [ + "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" + ], + getCodeqlDatabase: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" + ], + getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], + getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" + ], + listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], + listAlertsInstances: [ + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + {}, + { renamed: ["codeScanning", "listAlertInstances"] } + ], + listCodeqlDatabases: [ + "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" + ], + listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" + ], + updateDefaultSetup: [ + "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" + ], + uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct"], + getConductCode: ["GET /codes_of_conduct/{key}"] + }, + codespaces: { + addRepositoryForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + checkPermissionsForDevcontainer: [ + "GET /repos/{owner}/{repo}/codespaces/permissions_check" + ], + codespaceMachinesForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/machines" + ], + createForAuthenticatedUser: ["POST /user/codespaces"], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + createOrUpdateSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}" + ], + createWithPrForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" + ], + createWithRepoForAuthenticatedUser: [ + "POST /repos/{owner}/{repo}/codespaces" + ], + deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], + deleteFromOrganization: [ + "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" + ], + deleteOrgSecret: ["DELETE 
/orgs/{org}/codespaces/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + deleteSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}" + ], + exportForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/exports" + ], + getCodespacesForUserInOrg: [ + "GET /orgs/{org}/members/{username}/codespaces" + ], + getExportDetailsForAuthenticatedUser: [ + "GET /user/codespaces/{codespace_name}/exports/{export_id}" + ], + getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], + getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], + getPublicKeyForAuthenticatedUser: [ + "GET /user/codespaces/secrets/public-key" + ], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" + ], + getSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}" + ], + listDevcontainersInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/devcontainers" + ], + listForAuthenticatedUser: ["GET /user/codespaces"], + listInOrganization: [ + "GET /orgs/{org}/codespaces", + {}, + { renamedParameters: { org_id: "org" } } + ], + listInRepositoryForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces" + ], + listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], + listRepositoriesForSecretForAuthenticatedUser: [ + "GET /user/codespaces/secrets/{secret_name}/repositories" + ], + listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + preFlightWithRepoForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/new" + ], + publishForAuthenticatedUser: [ + "POST /user/codespaces/{codespace_name}/publish" + ], + removeRepositoryForSecretForAuthenticatedUser: [ + "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" + ], + repoMachinesForAuthenticatedUser: [ + "GET /repos/{owner}/{repo}/codespaces/machines" + ], + setRepositoriesForSecretForAuthenticatedUser: [ + "PUT /user/codespaces/secrets/{secret_name}/repositories" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" + ], + startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], + stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], + stopInOrganization: [ + "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" + ], + updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] + }, + copilot: { + addCopilotSeatsForTeams: [ + "POST /orgs/{org}/copilot/billing/selected_teams" + ], + addCopilotSeatsForUsers: [ + "POST /orgs/{org}/copilot/billing/selected_users" + ], + cancelCopilotSeatAssignmentForTeams: [ + "DELETE /orgs/{org}/copilot/billing/selected_teams" + ], + cancelCopilotSeatAssignmentForUsers: [ + "DELETE /orgs/{org}/copilot/billing/selected_users" + ], + getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"], + getCopilotSeatDetailsForUser: [ + "GET /orgs/{org}/members/{username}/copilot" + ], + listCopilotSeats: ["GET 
/orgs/{org}/copilot/billing/seats"] + }, + dependabot: { + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + createOrUpdateOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}" + ], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], + getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], + getRepoPublicKey: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" + ], + getRepoSecret: [ + "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/dependabot/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], + listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" + ], + setSelectedReposForOrgSecret: [ + "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" + ] + }, + dependencyGraph: { + createRepositorySnapshot: [ + "POST /repos/{owner}/{repo}/dependency-graph/snapshots" + ], + diffRange: [ + "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" + ], + exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] + }, + emojis: { get: ["GET /emojis"] }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET 
/repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"] + }, + interactions: { + getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], + getRestrictionsForYourPublicRepos: [ + "GET /user/interaction-limits", + {}, + { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } + ], + removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], + removeRestrictionsForRepo: [ + "DELETE /repos/{owner}/{repo}/interaction-limits" + ], + removeRestrictionsForYourPublicRepos: [ + "DELETE /user/interaction-limits", + {}, + { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } + ], + setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], + setRestrictionsForYourPublicRepos: [ + "PUT /user/interaction-limits", + {}, + { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } + ] + }, + issues: { + addAssignees: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + checkUserCanBeAssignedToIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" + ], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" + ], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" + ], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: [ + "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" + ], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" + ], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: [ + "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" + ], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: [ + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + listMilestones: ["GET 
/repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" + ], + removeAssignees: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" + ], + removeLabel: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" + ], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: [ + "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" + ] + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"] + }, + markdown: { + render: ["POST /markdown"], + renderRaw: [ + "POST /markdown/raw", + { headers: { "content-type": "text/plain; charset=utf-8" } } + ] + }, + meta: { + get: ["GET /meta"], + getAllVersions: ["GET /versions"], + getOctocat: ["GET /octocat"], + getZen: ["GET /zen"], + root: ["GET /"] + }, + migrations: { + cancelImport: [ + "DELETE /repos/{owner}/{repo}/import", + {}, { - no: 2, - name: "key", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - } - ]); - } - create(value) { - const message = { key: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* github.actions.results.entities.v1.CacheMetadata metadata */ - 1: - message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); - break; - case /* string key */ - 2: - message.key = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import" } - } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.metadata) - cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); - if (message.key !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } - }; - exports2.DeleteCacheEntryRequest = new DeleteCacheEntryRequest$Type(); - var DeleteCacheEntryResponse$Type = class extends runtime_5.MessageType { - static { - __name(this, "DeleteCacheEntryResponse$Type"); - } - constructor() { - super("github.actions.results.api.v1.DeleteCacheEntryResponse", [ - { - no: 1, - name: "ok", - kind: "scalar", - T: 8 - /*ScalarType.BOOL*/ - }, + ], + deleteArchiveForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/archive" + ], + deleteArchiveForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/archive" + ], + downloadArchiveForOrg: [ + "GET /orgs/{org}/migrations/{migration_id}/archive" + ], + getArchiveForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/archive" + ], + getCommitAuthors: [ + "GET /repos/{owner}/{repo}/import/authors", + {}, { - no: 2, - name: "entry_id", - kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - } - ]); - } - create(value) { - const message = { ok: false, entryId: "0" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* bool ok */ - 1: - message.ok = reader.bool(); - break; - case /* int64 entry_id */ - 2: - message.entryId = reader.int64().toString(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors" } - } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.ok !== false) - writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); - if (message.entryId !== "0") - writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } - }; - exports2.DeleteCacheEntryResponse = new DeleteCacheEntryResponse$Type(); - var ListCacheEntriesRequest$Type = class extends runtime_5.MessageType { - static { - __name(this, "ListCacheEntriesRequest$Type"); - } - constructor() { - super("github.actions.results.api.v1.ListCacheEntriesRequest", [ - { no: 1, name: "metadata", kind: "message", T: /* @__PURE__ */ __name(() => cachemetadata_1.CacheMetadata, "T") }, - { - no: 2, - name: "key", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, + ], + getImportStatus: [ + "GET /repos/{owner}/{repo}/import", + {}, { - no: 3, - name: "restore_keys", - kind: "scalar", - repeat: 2, - T: 9 - /*ScalarType.STRING*/ - } - ]); - } - create(value) { - const message = { key: "", restoreKeys: [] }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* github.actions.results.entities.v1.CacheMetadata metadata */ - 1: - message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); - break; - case /* string key */ - 2: - message.key = reader.string(); - break; - case /* repeated string restore_keys */ - 3: - message.restoreKeys.push(reader.string()); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.metadata) - cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); - if (message.key !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); - for (let i = 0; i < message.restoreKeys.length; i++) - writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } - }; - exports2.ListCacheEntriesRequest = new ListCacheEntriesRequest$Type(); - var ListCacheEntriesResponse$Type = class extends runtime_5.MessageType { - static { - __name(this, "ListCacheEntriesResponse$Type"); - } - constructor() { - super("github.actions.results.api.v1.ListCacheEntriesResponse", [ - { no: 1, name: "entries", kind: "message", repeat: 1, T: /* @__PURE__ */ __name(() => cacheentry_1.CacheEntry, "T") } - ]); - } - create(value) { - const message = { entries: [] }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* repeated github.actions.results.entities.v1.CacheEntry entries */ - 1: - message.entries.push(cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options)); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status" } - } - return message; - } - internalBinaryWrite(message, writer, options) { - for (let i = 0; i < message.entries.length; i++) - cacheentry_1.CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } - }; - exports2.ListCacheEntriesResponse = new ListCacheEntriesResponse$Type(); - var LookupCacheEntryRequest$Type = class extends runtime_5.MessageType { - static { - __name(this, "LookupCacheEntryRequest$Type"); - } - constructor() { - super("github.actions.results.api.v1.LookupCacheEntryRequest", [ - { no: 1, name: "metadata", kind: "message", T: /* @__PURE__ */ __name(() => cachemetadata_1.CacheMetadata, "T") }, - { - no: 2, - name: "key", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, + ], + getLargeFiles: [ + "GET /repos/{owner}/{repo}/import/large_files", + {}, { - no: 3, - name: "restore_keys", - kind: "scalar", - repeat: 2, - T: 9 - /*ScalarType.STRING*/ - }, + deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files" + } + ], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], + listForAuthenticatedUser: ["GET /user/migrations"], + listForOrg: ["GET /orgs/{org}/migrations"], + listReposForAuthenticatedUser: [ + "GET /user/migrations/{migration_id}/repositories" + ], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], + listReposForUser: [ + "GET /user/migrations/{migration_id}/repositories", + {}, + { renamed: ["migrations", "listReposForAuthenticatedUser"] } + ], + mapCommitAuthor: [ + "PATCH /repos/{owner}/{repo}/import/authors/{author_id}", + {}, { - no: 4, - name: "version", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ + deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author" } - ]); - } - create(value) { - const message = { key: "", restoreKeys: [], version: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* github.actions.results.entities.v1.CacheMetadata metadata */ - 1: - message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); - break; - case /* string key */ - 2: - message.key = reader.string(); - break; - case /* repeated string restore_keys */ - 3: - message.restoreKeys.push(reader.string()); - break; - case /* string version */ - 4: - message.version = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + ], + setLfsPreference: [ + "PATCH /repos/{owner}/{repo}/import/lfs", + {}, + { + deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference" } - } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.metadata) - cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); - if (message.key !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); - for (let i = 0; i < message.restoreKeys.length; i++) - writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]); - if (message.version !== "") - writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } - }; - exports2.LookupCacheEntryRequest = new LookupCacheEntryRequest$Type(); - var LookupCacheEntryResponse$Type = class extends runtime_5.MessageType { - static { - __name(this, "LookupCacheEntryResponse$Type"); - } - constructor() { - super("github.actions.results.api.v1.LookupCacheEntryResponse", [ + ], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: [ + "PUT /repos/{owner}/{repo}/import", + {}, { - no: 1, - name: "exists", - kind: "scalar", - T: 8 - /*ScalarType.BOOL*/ - }, - { no: 2, name: "entry", kind: "message", T: /* @__PURE__ */ __name(() => cacheentry_1.CacheEntry, "T") } - ]); - } - create(value) { - const message = { exists: false }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* bool exists */ - 1: - message.exists = reader.bool(); - break; - case /* github.actions.results.entities.v1.CacheEntry entry */ - 2: - message.entry = cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options, message.entry); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + deprecated: "octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import" } - } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.exists !== false) - writer.tag(1, runtime_1.WireType.Varint).bool(message.exists); - if (message.entry) - cacheentry_1.CacheEntry.internalBinaryWrite(message.entry, writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } - }; - exports2.LookupCacheEntryResponse = new LookupCacheEntryResponse$Type(); - exports2.CacheService = new runtime_rpc_1.ServiceType("github.actions.results.api.v1.CacheService", [ - { name: "CreateCacheEntry", options: {}, I: exports2.CreateCacheEntryRequest, O: exports2.CreateCacheEntryResponse }, - { name: "FinalizeCacheEntryUpload", options: {}, I: exports2.FinalizeCacheEntryUploadRequest, O: exports2.FinalizeCacheEntryUploadResponse }, - { name: "GetCacheEntryDownloadURL", options: {}, I: exports2.GetCacheEntryDownloadURLRequest, O: exports2.GetCacheEntryDownloadURLResponse }, - { name: "DeleteCacheEntry", options: {}, I: exports2.DeleteCacheEntryRequest, O: exports2.DeleteCacheEntryResponse }, - { name: "ListCacheEntries", options: {}, I: exports2.ListCacheEntriesRequest, O: exports2.ListCacheEntriesResponse }, - { name: "LookupCacheEntry", options: {}, I: exports2.LookupCacheEntryRequest, O: exports2.LookupCacheEntryResponse } - ]); - } -}); - -// ../node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp.js -var require_cache_twirp = __commonJS({ - "../node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp.js"(exports2) { - "use strict"; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? 
value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); + ], + unlockRepoForAuthenticatedUser: [ + "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" + ], + unlockRepoForOrg: [ + "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" + ], + updateImport: [ + "PATCH /repos/{owner}/{repo}/import", + {}, + { + deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import" } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); + ] + }, + oidc: { + getOidcCustomSubTemplateForOrg: [ + "GET /orgs/{org}/actions/oidc/customization/sub" + ], + updateOidcCustomSubTemplateForOrg: [ + "PUT /orgs/{org}/actions/oidc/customization/sub" + ] + }, + orgs: { + addSecurityManagerTeam: [ + "PUT /orgs/{org}/security-managers/teams/{team_slug}" + ], + assignTeamToOrgRole: [ + "PUT /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}" + ], + assignUserToOrgRole: [ + "PUT /orgs/{org}/organization-roles/users/{username}/{role_id}" + ], + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: [ + "PUT /orgs/{org}/outside_collaborators/{username}" + ], + createCustomOrganizationRole: ["POST /orgs/{org}/organization-roles"], + createInvitation: ["POST /orgs/{org}/invitations"], + createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"], + createOrUpdateCustomPropertiesValuesForRepos: [ + "PATCH /orgs/{org}/properties/values" + ], + createOrUpdateCustomProperty: [ + "PUT /orgs/{org}/properties/schema/{custom_property_name}" + ], + createWebhook: ["POST /orgs/{org}/hooks"], + delete: ["DELETE /orgs/{org}"], + deleteCustomOrganizationRole: [ + "DELETE /orgs/{org}/organization-roles/{role_id}" + ], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + enableOrDisableSecurityProductOnAllOrgRepos: [ + "POST /orgs/{org}/{security_product}/{enablement}" + ], + get: ["GET /orgs/{org}"], + getAllCustomProperties: ["GET /orgs/{org}/properties/schema"], + getCustomProperty: [ + "GET /orgs/{org}/properties/schema/{custom_property_name}" + ], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getOrgRole: ["GET /orgs/{org}/organization-roles/{role_id}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], + getWebhookDelivery: [ + "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations"], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"], + listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET 
/orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOrgRoleTeams: ["GET /orgs/{org}/organization-roles/{role_id}/teams"], + listOrgRoleUsers: ["GET /orgs/{org}/organization-roles/{role_id}/users"], + listOrgRoles: ["GET /orgs/{org}/organization-roles"], + listOrganizationFineGrainedPermissions: [ + "GET /orgs/{org}/organization-fine-grained-permissions" + ], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPatGrantRepositories: [ + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" + ], + listPatGrantRequestRepositories: [ + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" + ], + listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"], + listPatGrants: ["GET /orgs/{org}/personal-access-tokens"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], + listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], + listWebhooks: ["GET /orgs/{org}/hooks"], + patchCustomOrganizationRole: [ + "PATCH /orgs/{org}/organization-roles/{role_id}" + ], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeCustomProperty: [ + "DELETE /orgs/{org}/properties/schema/{custom_property_name}" + ], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: [ + "DELETE /orgs/{org}/outside_collaborators/{username}" + ], + removePublicMembershipForAuthenticatedUser: [ + "DELETE /orgs/{org}/public_members/{username}" + ], + removeSecurityManagerTeam: [ + "DELETE /orgs/{org}/security-managers/teams/{team_slug}" + ], + reviewPatGrantRequest: [ + "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}" + ], + reviewPatGrantRequestsInBulk: [ + "POST /orgs/{org}/personal-access-token-requests" + ], + revokeAllOrgRolesTeam: [ + "DELETE /orgs/{org}/organization-roles/teams/{team_slug}" + ], + revokeAllOrgRolesUser: [ + "DELETE /orgs/{org}/organization-roles/users/{username}" + ], + revokeOrgRoleTeam: [ + "DELETE /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}" + ], + revokeOrgRoleUser: [ + "DELETE /orgs/{org}/organization-roles/users/{username}/{role_id}" + ], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: [ + "PUT /orgs/{org}/public_members/{username}" + ], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateMembershipForAuthenticatedUser: [ + "PATCH /user/memberships/orgs/{org}" + ], + updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"], + updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], + updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] + }, + packages: { + deletePackageForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}" + ], + deletePackageForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}" + ], + deletePackageForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}" + ], + deletePackageVersionForAuthenticatedUser: [ + "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + 
deletePackageVersionForOrg: [ + "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + deletePackageVersionForUser: [ + "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getAllPackageVersionsForAPackageOwnedByAnOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + {}, + { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } + ], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions", + {}, + { + renamed: [ + "packages", + "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" + ] } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createCacheServiceServer = exports2.CacheServiceMethodList = exports2.CacheServiceMethod = exports2.CacheServiceClientProtobuf = exports2.CacheServiceClientJSON = void 0; - var twirp_ts_1 = require_twirp(); - var cache_1 = require_cache2(); - var CacheServiceClientJSON = class { - static { - __name(this, "CacheServiceClientJSON"); - } - constructor(rpc) { - this.rpc = rpc; - this.CreateCacheEntry.bind(this); - this.FinalizeCacheEntryUpload.bind(this); - this.GetCacheEntryDownloadURL.bind(this); - this.DeleteCacheEntry.bind(this); - this.ListCacheEntries.bind(this); - this.LookupCacheEntry.bind(this); - } - CreateCacheEntry(request) { - const data = cache_1.CreateCacheEntryRequest.toJson(request, { - useProtoFieldName: true, - emitDefaultValues: false - }); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/json", data); - return promise.then((data2) => cache_1.CreateCacheEntryResponse.fromJson(data2, { - ignoreUnknownFields: true - })); - } - FinalizeCacheEntryUpload(request) { - const data = cache_1.FinalizeCacheEntryUploadRequest.toJson(request, { - useProtoFieldName: true, - emitDefaultValues: false - }); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/json", data); - return promise.then((data2) => cache_1.FinalizeCacheEntryUploadResponse.fromJson(data2, { - ignoreUnknownFields: true - })); - } - GetCacheEntryDownloadURL(request) { - const data = cache_1.GetCacheEntryDownloadURLRequest.toJson(request, { - useProtoFieldName: true, - emitDefaultValues: false - }); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/json", data); - return promise.then((data2) => cache_1.GetCacheEntryDownloadURLResponse.fromJson(data2, { - ignoreUnknownFields: true - })); - } - DeleteCacheEntry(request) { - const data = cache_1.DeleteCacheEntryRequest.toJson(request, { - useProtoFieldName: true, - emitDefaultValues: false - }); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "DeleteCacheEntry", "application/json", data); - return promise.then((data2) => cache_1.DeleteCacheEntryResponse.fromJson(data2, { - ignoreUnknownFields: true - })); - } - ListCacheEntries(request) { - const data = cache_1.ListCacheEntriesRequest.toJson(request, { - useProtoFieldName: true, - emitDefaultValues: false - }); - const promise = 
this.rpc.request("github.actions.results.api.v1.CacheService", "ListCacheEntries", "application/json", data); - return promise.then((data2) => cache_1.ListCacheEntriesResponse.fromJson(data2, { - ignoreUnknownFields: true - })); - } - LookupCacheEntry(request) { - const data = cache_1.LookupCacheEntryRequest.toJson(request, { - useProtoFieldName: true, - emitDefaultValues: false - }); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "LookupCacheEntry", "application/json", data); - return promise.then((data2) => cache_1.LookupCacheEntryResponse.fromJson(data2, { - ignoreUnknownFields: true - })); - } - }; - exports2.CacheServiceClientJSON = CacheServiceClientJSON; - var CacheServiceClientProtobuf = class { - static { - __name(this, "CacheServiceClientProtobuf"); - } - constructor(rpc) { - this.rpc = rpc; - this.CreateCacheEntry.bind(this); - this.FinalizeCacheEntryUpload.bind(this); - this.GetCacheEntryDownloadURL.bind(this); - this.DeleteCacheEntry.bind(this); - this.ListCacheEntries.bind(this); - this.LookupCacheEntry.bind(this); - } - CreateCacheEntry(request) { - const data = cache_1.CreateCacheEntryRequest.toBinary(request); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/protobuf", data); - return promise.then((data2) => cache_1.CreateCacheEntryResponse.fromBinary(data2)); - } - FinalizeCacheEntryUpload(request) { - const data = cache_1.FinalizeCacheEntryUploadRequest.toBinary(request); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/protobuf", data); - return promise.then((data2) => cache_1.FinalizeCacheEntryUploadResponse.fromBinary(data2)); - } - GetCacheEntryDownloadURL(request) { - const data = cache_1.GetCacheEntryDownloadURLRequest.toBinary(request); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/protobuf", data); - return promise.then((data2) => cache_1.GetCacheEntryDownloadURLResponse.fromBinary(data2)); - } - DeleteCacheEntry(request) { - const data = cache_1.DeleteCacheEntryRequest.toBinary(request); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "DeleteCacheEntry", "application/protobuf", data); - return promise.then((data2) => cache_1.DeleteCacheEntryResponse.fromBinary(data2)); - } - ListCacheEntries(request) { - const data = cache_1.ListCacheEntriesRequest.toBinary(request); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "ListCacheEntries", "application/protobuf", data); - return promise.then((data2) => cache_1.ListCacheEntriesResponse.fromBinary(data2)); - } - LookupCacheEntry(request) { - const data = cache_1.LookupCacheEntryRequest.toBinary(request); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "LookupCacheEntry", "application/protobuf", data); - return promise.then((data2) => cache_1.LookupCacheEntryResponse.fromBinary(data2)); + ], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByOrg: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" + ], + getAllPackageVersionsForPackageOwnedByUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions" + ], + getPackageForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}" + ], + getPackageForOrganization: 
[ + "GET /orgs/{org}/packages/{package_type}/{package_name}" + ], + getPackageForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}" + ], + getPackageVersionForAuthenticatedUser: [ + "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForOrganization: [ + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + getPackageVersionForUser: [ + "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" + ], + listDockerMigrationConflictingPackagesForAuthenticatedUser: [ + "GET /user/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForOrganization: [ + "GET /orgs/{org}/docker/conflicts" + ], + listDockerMigrationConflictingPackagesForUser: [ + "GET /users/{username}/docker/conflicts" + ], + listPackagesForAuthenticatedUser: ["GET /user/packages"], + listPackagesForOrganization: ["GET /orgs/{org}/packages"], + listPackagesForUser: ["GET /users/{username}/packages"], + restorePackageForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" + ], + restorePackageVersionForAuthenticatedUser: [ + "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForOrg: [ + "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ], + restorePackageVersionForUser: [ + "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" + ] + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], + createCard: ["POST /projects/columns/{column_id}/cards"], + createColumn: ["POST /projects/{project_id}/columns"], + createForAuthenticatedUser: ["POST /user/projects"], + createForOrg: ["POST /orgs/{org}/projects"], + createForRepo: ["POST /repos/{owner}/{repo}/projects"], + delete: ["DELETE /projects/{project_id}"], + deleteCard: ["DELETE /projects/columns/cards/{card_id}"], + deleteColumn: ["DELETE /projects/columns/{column_id}"], + get: ["GET /projects/{project_id}"], + getCard: ["GET /projects/columns/cards/{card_id}"], + getColumn: ["GET /projects/columns/{column_id}"], + getPermissionForUser: [ + "GET /projects/{project_id}/collaborators/{username}/permission" + ], + listCards: ["GET /projects/columns/{column_id}/cards"], + listCollaborators: ["GET /projects/{project_id}/collaborators"], + listColumns: ["GET /projects/{project_id}/columns"], + listForOrg: ["GET /orgs/{org}/projects"], + listForRepo: ["GET /repos/{owner}/{repo}/projects"], + listForUser: ["GET /users/{username}/projects"], + moveCard: ["POST /projects/columns/cards/{card_id}/moves"], + moveColumn: ["POST /projects/columns/{column_id}/moves"], + removeCollaborator: [ + "DELETE /projects/{project_id}/collaborators/{username}" + ], + update: ["PATCH /projects/{project_id}"], + updateCard: ["PATCH /projects/columns/cards/{card_id}"], + updateColumn: ["PATCH /projects/columns/{column_id}"] + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" + ], + createReview: ["POST 
/repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + deletePendingReview: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + deleteReviewComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ], + dismissReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" + ], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" + ], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + listReviewComments: [ + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" + ], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: [ + "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + requestReviewers: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" + ], + submitReview: [ + "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" + ], + update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" + ], + updateReview: [ + "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" + ], + updateReviewComment: [ + "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" + ] + }, + rateLimit: { get: ["GET /rate_limit"] }, + reactions: { + createForCommitComment: [ + "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + createForIssue: [ + "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" + ], + createForIssueComment: [ + "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + createForPullRequestReviewComment: [ + "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + createForRelease: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + createForTeamDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + createForTeamDiscussionInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ], + deleteForCommitComment: [ + "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForIssue: [ + "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" + ], + deleteForIssueComment: [ + "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForPullRequestComment: [ + "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" + ], + deleteForRelease: [ + "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" + ], + deleteForTeamDiscussion: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" + ], + 
deleteForTeamDiscussionComment: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" + ], + listForCommitComment: [ + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" + ], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], + listForIssueComment: [ + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" + ], + listForPullRequestReviewComment: [ + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" + ], + listForRelease: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" + ], + listForTeamDiscussionCommentInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" + ], + listForTeamDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" + ] + }, + repos: { + acceptInvitation: [ + "PATCH /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } + ], + acceptInvitationForAuthenticatedUser: [ + "PATCH /user/repository_invitations/{invitation_id}" + ], + addAppAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + addTeamAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + addUserAccessRestrictions: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + cancelPagesDeployment: [ + "POST /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}/cancel" + ], + checkAutomatedSecurityFixes: [ + "GET /repos/{owner}/{repo}/automated-security-fixes" + ], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: [ + "GET /repos/{owner}/{repo}/vulnerability-alerts" + ], + codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: [ + "GET /repos/{owner}/{repo}/compare/{basehead}" + ], + createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], + createCommitComment: [ + "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + createCommitSignatureProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentBranchPolicy: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + createDeploymentProtectionRule: [ + "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + createDeploymentStatus: [ + "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateCustomPropertiesValues: [ + "PATCH 
/repos/{owner}/{repo}/properties/values" + ], + createOrUpdateEnvironment: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}" + ], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createOrgRuleset: ["POST /orgs/{org}/rulesets"], + createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployments"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages"], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], + createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], + createUsingTemplate: [ + "POST /repos/{template_owner}/{template_repo}/generate" + ], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: [ + "DELETE /user/repository_invitations/{invitation_id}", + {}, + { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } + ], + declineInvitationForAuthenticatedUser: [ + "DELETE /user/repository_invitations/{invitation_id}" + ], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + deleteAdminBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + deleteAnEnvironment: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}" + ], + deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], + deleteBranchProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" + ], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: [ + "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" + ], + deleteDeploymentBranchPolicy: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: [ + "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], + deletePullRequestReviewProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: [ + "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + deleteTagProtection: [ + "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" + ], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: [ + "DELETE /repos/{owner}/{repo}/automated-security-fixes" + ], + disableDeploymentProtectionRule: [ + "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + disablePrivateVulnerabilityReporting: [ + "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + disableVulnerabilityAlerts: [ + "DELETE /repos/{owner}/{repo}/vulnerability-alerts" + ], + downloadArchive: [ + "GET /repos/{owner}/{repo}/zipball/{ref}", + {}, + { renamed: ["repos", "downloadZipballArchive"] } + ], + downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], + downloadZipballArchive: ["GET 
/repos/{owner}/{repo}/zipball/{ref}"], + enableAutomatedSecurityFixes: [ + "PUT /repos/{owner}/{repo}/automated-security-fixes" + ], + enablePrivateVulnerabilityReporting: [ + "PUT /repos/{owner}/{repo}/private-vulnerability-reporting" + ], + enableVulnerabilityAlerts: [ + "PUT /repos/{owner}/{repo}/vulnerability-alerts" + ], + generateReleaseNotes: [ + "POST /repos/{owner}/{repo}/releases/generate-notes" + ], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" + ], + getAdminBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + getAllDeploymentProtectionRules: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" + ], + getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], + getAllStatusCheckContexts: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" + ], + getAllTopics: ["GET /repos/{owner}/{repo}/topics"], + getAppsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" + ], + getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection" + ], + getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: [ + "GET /repos/{owner}/{repo}/collaborators/{username}/permission" + ], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" + ], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getCustomDeploymentProtectionRule: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" + ], + getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentBranchPolicy: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + getDeploymentStatus: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" + ], + getEnvironment: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}" + ], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"], + getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"], + getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], + getOrgRulesets: ["GET /orgs/{org}/rulesets"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + 
getPagesDeployment: [ + "GET /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}" + ], + getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getRepoRuleSuite: [ + "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}" + ], + getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"], + getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], + getStatusChecksProtection: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + getTeamsWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" + ], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: [ + "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" + ], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + getWebhookConfigForRepo: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + getWebhookDelivery: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" + ], + listActivities: ["GET /repos/{owner}/{repo}/activity"], + listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" + ], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" + ], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: [ + "GET /repos/{owner}/{repo}/commits/{ref}/statuses" + ], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listCustomDeploymentRuleIntegrations: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" + ], + listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentBranchPolicies: [ + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" + ], + listDeploymentStatuses: [ + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" + ], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET 
/repositories"], + listPullRequestsAssociatedWithCommit: [ + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" + ], + listReleaseAssets: [ + "GET /repos/{owner}/{repo}/releases/{release_id}/assets" + ], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhookDeliveries: [ + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" + ], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + redeliverWebhookDelivery: [ + "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" + ], + removeAppAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + removeCollaborator: [ + "DELETE /repos/{owner}/{repo}/collaborators/{username}" + ], + removeStatusCheckContexts: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + removeStatusCheckProtection: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + removeTeamAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + removeUserAccessRestrictions: [ + "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: [ + "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" + ], + setAppAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", + {}, + { mapToData: "apps" } + ], + setStatusCheckContexts: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", + {}, + { mapToData: "contexts" } + ], + setTeamAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", + {}, + { mapToData: "teams" } + ], + setUserAccessRestrictions: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", + {}, + { mapToData: "users" } + ], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: [ + "PUT /repos/{owner}/{repo}/branches/{branch}/protection" + ], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateDeploymentBranchPolicy: [ + "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" + ], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: [ + "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" + ], + updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], + updatePullRequestReviewProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" + ], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: [ + "PATCH 
/repos/{owner}/{repo}/releases/assets/{asset_id}" + ], + updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], + updateStatusCheckPotection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", + {}, + { renamed: ["repos", "updateStatusCheckProtection"] } + ], + updateStatusCheckProtection: [ + "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" + ], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + updateWebhookConfigForRepo: [ + "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" + ], + uploadReleaseAsset: [ + "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", + { baseUrl: "https://uploads.github.com" } + ] + }, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits"], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics"], + users: ["GET /search/users"] + }, + secretScanning: { + getAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ], + listAlertsForEnterprise: [ + "GET /enterprises/{enterprise}/secret-scanning/alerts" + ], + listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], + listLocationsForAlert: [ + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" + ], + updateAlert: [ + "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" + ] + }, + securityAdvisories: { + createFork: [ + "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/forks" + ], + createPrivateVulnerabilityReport: [ + "POST /repos/{owner}/{repo}/security-advisories/reports" + ], + createRepositoryAdvisory: [ + "POST /repos/{owner}/{repo}/security-advisories" + ], + createRepositoryAdvisoryCveRequest: [ + "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve" + ], + getGlobalAdvisory: ["GET /advisories/{ghsa_id}"], + getRepositoryAdvisory: [ + "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ], + listGlobalAdvisories: ["GET /advisories"], + listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"], + listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], + updateRepositoryAdvisory: [ + "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" + ] + }, + teams: { + addOrUpdateMembershipForUserInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + addOrUpdateProjectPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + addOrUpdateRepoPermissionsInOrg: [ + "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + checkPermissionsForProjectInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + checkPermissionsForRepoInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: [ + "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + deleteDiscussionInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: [ + "GET 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + getDiscussionInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + getMembershipForUserInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" + ], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: [ + "GET /orgs/{org}/teams/{team_slug}/invitations" + ], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" + ], + removeProjectInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" + ], + removeRepoInOrg: [ + "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" + ], + updateDiscussionCommentInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" + ], + updateDiscussionInOrg: [ + "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" + ], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + }, + users: { + addEmailForAuthenticated: [ + "POST /user/emails", + {}, + { renamed: ["users", "addEmailForAuthenticatedUser"] } + ], + addEmailForAuthenticatedUser: ["POST /user/emails"], + addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: [ + "POST /user/gpg_keys", + {}, + { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } + ], + createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: [ + "POST /user/keys", + {}, + { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } + ], + createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], + createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], + deleteEmailForAuthenticated: [ + "DELETE /user/emails", + {}, + { renamed: ["users", "deleteEmailForAuthenticatedUser"] } + ], + deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: [ + "DELETE /user/gpg_keys/{gpg_key_id}", + {}, + { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } + ], + deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: [ + "DELETE /user/keys/{key_id}", + {}, + { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } + ], + deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], + deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], + deleteSshSigningKeyForAuthenticatedUser: [ + "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: [ + "GET /user/gpg_keys/{gpg_key_id}", + {}, + { 
renamed: ["users", "getGpgKeyForAuthenticatedUser"] } + ], + getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: [ + "GET /user/keys/{key_id}", + {}, + { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } + ], + getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], + getSshSigningKeyForAuthenticatedUser: [ + "GET /user/ssh_signing_keys/{ssh_signing_key_id}" + ], + list: ["GET /users"], + listBlockedByAuthenticated: [ + "GET /user/blocks", + {}, + { renamed: ["users", "listBlockedByAuthenticatedUser"] } + ], + listBlockedByAuthenticatedUser: ["GET /user/blocks"], + listEmailsForAuthenticated: [ + "GET /user/emails", + {}, + { renamed: ["users", "listEmailsForAuthenticatedUser"] } + ], + listEmailsForAuthenticatedUser: ["GET /user/emails"], + listFollowedByAuthenticated: [ + "GET /user/following", + {}, + { renamed: ["users", "listFollowedByAuthenticatedUser"] } + ], + listFollowedByAuthenticatedUser: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: [ + "GET /user/gpg_keys", + {}, + { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } + ], + listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmailsForAuthenticated: [ + "GET /user/public_emails", + {}, + { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } + ], + listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: [ + "GET /user/keys", + {}, + { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } + ], + listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], + listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], + listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], + listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], + listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], + setPrimaryEmailVisibilityForAuthenticated: [ + "PATCH /user/email/visibility", + {}, + { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } + ], + setPrimaryEmailVisibilityForAuthenticatedUser: [ + "PATCH /user/email/visibility" + ], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] } }; - exports2.CacheServiceClientProtobuf = CacheServiceClientProtobuf; - var CacheServiceMethod; - (function(CacheServiceMethod2) { - CacheServiceMethod2["CreateCacheEntry"] = "CreateCacheEntry"; - CacheServiceMethod2["FinalizeCacheEntryUpload"] = "FinalizeCacheEntryUpload"; - CacheServiceMethod2["GetCacheEntryDownloadURL"] = "GetCacheEntryDownloadURL"; - CacheServiceMethod2["DeleteCacheEntry"] = "DeleteCacheEntry"; - CacheServiceMethod2["ListCacheEntries"] = "ListCacheEntries"; - CacheServiceMethod2["LookupCacheEntry"] = "LookupCacheEntry"; - })(CacheServiceMethod || (exports2.CacheServiceMethod = CacheServiceMethod = {})); - exports2.CacheServiceMethodList = [ - CacheServiceMethod.CreateCacheEntry, - CacheServiceMethod.FinalizeCacheEntryUpload, - CacheServiceMethod.GetCacheEntryDownloadURL, - CacheServiceMethod.DeleteCacheEntry, - CacheServiceMethod.ListCacheEntries, - CacheServiceMethod.LookupCacheEntry - ]; - function 
createCacheServiceServer(service) { - return new twirp_ts_1.TwirpServer({ - service, - packageName: "github.actions.results.api.v1", - serviceName: "CacheService", - methodList: exports2.CacheServiceMethodList, - matchRoute: matchCacheServiceRoute - }); - } - __name(createCacheServiceServer, "createCacheServiceServer"); - exports2.createCacheServiceServer = createCacheServiceServer; - function matchCacheServiceRoute(method, events) { - switch (method) { - case "CreateCacheEntry": - return (ctx, service, data, interceptors) => __awaiter3(this, void 0, void 0, function* () { - ctx = Object.assign(Object.assign({}, ctx), { methodName: "CreateCacheEntry" }); - yield events.onMatch(ctx); - return handleCacheServiceCreateCacheEntryRequest(ctx, service, data, interceptors); - }); - case "FinalizeCacheEntryUpload": - return (ctx, service, data, interceptors) => __awaiter3(this, void 0, void 0, function* () { - ctx = Object.assign(Object.assign({}, ctx), { methodName: "FinalizeCacheEntryUpload" }); - yield events.onMatch(ctx); - return handleCacheServiceFinalizeCacheEntryUploadRequest(ctx, service, data, interceptors); - }); - case "GetCacheEntryDownloadURL": - return (ctx, service, data, interceptors) => __awaiter3(this, void 0, void 0, function* () { - ctx = Object.assign(Object.assign({}, ctx), { methodName: "GetCacheEntryDownloadURL" }); - yield events.onMatch(ctx); - return handleCacheServiceGetCacheEntryDownloadURLRequest(ctx, service, data, interceptors); - }); - case "DeleteCacheEntry": - return (ctx, service, data, interceptors) => __awaiter3(this, void 0, void 0, function* () { - ctx = Object.assign(Object.assign({}, ctx), { methodName: "DeleteCacheEntry" }); - yield events.onMatch(ctx); - return handleCacheServiceDeleteCacheEntryRequest(ctx, service, data, interceptors); - }); - case "ListCacheEntries": - return (ctx, service, data, interceptors) => __awaiter3(this, void 0, void 0, function* () { - ctx = Object.assign(Object.assign({}, ctx), { methodName: "ListCacheEntries" }); - yield events.onMatch(ctx); - return handleCacheServiceListCacheEntriesRequest(ctx, service, data, interceptors); - }); - case "LookupCacheEntry": - return (ctx, service, data, interceptors) => __awaiter3(this, void 0, void 0, function* () { - ctx = Object.assign(Object.assign({}, ctx), { methodName: "LookupCacheEntry" }); - yield events.onMatch(ctx); - return handleCacheServiceLookupCacheEntryRequest(ctx, service, data, interceptors); - }); - default: - events.onNotFound(); - const msg = `no handler found`; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); - } - } - __name(matchCacheServiceRoute, "matchCacheServiceRoute"); - function handleCacheServiceCreateCacheEntryRequest(ctx, service, data, interceptors) { - switch (ctx.contentType) { - case twirp_ts_1.TwirpContentType.JSON: - return handleCacheServiceCreateCacheEntryJSON(ctx, service, data, interceptors); - case twirp_ts_1.TwirpContentType.Protobuf: - return handleCacheServiceCreateCacheEntryProtobuf(ctx, service, data, interceptors); - default: - const msg = "unexpected Content-Type"; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); - } - } - __name(handleCacheServiceCreateCacheEntryRequest, "handleCacheServiceCreateCacheEntryRequest"); - function handleCacheServiceFinalizeCacheEntryUploadRequest(ctx, service, data, interceptors) { - switch (ctx.contentType) { - case twirp_ts_1.TwirpContentType.JSON: - return handleCacheServiceFinalizeCacheEntryUploadJSON(ctx, service, data, interceptors); - case 
twirp_ts_1.TwirpContentType.Protobuf: - return handleCacheServiceFinalizeCacheEntryUploadProtobuf(ctx, service, data, interceptors); - default: - const msg = "unexpected Content-Type"; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); - } - } - __name(handleCacheServiceFinalizeCacheEntryUploadRequest, "handleCacheServiceFinalizeCacheEntryUploadRequest"); - function handleCacheServiceGetCacheEntryDownloadURLRequest(ctx, service, data, interceptors) { - switch (ctx.contentType) { - case twirp_ts_1.TwirpContentType.JSON: - return handleCacheServiceGetCacheEntryDownloadURLJSON(ctx, service, data, interceptors); - case twirp_ts_1.TwirpContentType.Protobuf: - return handleCacheServiceGetCacheEntryDownloadURLProtobuf(ctx, service, data, interceptors); - default: - const msg = "unexpected Content-Type"; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); - } - } - __name(handleCacheServiceGetCacheEntryDownloadURLRequest, "handleCacheServiceGetCacheEntryDownloadURLRequest"); - function handleCacheServiceDeleteCacheEntryRequest(ctx, service, data, interceptors) { - switch (ctx.contentType) { - case twirp_ts_1.TwirpContentType.JSON: - return handleCacheServiceDeleteCacheEntryJSON(ctx, service, data, interceptors); - case twirp_ts_1.TwirpContentType.Protobuf: - return handleCacheServiceDeleteCacheEntryProtobuf(ctx, service, data, interceptors); - default: - const msg = "unexpected Content-Type"; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); - } - } - __name(handleCacheServiceDeleteCacheEntryRequest, "handleCacheServiceDeleteCacheEntryRequest"); - function handleCacheServiceListCacheEntriesRequest(ctx, service, data, interceptors) { - switch (ctx.contentType) { - case twirp_ts_1.TwirpContentType.JSON: - return handleCacheServiceListCacheEntriesJSON(ctx, service, data, interceptors); - case twirp_ts_1.TwirpContentType.Protobuf: - return handleCacheServiceListCacheEntriesProtobuf(ctx, service, data, interceptors); - default: - const msg = "unexpected Content-Type"; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); - } - } - __name(handleCacheServiceListCacheEntriesRequest, "handleCacheServiceListCacheEntriesRequest"); - function handleCacheServiceLookupCacheEntryRequest(ctx, service, data, interceptors) { - switch (ctx.contentType) { - case twirp_ts_1.TwirpContentType.JSON: - return handleCacheServiceLookupCacheEntryJSON(ctx, service, data, interceptors); - case twirp_ts_1.TwirpContentType.Protobuf: - return handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, interceptors); - default: - const msg = "unexpected Content-Type"; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); - } - } - __name(handleCacheServiceLookupCacheEntryRequest, "handleCacheServiceLookupCacheEntryRequest"); - function handleCacheServiceCreateCacheEntryJSON(ctx, service, data, interceptors) { - return __awaiter3(this, void 0, void 0, function* () { - let request; - let response; - try { - const body = JSON.parse(data.toString() || "{}"); - request = cache_1.CreateCacheEntryRequest.fromJson(body, { - ignoreUnknownFields: true - }); - } catch (e) { - if (e instanceof Error) { - const msg = "the json request could not be decoded"; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - if (interceptors && interceptors.length > 0) { - const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); - response = yield interceptor(ctx, request, 
(ctx2, inputReq) => { - return service.CreateCacheEntry(ctx2, inputReq); - }); - } else { - response = yield service.CreateCacheEntry(ctx, request); - } - return JSON.stringify(cache_1.CreateCacheEntryResponse.toJson(response, { - useProtoFieldName: true, - emitDefaultValues: false - })); - }); - } - __name(handleCacheServiceCreateCacheEntryJSON, "handleCacheServiceCreateCacheEntryJSON"); - function handleCacheServiceFinalizeCacheEntryUploadJSON(ctx, service, data, interceptors) { - return __awaiter3(this, void 0, void 0, function* () { - let request; - let response; - try { - const body = JSON.parse(data.toString() || "{}"); - request = cache_1.FinalizeCacheEntryUploadRequest.fromJson(body, { - ignoreUnknownFields: true - }); - } catch (e) { - if (e instanceof Error) { - const msg = "the json request could not be decoded"; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - if (interceptors && interceptors.length > 0) { - const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); - response = yield interceptor(ctx, request, (ctx2, inputReq) => { - return service.FinalizeCacheEntryUpload(ctx2, inputReq); - }); - } else { - response = yield service.FinalizeCacheEntryUpload(ctx, request); - } - return JSON.stringify(cache_1.FinalizeCacheEntryUploadResponse.toJson(response, { - useProtoFieldName: true, - emitDefaultValues: false - })); - }); - } - __name(handleCacheServiceFinalizeCacheEntryUploadJSON, "handleCacheServiceFinalizeCacheEntryUploadJSON"); - function handleCacheServiceGetCacheEntryDownloadURLJSON(ctx, service, data, interceptors) { - return __awaiter3(this, void 0, void 0, function* () { - let request; - let response; - try { - const body = JSON.parse(data.toString() || "{}"); - request = cache_1.GetCacheEntryDownloadURLRequest.fromJson(body, { - ignoreUnknownFields: true - }); - } catch (e) { - if (e instanceof Error) { - const msg = "the json request could not be decoded"; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - if (interceptors && interceptors.length > 0) { - const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); - response = yield interceptor(ctx, request, (ctx2, inputReq) => { - return service.GetCacheEntryDownloadURL(ctx2, inputReq); - }); - } else { - response = yield service.GetCacheEntryDownloadURL(ctx, request); - } - return JSON.stringify(cache_1.GetCacheEntryDownloadURLResponse.toJson(response, { - useProtoFieldName: true, - emitDefaultValues: false - })); - }); - } - __name(handleCacheServiceGetCacheEntryDownloadURLJSON, "handleCacheServiceGetCacheEntryDownloadURLJSON"); - function handleCacheServiceDeleteCacheEntryJSON(ctx, service, data, interceptors) { - return __awaiter3(this, void 0, void 0, function* () { - let request; - let response; - try { - const body = JSON.parse(data.toString() || "{}"); - request = cache_1.DeleteCacheEntryRequest.fromJson(body, { - ignoreUnknownFields: true - }); - } catch (e) { - if (e instanceof Error) { - const msg = "the json request could not be decoded"; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - if (interceptors && interceptors.length > 0) { - const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); - response = yield interceptor(ctx, request, (ctx2, inputReq) => { - return service.DeleteCacheEntry(ctx2, inputReq); - }); - } else { - response = yield service.DeleteCacheEntry(ctx, request); - } - return 
JSON.stringify(cache_1.DeleteCacheEntryResponse.toJson(response, { - useProtoFieldName: true, - emitDefaultValues: false - })); - }); - } - __name(handleCacheServiceDeleteCacheEntryJSON, "handleCacheServiceDeleteCacheEntryJSON"); - function handleCacheServiceListCacheEntriesJSON(ctx, service, data, interceptors) { - return __awaiter3(this, void 0, void 0, function* () { - let request; - let response; - try { - const body = JSON.parse(data.toString() || "{}"); - request = cache_1.ListCacheEntriesRequest.fromJson(body, { - ignoreUnknownFields: true - }); - } catch (e) { - if (e instanceof Error) { - const msg = "the json request could not be decoded"; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - if (interceptors && interceptors.length > 0) { - const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); - response = yield interceptor(ctx, request, (ctx2, inputReq) => { - return service.ListCacheEntries(ctx2, inputReq); - }); - } else { - response = yield service.ListCacheEntries(ctx, request); - } - return JSON.stringify(cache_1.ListCacheEntriesResponse.toJson(response, { - useProtoFieldName: true, - emitDefaultValues: false - })); - }); - } - __name(handleCacheServiceListCacheEntriesJSON, "handleCacheServiceListCacheEntriesJSON"); - function handleCacheServiceLookupCacheEntryJSON(ctx, service, data, interceptors) { - return __awaiter3(this, void 0, void 0, function* () { - let request; - let response; - try { - const body = JSON.parse(data.toString() || "{}"); - request = cache_1.LookupCacheEntryRequest.fromJson(body, { - ignoreUnknownFields: true - }); - } catch (e) { - if (e instanceof Error) { - const msg = "the json request could not be decoded"; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - if (interceptors && interceptors.length > 0) { - const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); - response = yield interceptor(ctx, request, (ctx2, inputReq) => { - return service.LookupCacheEntry(ctx2, inputReq); - }); - } else { - response = yield service.LookupCacheEntry(ctx, request); - } - return JSON.stringify(cache_1.LookupCacheEntryResponse.toJson(response, { - useProtoFieldName: true, - emitDefaultValues: false - })); - }); - } - __name(handleCacheServiceLookupCacheEntryJSON, "handleCacheServiceLookupCacheEntryJSON"); - function handleCacheServiceCreateCacheEntryProtobuf(ctx, service, data, interceptors) { - return __awaiter3(this, void 0, void 0, function* () { - let request; - let response; - try { - request = cache_1.CreateCacheEntryRequest.fromBinary(data); - } catch (e) { - if (e instanceof Error) { - const msg = "the protobuf request could not be decoded"; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - if (interceptors && interceptors.length > 0) { - const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); - response = yield interceptor(ctx, request, (ctx2, inputReq) => { - return service.CreateCacheEntry(ctx2, inputReq); - }); - } else { - response = yield service.CreateCacheEntry(ctx, request); + var endpoints_default = Endpoints; + var endpointMethodsMap = /* @__PURE__ */ new Map(); + for (const [scope, endpoints] of Object.entries(endpoints_default)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign( + 
{ + method, + url + }, + defaults + ); + if (!endpointMethodsMap.has(scope)) { + endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); } - return Buffer.from(cache_1.CreateCacheEntryResponse.toBinary(response)); - }); + endpointMethodsMap.get(scope).set(methodName, { + scope, + methodName, + endpointDefaults, + decorations + }); + } } - __name(handleCacheServiceCreateCacheEntryProtobuf, "handleCacheServiceCreateCacheEntryProtobuf"); - function handleCacheServiceFinalizeCacheEntryUploadProtobuf(ctx, service, data, interceptors) { - return __awaiter3(this, void 0, void 0, function* () { - let request; - let response; - try { - request = cache_1.FinalizeCacheEntryUploadRequest.fromBinary(data); - } catch (e) { - if (e instanceof Error) { - const msg = "the protobuf request could not be decoded"; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - if (interceptors && interceptors.length > 0) { - const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); - response = yield interceptor(ctx, request, (ctx2, inputReq) => { - return service.FinalizeCacheEntryUpload(ctx2, inputReq); - }); - } else { - response = yield service.FinalizeCacheEntryUpload(ctx, request); + var handler = { + has({ scope }, methodName) { + return endpointMethodsMap.get(scope).has(methodName); + }, + getOwnPropertyDescriptor(target, methodName) { + return { + value: this.get(target, methodName), + // ensures method is in the cache + configurable: true, + writable: true, + enumerable: true + }; + }, + defineProperty(target, methodName, descriptor) { + Object.defineProperty(target.cache, methodName, descriptor); + return true; + }, + deleteProperty(target, methodName) { + delete target.cache[methodName]; + return true; + }, + ownKeys({ scope }) { + return [...endpointMethodsMap.get(scope).keys()]; + }, + set(target, methodName, value) { + return target.cache[methodName] = value; + }, + get({ octokit, scope, cache }, methodName) { + if (cache[methodName]) { + return cache[methodName]; } - return Buffer.from(cache_1.FinalizeCacheEntryUploadResponse.toBinary(response)); - }); - } - __name(handleCacheServiceFinalizeCacheEntryUploadProtobuf, "handleCacheServiceFinalizeCacheEntryUploadProtobuf"); - function handleCacheServiceGetCacheEntryDownloadURLProtobuf(ctx, service, data, interceptors) { - return __awaiter3(this, void 0, void 0, function* () { - let request; - let response; - try { - request = cache_1.GetCacheEntryDownloadURLRequest.fromBinary(data); - } catch (e) { - if (e instanceof Error) { - const msg = "the protobuf request could not be decoded"; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); - } + const method = endpointMethodsMap.get(scope).get(methodName); + if (!method) { + return void 0; } - if (interceptors && interceptors.length > 0) { - const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); - response = yield interceptor(ctx, request, (ctx2, inputReq) => { - return service.GetCacheEntryDownloadURL(ctx2, inputReq); - }); + const { endpointDefaults, decorations } = method; + if (decorations) { + cache[methodName] = decorate( + octokit, + scope, + methodName, + endpointDefaults, + decorations + ); } else { - response = yield service.GetCacheEntryDownloadURL(ctx, request); + cache[methodName] = octokit.request.defaults(endpointDefaults); } - return Buffer.from(cache_1.GetCacheEntryDownloadURLResponse.toBinary(response)); - }); + return cache[methodName]; + } + }; + function 
endpointsToMethods(octokit) { + const newMethods = {}; + for (const scope of endpointMethodsMap.keys()) { + newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); + } + return newMethods; } - __name(handleCacheServiceGetCacheEntryDownloadURLProtobuf, "handleCacheServiceGetCacheEntryDownloadURLProtobuf"); - function handleCacheServiceDeleteCacheEntryProtobuf(ctx, service, data, interceptors) { - return __awaiter3(this, void 0, void 0, function* () { - let request; - let response; - try { - request = cache_1.DeleteCacheEntryRequest.fromBinary(data); - } catch (e) { - if (e instanceof Error) { - const msg = "the protobuf request could not be decoded"; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); - } - } - if (interceptors && interceptors.length > 0) { - const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); - response = yield interceptor(ctx, request, (ctx2, inputReq) => { - return service.DeleteCacheEntry(ctx2, inputReq); + __name(endpointsToMethods, "endpointsToMethods"); + function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + function withDecorations(...args) { + let options = requestWithDefaults.endpoint.merge(...args); + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: void 0 }); - } else { - response = yield service.DeleteCacheEntry(ctx, request); + return requestWithDefaults(options); } - return Buffer.from(cache_1.DeleteCacheEntryResponse.toBinary(response)); - }); - } - __name(handleCacheServiceDeleteCacheEntryProtobuf, "handleCacheServiceDeleteCacheEntryProtobuf"); - function handleCacheServiceListCacheEntriesProtobuf(ctx, service, data, interceptors) { - return __awaiter3(this, void 0, void 0, function* () { - let request; - let response; - try { - request = cache_1.ListCacheEntriesRequest.fromBinary(data); - } catch (e) { - if (e instanceof Error) { - const msg = "the protobuf request could not be decoded"; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); - } + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn( + `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` + ); } - if (interceptors && interceptors.length > 0) { - const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); - response = yield interceptor(ctx, request, (ctx2, inputReq) => { - return service.ListCacheEntries(ctx2, inputReq); - }); - } else { - response = yield service.ListCacheEntries(ctx, request); + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); } - return Buffer.from(cache_1.ListCacheEntriesResponse.toBinary(response)); - }); - } - __name(handleCacheServiceListCacheEntriesProtobuf, "handleCacheServiceListCacheEntriesProtobuf"); - function handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, interceptors) { - return __awaiter3(this, void 0, void 0, function* () { - let request; - let response; - try { - request = cache_1.LookupCacheEntryRequest.fromBinary(data); - } catch (e) { - if (e instanceof Error) { - const msg = "the protobuf request could not be decoded"; - throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + if (decorations.renamedParameters) { + const options2 = requestWithDefaults.endpoint.merge(...args); + for 
(const [name, alias] of Object.entries( + decorations.renamedParameters + )) { + if (name in options2) { + octokit.log.warn( + `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead` + ); + if (!(alias in options2)) { + options2[alias] = options2[name]; + } + delete options2[name]; + } } + return requestWithDefaults(options2); } - if (interceptors && interceptors.length > 0) { - const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); - response = yield interceptor(ctx, request, (ctx2, inputReq) => { - return service.LookupCacheEntry(ctx2, inputReq); - }); - } else { - response = yield service.LookupCacheEntry(ctx, request); - } - return Buffer.from(cache_1.LookupCacheEntryResponse.toBinary(response)); - }); + return requestWithDefaults(...args); + } + __name(withDecorations, "withDecorations"); + return Object.assign(withDecorations, requestWithDefaults); } - __name(handleCacheServiceLookupCacheEntryProtobuf, "handleCacheServiceLookupCacheEntryProtobuf"); + __name(decorate, "decorate"); + function restEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + rest: api + }; + } + __name(restEndpointMethods, "restEndpointMethods"); + restEndpointMethods.VERSION = VERSION3; + function legacyRestEndpointMethods(octokit) { + const api = endpointsToMethods(octokit); + return { + ...api, + rest: api + }; + } + __name(legacyRestEndpointMethods, "legacyRestEndpointMethods"); + legacyRestEndpointMethods.VERSION = VERSION3; } }); -// ../node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.js -var require_cacheTwirpClient = __commonJS({ - "../node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.js"(exports2) { +// ../node_modules/@actions/github/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js +var require_dist_node10 = __commonJS({ + "../node_modules/@actions/github/node_modules/@octokit/plugin-paginate-rest/dist-node/index.js"(exports2, module2) { "use strict"; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); + var __defProp2 = Object.defineProperty; + var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; + var __getOwnPropNames2 = Object.getOwnPropertyNames; + var __hasOwnProp2 = Object.prototype.hasOwnProperty; + var __export2 = /* @__PURE__ */ __name((target, all) => { + for (var name in all) + __defProp2(target, name, { get: all[name], enumerable: true }); + }, "__export"); + var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames2(from)) + if (!__hasOwnProp2.call(to, key) && key !== except) + __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected);
-        }
-        __name(step, "step");
-        step((generator = generator.apply(thisArg, _arguments || [])).next());
-      });
-    };
-    Object.defineProperty(exports2, "__esModule", { value: true });
-    exports2.internalCacheTwirpClient = void 0;
-    var core_1 = require_core();
-    var user_agent_1 = require_user_agent();
-    var errors_1 = require_errors2();
-    var config_1 = require_config();
-    var cacheUtils_1 = require_cacheUtils();
-    var auth_1 = require_auth();
-    var http_client_1 = require_lib();
-    var cache_twirp_1 = require_cache_twirp();
-    var CacheServiceClient = class {
-      static {
-        __name(this, "CacheServiceClient");
-      }
-      constructor(userAgent, maxAttempts, baseRetryIntervalMilliseconds, retryMultiplier) {
-        this.maxAttempts = 5;
-        this.baseRetryIntervalMilliseconds = 3e3;
-        this.retryMultiplier = 1.5;
-        const token = (0, cacheUtils_1.getRuntimeToken)();
-        this.baseUrl = (0, config_1.getCacheServiceURL)();
-        if (maxAttempts) {
-          this.maxAttempts = maxAttempts;
-        }
-        if (baseRetryIntervalMilliseconds) {
-          this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds;
-        }
-        if (retryMultiplier) {
-          this.retryMultiplier = retryMultiplier;
-        }
-        this.httpClient = new http_client_1.HttpClient(userAgent, [
-          new auth_1.BearerCredentialHandler(token)
-        ]);
-      }
-      // This function satisfies the Rpc interface. It is compatible with the JSON
-      // JSON generated client.
- request(service, method, contentType, data) {
-        return __awaiter3(this, void 0, void 0, function* () {
-          const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href;
-          (0, core_1.debug)(`[Request] ${method} ${url}`);
-          const headers = {
-            "Content-Type": contentType
-          };
-          try {
-            const { body } = yield this.retryableRequest(() => __awaiter3(this, void 0, void 0, function* () {
-              return this.httpClient.post(url, JSON.stringify(data), headers);
-            }));
-            return body;
-          } catch (error) {
-            throw new Error(`Failed to ${method}: ${error.message}`);
-          }
-        });
-      }
-      retryableRequest(operation) {
-        return __awaiter3(this, void 0, void 0, function* () {
-          let attempt = 0;
-          let errorMessage = "";
-          let rawBody = "";
-          while (attempt < this.maxAttempts) {
-            let isRetryable = false;
            try {
-              const response = yield operation();
-              const statusCode = response.message.statusCode;
-              rawBody = yield response.readBody();
-              (0, core_1.debug)(`[Response] - ${response.message.statusCode}`);
-              (0, core_1.debug)(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`);
-              const body = JSON.parse(rawBody);
-              (0, core_1.debug)(`Body: ${JSON.stringify(body, null, 2)}`);
-              if (this.isSuccessStatusCode(statusCode)) {
-                return { response, body };
-              }
-              isRetryable = this.isRetryableHttpStatusCode(statusCode);
-              errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`;
-              if (body.msg) {
-                if (errors_1.UsageError.isUsageErrorMessage(body.msg)) {
-                  throw new errors_1.UsageError();
-                }
-                errorMessage = `${errorMessage}: ${body.msg}`;
-              }
            } catch (error) {
-              if (error instanceof SyntaxError) {
-                (0, core_1.debug)(`Raw Body: ${rawBody}`);
-              }
-              if (error instanceof errors_1.UsageError) {
                throw error;
-              }
-              if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
-                throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code);
-              }
-              isRetryable = true;
-              errorMessage = error.message;
-            }
-            if (!isRetryable) {
-              throw new Error(`Received non-retryable error: ${errorMessage}`);
-            }
-            if (attempt + 1 === this.maxAttempts) {
-              throw new Error(`Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`);
            }
-            const retryTimeMilliseconds = this.getExponentialRetryTimeMilliseconds(attempt);
-            (0, core_1.info)(`Attempt ${attempt + 1} of ${this.maxAttempts} failed with error: ${errorMessage}. 
Retrying request in ${retryTimeMilliseconds} ms...`); - yield this.sleep(retryTimeMilliseconds); - attempt++; } - throw new Error(`Request failed`); - }); - } - isSuccessStatusCode(statusCode) { - if (!statusCode) - return false; - return statusCode >= 200 && statusCode < 300; - } - isRetryableHttpStatusCode(statusCode) { - if (!statusCode) - return false; - const retryableStatusCodes = [ - http_client_1.HttpCodes.BadGateway, - http_client_1.HttpCodes.GatewayTimeout, - http_client_1.HttpCodes.InternalServerError, - http_client_1.HttpCodes.ServiceUnavailable, - http_client_1.HttpCodes.TooManyRequests - ]; - return retryableStatusCodes.includes(statusCode); - } - sleep(milliseconds) { - return __awaiter3(this, void 0, void 0, function* () { - return new Promise((resolve) => setTimeout(resolve, milliseconds)); - }); + }) + }; + } + __name(iterator, "iterator"); + function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = void 0; } - getExponentialRetryTimeMilliseconds(attempt) { - if (attempt < 0) { - throw new Error("attempt should be a positive integer"); + return gather( + octokit, + [], + iterator(octokit, route, parameters)[Symbol.asyncIterator](), + mapFn + ); + } + __name(paginate, "paginate"); + function gather(octokit, results, iterator2, mapFn) { + return iterator2.next().then((result) => { + if (result.done) { + return results; } - if (attempt === 0) { - return this.baseRetryIntervalMilliseconds; + let earlyExit = false; + function done() { + earlyExit = true; } - const minTime = this.baseRetryIntervalMilliseconds * Math.pow(this.retryMultiplier, attempt); - const maxTime = minTime * this.retryMultiplier; - return Math.trunc(Math.random() * (maxTime - minTime) + minTime); + __name(done, "done"); + results = results.concat( + mapFn ? 
mapFn(result.value, done) : result.value.data + ); + if (earlyExit) { + return results; + } + return gather(octokit, results, iterator2, mapFn); + }); + } + __name(gather, "gather"); + var composePaginateRest = Object.assign(paginate, { + iterator + }); + var paginatingEndpoints = [ + "GET /advisories", + "GET /app/hook/deliveries", + "GET /app/installation-requests", + "GET /app/installations", + "GET /assignments/{assignment_id}/accepted_assignments", + "GET /classrooms", + "GET /classrooms/{classroom_id}/assignments", + "GET /enterprises/{enterprise}/dependabot/alerts", + "GET /enterprises/{enterprise}/secret-scanning/alerts", + "GET /events", + "GET /gists", + "GET /gists/public", + "GET /gists/starred", + "GET /gists/{gist_id}/comments", + "GET /gists/{gist_id}/commits", + "GET /gists/{gist_id}/forks", + "GET /installation/repositories", + "GET /issues", + "GET /licenses", + "GET /marketplace_listing/plans", + "GET /marketplace_listing/plans/{plan_id}/accounts", + "GET /marketplace_listing/stubbed/plans", + "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", + "GET /networks/{owner}/{repo}/events", + "GET /notifications", + "GET /organizations", + "GET /orgs/{org}/actions/cache/usage-by-repository", + "GET /orgs/{org}/actions/permissions/repositories", + "GET /orgs/{org}/actions/runners", + "GET /orgs/{org}/actions/secrets", + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", + "GET /orgs/{org}/actions/variables", + "GET /orgs/{org}/actions/variables/{name}/repositories", + "GET /orgs/{org}/blocks", + "GET /orgs/{org}/code-scanning/alerts", + "GET /orgs/{org}/codespaces", + "GET /orgs/{org}/codespaces/secrets", + "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", + "GET /orgs/{org}/copilot/billing/seats", + "GET /orgs/{org}/dependabot/alerts", + "GET /orgs/{org}/dependabot/secrets", + "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", + "GET /orgs/{org}/events", + "GET /orgs/{org}/failed_invitations", + "GET /orgs/{org}/hooks", + "GET /orgs/{org}/hooks/{hook_id}/deliveries", + "GET /orgs/{org}/installations", + "GET /orgs/{org}/invitations", + "GET /orgs/{org}/invitations/{invitation_id}/teams", + "GET /orgs/{org}/issues", + "GET /orgs/{org}/members", + "GET /orgs/{org}/members/{username}/codespaces", + "GET /orgs/{org}/migrations", + "GET /orgs/{org}/migrations/{migration_id}/repositories", + "GET /orgs/{org}/organization-roles/{role_id}/teams", + "GET /orgs/{org}/organization-roles/{role_id}/users", + "GET /orgs/{org}/outside_collaborators", + "GET /orgs/{org}/packages", + "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", + "GET /orgs/{org}/personal-access-token-requests", + "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", + "GET /orgs/{org}/personal-access-tokens", + "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories", + "GET /orgs/{org}/projects", + "GET /orgs/{org}/properties/values", + "GET /orgs/{org}/public_members", + "GET /orgs/{org}/repos", + "GET /orgs/{org}/rulesets", + "GET /orgs/{org}/rulesets/rule-suites", + "GET /orgs/{org}/secret-scanning/alerts", + "GET /orgs/{org}/security-advisories", + "GET /orgs/{org}/teams", + "GET /orgs/{org}/teams/{team_slug}/discussions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", + "GET 
/orgs/{org}/teams/{team_slug}/invitations", + "GET /orgs/{org}/teams/{team_slug}/members", + "GET /orgs/{org}/teams/{team_slug}/projects", + "GET /orgs/{org}/teams/{team_slug}/repos", + "GET /orgs/{org}/teams/{team_slug}/teams", + "GET /projects/columns/{column_id}/cards", + "GET /projects/{project_id}/collaborators", + "GET /projects/{project_id}/columns", + "GET /repos/{owner}/{repo}/actions/artifacts", + "GET /repos/{owner}/{repo}/actions/caches", + "GET /repos/{owner}/{repo}/actions/organization-secrets", + "GET /repos/{owner}/{repo}/actions/organization-variables", + "GET /repos/{owner}/{repo}/actions/runners", + "GET /repos/{owner}/{repo}/actions/runs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", + "GET /repos/{owner}/{repo}/actions/secrets", + "GET /repos/{owner}/{repo}/actions/variables", + "GET /repos/{owner}/{repo}/actions/workflows", + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", + "GET /repos/{owner}/{repo}/activity", + "GET /repos/{owner}/{repo}/assignees", + "GET /repos/{owner}/{repo}/branches", + "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", + "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", + "GET /repos/{owner}/{repo}/code-scanning/alerts", + "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", + "GET /repos/{owner}/{repo}/code-scanning/analyses", + "GET /repos/{owner}/{repo}/codespaces", + "GET /repos/{owner}/{repo}/codespaces/devcontainers", + "GET /repos/{owner}/{repo}/codespaces/secrets", + "GET /repos/{owner}/{repo}/collaborators", + "GET /repos/{owner}/{repo}/comments", + "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/commits", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", + "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", + "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", + "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", + "GET /repos/{owner}/{repo}/commits/{ref}/status", + "GET /repos/{owner}/{repo}/commits/{ref}/statuses", + "GET /repos/{owner}/{repo}/contributors", + "GET /repos/{owner}/{repo}/dependabot/alerts", + "GET /repos/{owner}/{repo}/dependabot/secrets", + "GET /repos/{owner}/{repo}/deployments", + "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", + "GET /repos/{owner}/{repo}/environments", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", + "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", + "GET /repos/{owner}/{repo}/events", + "GET /repos/{owner}/{repo}/forks", + "GET /repos/{owner}/{repo}/hooks", + "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", + "GET /repos/{owner}/{repo}/invitations", + "GET /repos/{owner}/{repo}/issues", + "GET /repos/{owner}/{repo}/issues/comments", + "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/issues/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", + "GET /repos/{owner}/{repo}/issues/{issue_number}/events", + "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", + "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", + "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", + "GET /repos/{owner}/{repo}/keys", + "GET /repos/{owner}/{repo}/labels", + "GET /repos/{owner}/{repo}/milestones", + "GET 
/repos/{owner}/{repo}/milestones/{milestone_number}/labels", + "GET /repos/{owner}/{repo}/notifications", + "GET /repos/{owner}/{repo}/pages/builds", + "GET /repos/{owner}/{repo}/projects", + "GET /repos/{owner}/{repo}/pulls", + "GET /repos/{owner}/{repo}/pulls/comments", + "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", + "GET /repos/{owner}/{repo}/releases", + "GET /repos/{owner}/{repo}/releases/{release_id}/assets", + "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", + "GET /repos/{owner}/{repo}/rules/branches/{branch}", + "GET /repos/{owner}/{repo}/rulesets", + "GET /repos/{owner}/{repo}/rulesets/rule-suites", + "GET /repos/{owner}/{repo}/secret-scanning/alerts", + "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", + "GET /repos/{owner}/{repo}/security-advisories", + "GET /repos/{owner}/{repo}/stargazers", + "GET /repos/{owner}/{repo}/subscribers", + "GET /repos/{owner}/{repo}/tags", + "GET /repos/{owner}/{repo}/teams", + "GET /repos/{owner}/{repo}/topics", + "GET /repositories", + "GET /repositories/{repository_id}/environments/{environment_name}/secrets", + "GET /repositories/{repository_id}/environments/{environment_name}/variables", + "GET /search/code", + "GET /search/commits", + "GET /search/issues", + "GET /search/labels", + "GET /search/repositories", + "GET /search/topics", + "GET /search/users", + "GET /teams/{team_id}/discussions", + "GET /teams/{team_id}/discussions/{discussion_number}/comments", + "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", + "GET /teams/{team_id}/discussions/{discussion_number}/reactions", + "GET /teams/{team_id}/invitations", + "GET /teams/{team_id}/members", + "GET /teams/{team_id}/projects", + "GET /teams/{team_id}/repos", + "GET /teams/{team_id}/teams", + "GET /user/blocks", + "GET /user/codespaces", + "GET /user/codespaces/secrets", + "GET /user/emails", + "GET /user/followers", + "GET /user/following", + "GET /user/gpg_keys", + "GET /user/installations", + "GET /user/installations/{installation_id}/repositories", + "GET /user/issues", + "GET /user/keys", + "GET /user/marketplace_purchases", + "GET /user/marketplace_purchases/stubbed", + "GET /user/memberships/orgs", + "GET /user/migrations", + "GET /user/migrations/{migration_id}/repositories", + "GET /user/orgs", + "GET /user/packages", + "GET /user/packages/{package_type}/{package_name}/versions", + "GET /user/public_emails", + "GET /user/repos", + "GET /user/repository_invitations", + "GET /user/social_accounts", + "GET /user/ssh_signing_keys", + "GET /user/starred", + "GET /user/subscriptions", + "GET /user/teams", + "GET /users", + "GET /users/{username}/events", + "GET /users/{username}/events/orgs/{org}", + "GET /users/{username}/events/public", + "GET /users/{username}/followers", + "GET /users/{username}/following", + "GET /users/{username}/gists", + "GET /users/{username}/gpg_keys", + "GET /users/{username}/keys", + "GET /users/{username}/orgs", + "GET /users/{username}/packages", + "GET /users/{username}/projects", + "GET /users/{username}/received_events", + "GET /users/{username}/received_events/public", + "GET /users/{username}/repos", + "GET 
/users/{username}/social_accounts", + "GET /users/{username}/ssh_signing_keys", + "GET /users/{username}/starred", + "GET /users/{username}/subscriptions" + ]; + function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } else { + return false; } - }; - function internalCacheTwirpClient(options) { - const client = new CacheServiceClient((0, user_agent_1.getUserAgentString)(), options === null || options === void 0 ? void 0 : options.maxAttempts, options === null || options === void 0 ? void 0 : options.retryIntervalMs, options === null || options === void 0 ? void 0 : options.retryMultiplier); - return new cache_twirp_1.CacheServiceClientJSON(client); } - __name(internalCacheTwirpClient, "internalCacheTwirpClient"); - exports2.internalCacheTwirpClient = internalCacheTwirpClient; + __name(isPaginatingEndpoint, "isPaginatingEndpoint"); + function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; + } + __name(paginateRest, "paginateRest"); + paginateRest.VERSION = VERSION3; } }); -// ../node_modules/@actions/cache/lib/internal/tar.js -var require_tar = __commonJS({ - "../node_modules/@actions/cache/lib/internal/tar.js"(exports2) { +// ../node_modules/@actions/github/lib/utils.js +var require_utils6 = __commonJS({ + "../node_modules/@actions/github/lib/utils.js"(exports2) { "use strict"; var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { if (k2 === void 0) k2 = k; @@ -98944,235 +99494,39 @@ var require_tar = __commonJS({ __setModuleDefault3(result, mod); return result; }; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getOctokitOptions = exports2.GitHub = exports2.defaults = exports2.context = void 0; + var Context = __importStar3(require_context2()); + var Utils = __importStar3(require_utils5()); + var core_1 = require_dist_node8(); + var plugin_rest_endpoint_methods_1 = require_dist_node9(); + var plugin_paginate_rest_1 = require_dist_node10(); + exports2.context = new Context.Context(); + var baseUrl = Utils.getApiBaseUrl(); + exports2.defaults = { + baseUrl, + request: { + agent: Utils.getProxyAgent(baseUrl), + fetch: Utils.getProxyFetch(baseUrl) } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createTar = exports2.extractTar = exports2.listTar = void 0; - var exec_1 = require_exec(); - var io2 = __importStar3(require_io()); - var fs_1 = require("fs"); - var path2 = __importStar3(require("path")); - var utils = __importStar3(require_cacheUtils()); - var constants_1 = require_constants7(); - var IS_WINDOWS = process.platform === "win32"; - function getTarPath() { - return __awaiter3(this, void 0, void 0, function* () { - switch (process.platform) { - case "win32": { - const gnuTar = yield utils.getGnuTarPathOnWindows(); - const systemTar = constants_1.SystemTarPathOnWindows; - if (gnuTar) { - return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; - } else if ((0, fs_1.existsSync)(systemTar)) { - return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; - } - break; - } - case "darwin": { - const gnuTar = yield io2.which("gtar", false); - if (gnuTar) { - return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; - } else { - return { - path: yield io2.which("tar", true), - type: constants_1.ArchiveToolType.BSD - }; - } - } - default: - break; - } - return { - path: yield io2.which("tar", true), - type: constants_1.ArchiveToolType.GNU - }; - }); - } - __name(getTarPath, "getTarPath"); - function getTarArgs(tarPath, compressionMethod, type, archivePath = "") { - return __awaiter3(this, void 0, void 0, function* () { - const args = [`"${tarPath.path}"`]; - const cacheFileName = utils.getCacheFileName(compressionMethod); - const tarFile = "cache.tar"; - const workingDirectory = getWorkingDirectory(); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; - switch (type) { - case "create": - args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); - break; - case "extract": - args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path2.sep}`, "g"), "/")); - break; - case "list": - args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "-P"); - break; - } - if (tarPath.type === constants_1.ArchiveToolType.GNU) { - switch (process.platform) { - case "win32": - args.push("--force-local"); - break; - case "darwin": - args.push("--delay-directory-restore"); - break; - } - } - return args; - }); - } - __name(getTarArgs, "getTarArgs"); - function getCommands(compressionMethod, type, archivePath = "") { - return __awaiter3(this, void 0, void 0, function* () { - let args; - const tarPath = yield getTarPath(); - const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); - const compressionArgs = type !== "create" ? 
yield getDecompressionProgram(tarPath, compressionMethod, archivePath) : yield getCompressionProgram(tarPath, compressionMethod); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; - if (BSD_TAR_ZSTD && type !== "create") { - args = [[...compressionArgs].join(" "), [...tarArgs].join(" ")]; - } else { - args = [[...tarArgs].join(" "), [...compressionArgs].join(" ")]; - } - if (BSD_TAR_ZSTD) { - return args; - } - return [args.join(" ")]; - }); - } - __name(getCommands, "getCommands"); - function getWorkingDirectory() { - var _a; - return (_a = process.env["GITHUB_WORKSPACE"]) !== null && _a !== void 0 ? _a : process.cwd(); - } - __name(getWorkingDirectory, "getWorkingDirectory"); - function getDecompressionProgram(tarPath, compressionMethod, archivePath) { - return __awaiter3(this, void 0, void 0, function* () { - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return BSD_TAR_ZSTD ? [ - "zstd -d --long=30 --force -o", - constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path2.sep}`, "g"), "/") - ] : [ - "--use-compress-program", - IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return BSD_TAR_ZSTD ? [ - "zstd -d --force -o", - constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path2.sep}`, "g"), "/") - ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -d"' : "unzstd"]; - default: - return ["-z"]; - } - }); - } - __name(getDecompressionProgram, "getDecompressionProgram"); - function getCompressionProgram(tarPath, compressionMethod) { - return __awaiter3(this, void 0, void 0, function* () { - const cacheFileName = utils.getCacheFileName(compressionMethod); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return BSD_TAR_ZSTD ? [ - "zstd -T0 --long=30 --force -o", - cacheFileName.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), - constants_1.TarFilename - ] : [ - "--use-compress-program", - IS_WINDOWS ? '"zstd -T0 --long=30"' : "zstdmt --long=30" - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return BSD_TAR_ZSTD ? [ - "zstd -T0 --force -o", - cacheFileName.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), - constants_1.TarFilename - ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -T0"' : "zstdmt"]; - default: - return ["-z"]; - } - }); - } - __name(getCompressionProgram, "getCompressionProgram"); - function execCommands(commands, cwd) { - return __awaiter3(this, void 0, void 0, function* () { - for (const command of commands) { - try { - yield (0, exec_1.exec)(command, void 0, { - cwd, - env: Object.assign(Object.assign({}, process.env), { MSYS: "winsymlinks:nativestrict" }) - }); - } catch (error) { - throw new Error(`${command.split(" ")[0]} failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); - } - } - }); - } - __name(execCommands, "execCommands"); - function listTar(archivePath, compressionMethod) { - return __awaiter3(this, void 0, void 0, function* () { - const commands = yield getCommands(compressionMethod, "list", archivePath); - yield execCommands(commands); - }); - } - __name(listTar, "listTar"); - exports2.listTar = listTar; - function extractTar(archivePath, compressionMethod) { - return __awaiter3(this, void 0, void 0, function* () { - const workingDirectory = getWorkingDirectory(); - yield io2.mkdirP(workingDirectory); - const commands = yield getCommands(compressionMethod, "extract", archivePath); - yield execCommands(commands); - }); - } - __name(extractTar, "extractTar"); - exports2.extractTar = extractTar; - function createTar(archiveFolder, sourceDirectories, compressionMethod) { - return __awaiter3(this, void 0, void 0, function* () { - (0, fs_1.writeFileSync)(path2.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); - const commands = yield getCommands(compressionMethod, "create"); - yield execCommands(commands, archiveFolder); - }); + exports2.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports2.defaults); + function getOctokitOptions(token, options) { + const opts = Object.assign({}, options || {}); + const auth = Utils.getAuthString(token, opts); + if (auth) { + opts.auth = auth; + } + return opts; } - __name(createTar, "createTar"); - exports2.createTar = createTar; + __name(getOctokitOptions, "getOctokitOptions"); + exports2.getOctokitOptions = getOctokitOptions; } }); -// ../node_modules/@actions/cache/lib/cache.js -var require_cache3 = __commonJS({ - "../node_modules/@actions/cache/lib/cache.js"(exports2) { +// ../node_modules/@actions/github/lib/github.js +var require_github = __commonJS({ + "../node_modules/@actions/github/lib/github.js"(exports2) { "use strict"; var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { if (k2 === void 0) k2 = k; @@ -99201,371 +99555,17 @@ var require_cache3 = __commonJS({ __setModuleDefault3(result, mod); return result; }; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core2 = __importStar3(require_core()); - var path2 = __importStar3(require("path")); - var utils = __importStar3(require_cacheUtils()); - var cacheHttpClient = __importStar3(require_cacheHttpClient()); - var cacheTwirpClient = __importStar3(require_cacheTwirpClient()); - var config_1 = require_config(); - var tar_1 = require_tar(); - var constants_1 = require_constants7(); - var ValidationError = class _ValidationError extends Error { - static { - __name(this, "ValidationError"); - } - constructor(message) { - super(message); - this.name = "ValidationError"; - Object.setPrototypeOf(this, _ValidationError.prototype); - } - }; - exports2.ValidationError = ValidationError; - var ReserveCacheError = class _ReserveCacheError extends Error { - static { - __name(this, "ReserveCacheError"); - } - constructor(message) { - super(message); - this.name = "ReserveCacheError"; - Object.setPrototypeOf(this, _ReserveCacheError.prototype); - } - }; - exports2.ReserveCacheError = ReserveCacheError; - function checkPaths(paths) { - if (!paths || paths.length === 0) { - throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); - } - } - __name(checkPaths, "checkPaths"); - function checkKey(key) { - if (key.length > 512) { - throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); - } - const regex = /^[^,]*$/; - if (!regex.test(key)) { - throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); - } - } - __name(checkKey, "checkKey"); - function isFeatureAvailable() { - return !!process.env["ACTIONS_CACHE_URL"]; - } - __name(isFeatureAvailable, "isFeatureAvailable"); - exports2.isFeatureAvailable = isFeatureAvailable; - function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { - return __awaiter3(this, void 0, void 0, function* () { - const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core2.debug(`Cache service version: ${cacheServiceVersion}`); - checkPaths(paths); - switch (cacheServiceVersion) { - case "v2": - return yield restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); - case "v1": - default: - return yield restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); - } - }); - } - __name(restoreCache, "restoreCache"); - exports2.restoreCache = restoreCache; - function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { - return __awaiter3(this, void 0, void 0, function* () { - restoreKeys = restoreKeys || []; - const keys = [primaryKey, ...restoreKeys]; - core2.debug("Resolved Keys:"); - core2.debug(JSON.stringify(keys)); - if (keys.length > 10) { - throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); - } - for (const key of keys) { - checkKey(key); - } - const compressionMethod = yield utils.getCompressionMethod(); - let archivePath = ""; - try { - const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod, - enableCrossOsArchive - }); - if (!(cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.archiveLocation)) { - return void 0; - } - if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core2.info("Lookup only - skipping download"); - return cacheEntry.cacheKey; - } - archivePath = path2.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core2.debug(`Archive Path: ${archivePath}`); - yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core2.isDebug()) { - yield (0, tar_1.listTar)(archivePath, compressionMethod); - } - const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core2.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core2.info("Cache restored successfully"); - return cacheEntry.cacheKey; - } catch (error) { - const typedError = error; - if (typedError.name === ValidationError.name) { - throw error; - } else { - core2.warning(`Failed to restore: ${error.message}`); - } - } finally { - try { - yield utils.unlinkFile(archivePath); - } catch (error) { - core2.debug(`Failed to delete archive: ${error}`); - } - } - return void 0; - }); - } - __name(restoreCacheV1, "restoreCacheV1"); - function restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { - return __awaiter3(this, void 0, void 0, function* () { - options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); - restoreKeys = restoreKeys || []; - const keys = [primaryKey, ...restoreKeys]; - core2.debug("Resolved Keys:"); - core2.debug(JSON.stringify(keys)); - if (keys.length > 10) { - throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); - } - for (const key of keys) { - checkKey(key); - } - let archivePath = ""; - try { - const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); - const compressionMethod = yield utils.getCompressionMethod(); - const request = { - key: primaryKey, - restoreKeys, - version: utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive) - }; - const response = yield twirpClient.GetCacheEntryDownloadURL(request); - if (!response.ok) { - core2.warning(`Cache not found for keys: ${keys.join(", ")}`); - return void 0; - } - core2.info(`Cache hit for: ${request.key}`); - if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { - core2.info("Lookup only - skipping download"); - return response.matchedKey; - } - archivePath = path2.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core2.debug(`Archive path: ${archivePath}`); - core2.debug(`Starting download of archive to: ${archivePath}`); - yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); - const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core2.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core2.isDebug()) { - yield (0, tar_1.listTar)(archivePath, compressionMethod); - } - yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core2.info("Cache restored successfully"); - return response.matchedKey; - } catch (error) { - const typedError = error; - if (typedError.name === ValidationError.name) { - throw error; - } else { - core2.warning(`Failed to restore: ${error.message}`); - } - } finally { - try { - if (archivePath) { - yield utils.unlinkFile(archivePath); - } - } catch (error) { - core2.debug(`Failed to delete archive: ${error}`); - } - } - return void 0; - }); - } - __name(restoreCacheV2, "restoreCacheV2"); - function saveCache(paths, key, options, enableCrossOsArchive = false) { - return __awaiter3(this, void 0, void 0, function* () { - const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core2.debug(`Cache service version: ${cacheServiceVersion}`); - checkPaths(paths); - checkKey(key); - switch (cacheServiceVersion) { - case "v2": - return yield saveCacheV2(paths, key, options, enableCrossOsArchive); - case "v1": - default: - return yield saveCacheV1(paths, key, options, enableCrossOsArchive); - } - }); - } - __name(saveCache, "saveCache"); - exports2.saveCache = saveCache; - function saveCacheV1(paths, key, options, enableCrossOsArchive = false) { - var _a, _b, _c, _d, _e; - return __awaiter3(this, void 0, void 0, function* () { - const compressionMethod = yield utils.getCompressionMethod(); - let cacheId = -1; - const cachePaths = yield utils.resolvePaths(paths); - core2.debug("Cache Paths:"); - core2.debug(`${JSON.stringify(cachePaths)}`); - if (cachePaths.length === 0) { - throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); - } - const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path2.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core2.debug(`Archive Path: ${archivePath}`); - try { - yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core2.isDebug()) { - yield (0, tar_1.listTar)(archivePath, compressionMethod); - } - const fileSizeLimit = 10 * 1024 * 1024 * 1024; - const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core2.debug(`File Size: ${archiveFileSize}`); - if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { - throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); - } - core2.debug("Reserving Cache"); - const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { - compressionMethod, - enableCrossOsArchive, - cacheSize: archiveFileSize - }); - if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? 
void 0 : _a.cacheId) { - cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId; - } else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) { - throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); - } else { - throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); - } - core2.debug(`Saving Cache (ID: ${cacheId})`); - yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); - } catch (error) { - const typedError = error; - if (typedError.name === ValidationError.name) { - throw error; - } else if (typedError.name === ReserveCacheError.name) { - core2.info(`Failed to save: ${typedError.message}`); - } else { - core2.warning(`Failed to save: ${typedError.message}`); - } - } finally { - try { - yield utils.unlinkFile(archivePath); - } catch (error) { - core2.debug(`Failed to delete archive: ${error}`); - } - } - return cacheId; - }); - } - __name(saveCacheV1, "saveCacheV1"); - function saveCacheV2(paths, key, options, enableCrossOsArchive = false) { - return __awaiter3(this, void 0, void 0, function* () { - options = Object.assign(Object.assign({}, options), { uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8, useAzureSdk: true }); - const compressionMethod = yield utils.getCompressionMethod(); - const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); - let cacheId = -1; - const cachePaths = yield utils.resolvePaths(paths); - core2.debug("Cache Paths:"); - core2.debug(`${JSON.stringify(cachePaths)}`); - if (cachePaths.length === 0) { - throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); - } - const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path2.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core2.debug(`Archive Path: ${archivePath}`); - try { - yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core2.isDebug()) { - yield (0, tar_1.listTar)(archivePath, compressionMethod); - } - const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core2.debug(`File Size: ${archiveFileSize}`); - if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) { - throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); - } - options.archiveSizeBytes = archiveFileSize; - core2.debug("Reserving Cache"); - const version3 = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); - const request = { - key, - version: version3 - }; - const response = yield twirpClient.CreateCacheEntry(request); - if (!response.ok) { - throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); - } - core2.debug(`Attempting to upload cache located at: 
${archivePath}`); - yield cacheHttpClient.saveCache(cacheId, archivePath, response.signedUploadUrl, options); - const finalizeRequest = { - key, - version: version3, - sizeBytes: `${archiveFileSize}` - }; - const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core2.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); - if (!finalizeResponse.ok) { - throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); - } - cacheId = parseInt(finalizeResponse.entryId); - } catch (error) { - const typedError = error; - if (typedError.name === ValidationError.name) { - throw error; - } else if (typedError.name === ReserveCacheError.name) { - core2.info(`Failed to save: ${typedError.message}`); - } else { - core2.warning(`Failed to save: ${typedError.message}`); - } - } finally { - try { - yield utils.unlinkFile(archivePath); - } catch (error) { - core2.debug(`Failed to delete archive: ${error}`); - } - } - return cacheId; - }); + exports2.getOctokit = exports2.context = void 0; + var Context = __importStar3(require_context2()); + var utils_12 = require_utils6(); + exports2.context = new Context.Context(); + function getOctokit(token, options, ...additionalPlugins) { + const GitHubWithPlugins = utils_12.GitHub.plugin(...additionalPlugins); + return new GitHubWithPlugins((0, utils_12.getOctokitOptions)(token, options)); } - __name(saveCacheV2, "saveCacheV2"); + __name(getOctokit, "getOctokit"); + exports2.getOctokit = getOctokit; } }); @@ -134663,7 +134663,7 @@ var require_download_artifact = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.downloadArtifactInternal = exports2.downloadArtifactPublic = exports2.streamExtractExternal = void 0; var promises_1 = __importDefault2(require("fs/promises")); - var github2 = __importStar3(require_github2()); + var github = __importStar3(require_github2()); var core2 = __importStar3(require_core()); var httpClient = __importStar3(require_lib()); var unzip_stream_1 = __importDefault2(require_unzip()); @@ -134743,7 +134743,7 @@ var require_download_artifact = __commonJS({ function downloadArtifactPublic(artifactId, repositoryOwner, repositoryName, token, options) { return __awaiter3(this, void 0, void 0, function* () { const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? 
void 0 : options.path); - const api = github2.getOctokit(token); + const api = github.getOctokit(token); core2.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`); const { headers, status } = yield api.rest.actions.downloadArtifact({ owner: repositoryOwner, @@ -136414,8 +136414,8 @@ var require_get_artifact = __commonJS({ retry: retryOpts, request: requestOpts }; - const github2 = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog); - const getArtifactResp = yield github2.request("GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts{?name}", { + const github = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog); + const getArtifactResp = yield github.request("GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts{?name}", { owner: repositoryOwner, repo: repositoryName, run_id: workflowRunId, @@ -136541,9 +136541,9 @@ var require_delete_artifact = __commonJS({ retry: retryOpts, request: requestOpts }; - const github2 = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog); + const github = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog); const getArtifactResp = yield (0, get_artifact_1.getArtifactPublic)(artifactName, workflowRunId, repositoryOwner, repositoryName, token); - const deleteArtifactResp = yield github2.rest.actions.deleteArtifact({ + const deleteArtifactResp = yield github.rest.actions.deleteArtifact({ owner: repositoryOwner, repo: repositoryName, artifact_id: getArtifactResp.artifact.id @@ -136655,9 +136655,9 @@ var require_list_artifacts = __commonJS({ retry: retryOpts, request: requestOpts }; - const github2 = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog); + const github = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog); let currentPageNumber = 1; - const { data: listArtifactResponse } = yield github2.rest.actions.listWorkflowRunArtifacts({ + const { data: listArtifactResponse } = yield github.rest.actions.listWorkflowRunArtifacts({ owner: repositoryOwner, repo: repositoryName, run_id: workflowRunId, @@ -136681,7 +136681,7 @@ var require_list_artifacts = __commonJS({ for (currentPageNumber; currentPageNumber < numberOfPages; currentPageNumber++) { currentPageNumber++; (0, core_1.debug)(`Fetching page ${currentPageNumber} of artifact list`); - const { data: listArtifactResponse2 } = yield github2.rest.actions.listWorkflowRunArtifacts({ + const { data: listArtifactResponse2 } = yield github.rest.actions.listWorkflowRunArtifacts({ owner: repositoryOwner, repo: repositoryName, run_id: workflowRunId, @@ -137570,7 +137570,7 @@ var require_utils10 = __commonJS({ var cache = __importStar3(require_cache3()); var core2 = __importStar3(require_core()); var exec = __importStar3(require_exec()); - var github2 = __importStar3(require_github()); + var github = __importStar3(require_github()); var tc = __importStar3(require_tool_cache()); var artifact_1 = __importDefault2(require_artifact2()); var annotations_1 = require_annotations(); @@ -137608,7 +137608,7 @@ var require_utils10 = __commonJS({ __name(getInputs, "getInputs"); function getPrSha() { return __awaiter3(this, void 0, void 0, function* () { - const pr = github2.context.payload.pull_request; + const pr = github.context.payload.pull_request; if (process.env.QODANA_PR_SHA) { return process.env.QODANA_PR_SHA; } @@ -137627,7 +137627,7 
@@ var require_utils10 = __commonJS({ } __name(getPrSha, "getPrSha"); function getHeadSha() { - const c = github2.context; + const c = github.context; const pr = c.payload.pull_request; if (process.env.QODANA_REVISION) { return process.env.QODANA_REVISION; @@ -137659,12 +137659,12 @@ var require_utils10 = __commonJS({ __name(qodana, "qodana"); function pushQuickFixes(mode, commitMessage) { return __awaiter3(this, void 0, void 0, function* () { - var _a; + var _a, _b; if (mode === qodana_12.NONE) { return; } try { - const c = github2.context; + const c = github.context; const pr = c.payload.pull_request; let currentBranch = c.ref; if ((_a = pr === null || pr === void 0 ? void 0 : pr.head) === null || _a === void 0 ? void 0 : _a.ref) { @@ -137686,6 +137686,11 @@ var require_utils10 = __commonJS({ return; } if (mode === qodana_12.BRANCH) { + if ((_b = pr === null || pr === void 0 ? void 0 : pr.head) === null || _b === void 0 ? void 0 : _b.ref) { + const commitToCherryPick = (yield exec.getExecOutput("git", ["rev-parse", "HEAD"])).stdout.trim(); + yield git(["checkout", currentBranch]); + yield git(["cherry-pick", commitToCherryPick]); + } yield git(["push", "origin", currentBranch]); } else if (mode === qodana_12.PULL_REQUEST) { const newBranch = `qodana/quick-fixes-${currentCommit.slice(0, 7)}`; @@ -137793,8 +137798,8 @@ var require_utils10 = __commonJS({ core2.warning('Turn on "use-cache" option to use "cache-default-branch-only"'); } if (useCaches && cacheDefaultBranchOnly) { - const currentBranch = github2.context.payload.ref_name; - const defaultBranch = (_a = github2.context.payload.repository) === null || _a === void 0 ? void 0 : _a.default_branch; + const currentBranch = github.context.payload.ref_name; + const defaultBranch = (_a = github.context.payload.repository) === null || _a === void 0 ? 
void 0 : _a.default_branch;
 core2.debug(`Current branch: ${currentBranch} | Default branch: ${defaultBranch}`);
 return currentBranch === `refs/heads/${defaultBranch}`;
 }
@@ -137805,22 +137810,22 @@ var require_utils10 = __commonJS({
 if (!process.env["GITHUB_REPOSITORY"]) {
 return "";
 }
-      const runId = github2.context.runId;
-      const repo = github2.context.repo;
+      const runId = github.context.runId;
+      const repo = github.context.repo;
 const serverUrl = process.env["GITHUB_SERVER_URL"] || "https://github.com";
 return `${serverUrl}/${repo.owner}/${repo.repo}/actions/runs/${runId}`;
 }
 __name(getWorkflowRunUrl, "getWorkflowRunUrl");
 function postResultsToPRComments(toolName, content, postComment) {
 return __awaiter3(this, void 0, void 0, function* () {
-      const pr = github2.context.payload.pull_request;
+      const pr = github.context.payload.pull_request;
 if (!postComment || !pr) {
 return;
 }
 const comment_tag_pattern = ``;
 const body = `${content}
${comment_tag_pattern}`;
-      const client = github2.getOctokit(getInputs().githubToken);
+      const client = github.getOctokit(getInputs().githubToken);
 const comment_id = yield findCommentByTag(client, comment_tag_pattern);
 if (comment_id !== -1) {
 yield updateComment(client, comment_id, body);
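The hunks above only rename the bundled `github2` alias back to `github`; the comment-upsert logic itself is unchanged: post a PR comment carrying a hidden marker, then on later runs find and update that comment instead of posting a new one. A minimal standalone sketch of the pattern in TypeScript — the `tag` value here is hypothetical (the action derives its own marker from the tool name), while `github.getOctokit` and `client.rest.issues.*` are the same @actions/github APIs the bundle calls above:

    import * as github from '@actions/github'

    // Upsert a PR comment: find an earlier comment by its hidden marker and
    // update it, otherwise create a new one. Sketch only; the tag is
    // hypothetical, the rest mirrors the bundled code above.
    async function upsertPrComment(token: string, content: string): Promise<void> {
      const tag = '<!-- qodana-report -->' // hypothetical marker
      const body = `${content}\n${tag}`
      const client = github.getOctokit(token)
      const { data: comments } = await client.rest.issues.listComments({
        ...github.context.repo,
        issue_number: github.context.issue.number
      })
      const existing = comments.find(c => c.body?.includes(tag))
      if (existing) {
        await client.rest.issues.updateComment({
          ...github.context.repo,
          comment_id: existing.id,
          body
        })
      } else {
        await client.rest.issues.createComment({
          ...github.context.repo,
          issue_number: github.context.issue.number,
          body
        })
      }
    }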
@@ -137833,7 +137838,7 @@ ${comment_tag_pattern}`;
 function findCommentByTag(client, tag) {
 return __awaiter3(this, void 0, void 0, function* () {
 try {
-      const { data: comments } = yield client.rest.issues.listComments(Object.assign(Object.assign({}, github2.context.repo), { issue_number: github2.context.issue.number }));
+      const { data: comments } = yield client.rest.issues.listComments(Object.assign(Object.assign({}, github.context.repo), { issue_number: github.context.issue.number }));
 const comment = comments.find((c) => {
 var _a;
 return (_a = c === null || c === void 0 ? void 0 : c.body) === null || _a === void 0 ? void 0 : _a.includes(tag);
@@ -137850,9 +137855,9 @@ ${comment_tag_pattern}`;
 return __awaiter3(this, void 0, void 0, function* () {
 try {
 yield client.rest.issues.createComment({
-        owner: github2.context.repo.owner,
-        repo: github2.context.repo.repo,
-        issue_number: github2.context.issue.number,
+        owner: github.context.repo.owner,
+        repo: github.context.repo.repo,
+        issue_number: github.context.issue.number,
 body
 });
 } catch (error) {
@@ -137865,8 +137870,8 @@ ${comment_tag_pattern}`;
 return __awaiter3(this, void 0, void 0, function* () {
 try {
 yield client.rest.issues.updateComment({
-        owner: github2.context.repo.owner,
-        repo: github2.context.repo.repo,
+        owner: github.context.repo.owner,
+        repo: github.context.repo.repo,
 comment_id,
 body
 });
@@ -137878,25 +137883,25 @@ ${comment_tag_pattern}`;
 __name(updateComment, "updateComment");
 function putReaction(newReaction, oldReaction) {
 return __awaiter3(this, void 0, void 0, function* () {
-      const pr = github2.context.payload.pull_request;
+      const pr = github.context.payload.pull_request;
 if (!pr) {
 return;
 }
-      const client = github2.getOctokit(getInputs().githubToken);
+      const client = github.getOctokit(getInputs().githubToken);
 const issue_number = pr.number;
 if (oldReaction !== "") {
 try {
-        const { data: reactions } = yield client.rest.reactions.listForIssue(Object.assign(Object.assign({}, github2.context.repo), { issue_number }));
+        const { data: reactions } = yield client.rest.reactions.listForIssue(Object.assign(Object.assign({}, github.context.repo), { issue_number }));
 const previousReaction = reactions.find((r) => r.content === oldReaction);
 if (previousReaction) {
-          yield client.rest.reactions.deleteForIssue(Object.assign(Object.assign({}, github2.context.repo), { issue_number, reaction_id: previousReaction.id }));
+          yield client.rest.reactions.deleteForIssue(Object.assign(Object.assign({}, github.context.repo), { issue_number, reaction_id: previousReaction.id }));
 }
 } catch (error) {
 core2.debug(`Failed to delete the initial reaction \u2013 ${error.message}`);
 }
 }
 try {
-      yield client.rest.reactions.createForIssue(Object.assign(Object.assign({}, github2.context.repo), { issue_number, content: newReaction }));
+      yield client.rest.reactions.createForIssue(Object.assign(Object.assign({}, github.context.repo), { issue_number, content: newReaction }));
 } catch (error) {
 core2.debug(`Failed to set reaction \u2013 ${error.message}`);
 }
@@ -137906,14 +137911,14 @@ ${comment_tag_pattern}`;
 function publishGitHubCheck(failedByThreshold, name, output) {
 return __awaiter3(this, void 0, void 0, function* () {
 const conclusion = (0, annotations_1.getGitHubCheckConclusion)(output.annotations, failedByThreshold);
-      const c = github2.context;
+      const c = github.context;
 const pr = c.payload.pull_request;
 let sha = c.sha;
 if (pr) {
 sha = pr.head.sha;
 }
-      const client = github2.getOctokit(getInputs().githubToken);
-      const result = yield client.rest.checks.listForRef(Object.assign(Object.assign({}, github2.context.repo), { ref: sha }));
+      const client = github.getOctokit(getInputs().githubToken);
+      const result = yield client.rest.checks.listForRef(Object.assign(Object.assign({}, github.context.repo), { ref: sha }));
 const checkExists = result.data.check_runs.find((check) => check.name === name);
 if (checkExists) {
 yield updateCheck(client, conclusion, checkExists.id, output);
@@ -137925,7 +137930,7 @@ ${comment_tag_pattern}`;
 __name(publishGitHubCheck, "publishGitHubCheck");
 function createCheck(client, conclusion, head_sha, name, output) {
 return __awaiter3(this, void 0, void 0, function* () {
-      yield client.rest.checks.create(Object.assign(Object.assign({}, github2.context.repo), {
+      yield client.rest.checks.create(Object.assign(Object.assign({}, github.context.repo), {
 accept: "application/vnd.github.v3+json",
 status: "completed",
 head_sha,
@@ -137938,7 +137943,7 @@ ${comment_tag_pattern}`;
 __name(createCheck, "createCheck");
 function updateCheck(client, conclusion, check_run_id, output) {
 return __awaiter3(this, void 0, void 0, function* () {
-      yield client.rest.checks.update(Object.assign(Object.assign({}, github2.context.repo), {
+      yield client.rest.checks.update(Object.assign(Object.assign({}, github.context.repo), {
 accept: "application/vnd.github.v3+json",
 status: "completed",
 conclusion,
@@ -138057,7 +138062,6 @@ var __awaiter2 = exports && exports.__awaiter || function(thisArg, _arguments, P) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 var core = __importStar2(require_core());
-var github = __importStar2(require_github());
 var io = __importStar2(require_io());
 var qodana_1 = (init_qodana(), __toCommonJS(qodana_exports));
 var utils_1 = require_utils10();
@@ -138071,10 +138075,6 @@ function main() {
 return __awaiter2(this, void 0, void 0, function* () {
 try {
 const inputs = (0, utils_1.getInputs)();
-    if (inputs.pushFixes !== qodana_1.NONE && inputs.prMode && github.context.payload.pull_request !== void 0) {
-      inputs.pushFixes = qodana_1.NONE;
-      core.warning(`push-fixes is currently not supported with pr-mode: true in pull requests. Running Qodana with push-fixes: ${inputs.pushFixes}.`);
-    }
 yield io.mkdirP(inputs.resultsDir);
 yield io.mkdirP(inputs.cacheDir);
 const restoreCachesPromise = (0, utils_1.restoreCaches)(inputs.cacheDir, inputs.primaryCacheKey, inputs.additionalCacheKey, inputs.useCaches);
diff --git a/scan/src/main.ts b/scan/src/main.ts
index fbec2563..e5973946 100644
--- a/scan/src/main.ts
+++ b/scan/src/main.ts
@@ -15,13 +15,11 @@
 */
 import * as core from '@actions/core'
-import * as github from '@actions/github'
 import * as io from '@actions/io'
 import {
 FAIL_THRESHOLD_OUTPUT,
 QodanaExitCode,
 isExecutionSuccessful,
-  NONE,
 extractArg
 } from '../../common/qodana'
 import {
@@ -62,16 +60,6 @@ function setFailed(message: string): void {
 async function main(): Promise<void> {
 try {
 const inputs = getInputs()
-    if (
-      inputs.pushFixes !== NONE &&
-      inputs.prMode &&
-      github.context.payload.pull_request !== undefined
-    ) {
-      inputs.pushFixes = NONE
-      core.warning(
-        `push-fixes is currently not supported with pr-mode: true in pull requests. Running Qodana with push-fixes: ${inputs.pushFixes}.`
-      )
-    }
 await io.mkdirP(inputs.resultsDir)
 await io.mkdirP(inputs.cacheDir)
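The two hunks above drop the main.ts guard that force-disabled push-fixes whenever pr-mode ran on a pull request. BRANCH mode now has to handle that case itself, which is what the utils.ts hunk below adds: under pr-mode the workflow sits on a detached merge ref (the default actions/checkout behaviour for pull_request events), so the quick-fix commit is cherry-picked onto the real PR head branch before pushing. A standalone TypeScript sketch of that flow, assuming the repo's own `git` exec wrapper and a known PR head ref:

    import * as exec from '@actions/exec'

    // Sketch of the new BRANCH-mode path in pushQuickFixes (see hunk below).
    // Assumes: `git` is the action's exec wrapper, and HEAD currently points
    // at the quick-fix commit made on the detached merge ref.
    async function pushFixesToPrBranch(
      git: (args: string[]) => Promise<number>,
      prHeadRef: string // e.g. pull_request.head.ref
    ): Promise<void> {
      const commitToCherryPick = (
        await exec.getExecOutput('git', ['rev-parse', 'HEAD'])
      ).stdout.trim()
      await git(['checkout', prHeadRef]) // leave the detached merge ref
      await git(['cherry-pick', commitToCherryPick]) // replay the fix commit
      await git(['push', 'origin', prHeadRef])
    }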
diff --git a/scan/src/utils.ts b/scan/src/utils.ts
index 270501e7..0ed7b358 100644
--- a/scan/src/utils.ts
+++ b/scan/src/utils.ts
@@ -191,6 +191,13 @@ export async function pushQuickFixes(
 return
 }
 if (mode === BRANCH) {
+      if (pr?.head?.ref) {
+        const commitToCherryPick = (
+          await exec.getExecOutput('git', ['rev-parse', 'HEAD'])
+        ).stdout.trim()
+        await git(['checkout', currentBranch])
+        await git(['cherry-pick', commitToCherryPick])
+      }
 await git(['push', 'origin', currentBranch])
 } else if (mode === PULL_REQUEST) {
 const newBranch = `qodana/quick-fixes-${currentCommit.slice(0, 7)}`
diff --git a/vsts/vss-extension.dev.json b/vsts/vss-extension.dev.json
index fcd350c3..60210a1a 100644
--- a/vsts/vss-extension.dev.json
+++ b/vsts/vss-extension.dev.json
@@ -2,7 +2,7 @@
 "manifestVersion": 1,
 "id": "qodana-dev",
 "name": "Qodana (Dev)",
-  "version": "2024.3.132",
+  "version": "2024.3.134",
 "publisher": "JetBrains",
 "targets": [
 {
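For contrast with the BRANCH-mode change above, PULL_REQUEST mode never touches the source branch: it parks the fix commit on a dedicated `qodana/quick-fixes-*` branch. In the TypeScript sketch below, only the branch-name derivation is taken from the hunk above; the `checkout -b`, the push, and the later pull-request creation are assumptions about the surrounding utils.ts code:

    import * as exec from '@actions/exec'

    // Sketch of the PULL_REQUEST-mode counterpart: park the quick fixes on
    // their own branch named after the commit they were generated from.
    async function pushFixesToNewBranch(
      git: (args: string[]) => Promise<number> // the action's exec wrapper
    ): Promise<void> {
      const currentCommit = (
        await exec.getExecOutput('git', ['rev-parse', 'HEAD'])
      ).stdout.trim()
      const newBranch = `qodana/quick-fixes-${currentCommit.slice(0, 7)}` // as in the hunk above
      await git(['checkout', '-b', newBranch]) // assumed: the branch is created here
      await git(['push', 'origin', newBranch])
      // Opening the actual pull request from newBranch happens elsewhere in utils.ts.
    }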