diff --git a/lib/start-proxy-action.js b/lib/start-proxy-action.js index 2cd120784e..3f6f400795 100644 --- a/lib/start-proxy-action.js +++ b/lib/start-proxy-action.js @@ -204,7 +204,7 @@ var require_file_command = __commonJS({ exports2.issueFileCommand = issueFileCommand; exports2.prepareKeyValueMessage = prepareKeyValueMessage; var crypto2 = __importStar2(require("crypto")); - var fs = __importStar2(require("fs")); + var fs2 = __importStar2(require("fs")); var os2 = __importStar2(require("os")); var utils_1 = require_utils(); function issueFileCommand(command, message) { @@ -212,10 +212,10 @@ var require_file_command = __commonJS({ if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } - if (!fs.existsSync(filePath)) { + if (!fs2.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, { + fs2.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, { encoding: "utf8" }); } @@ -1337,14 +1337,14 @@ var require_util = __commonJS({ } const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80; let origin = url.origin != null ? url.origin : `${url.protocol || ""}//${url.hostname || ""}:${port}`; - let path3 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`; + let path4 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`; if (origin[origin.length - 1] === "/") { origin = origin.slice(0, origin.length - 1); } - if (path3 && path3[0] !== "/") { - path3 = `/${path3}`; + if (path4 && path4[0] !== "/") { + path4 = `/${path4}`; } - return new URL(`${origin}${path3}`); + return new URL(`${origin}${path4}`); } if (!isHttpOrHttpsPrefixed(url.origin || url.protocol)) { throw new InvalidArgumentError("Invalid URL protocol: the URL must start with `http:` or `https:`."); @@ -1795,39 +1795,39 @@ var require_diagnostics = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path3, origin } + request: { method, path: path4, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path3); + debuglog("sending request to %s %s/%s", method, origin, path4); }); diagnosticsChannel.channel("undici:request:headers").subscribe((evt) => { const { - request: { method, path: path3, origin }, + request: { method, path: path4, origin }, response: { statusCode } } = evt; debuglog( "received response to %s %s/%s - HTTP %d", method, origin, - path3, + path4, statusCode ); }); diagnosticsChannel.channel("undici:request:trailers").subscribe((evt) => { const { - request: { method, path: path3, origin } + request: { method, path: path4, origin } } = evt; - debuglog("trailers received from %s %s/%s", method, origin, path3); + debuglog("trailers received from %s %s/%s", method, origin, path4); }); diagnosticsChannel.channel("undici:request:error").subscribe((evt) => { const { - request: { method, path: path3, origin }, + request: { method, path: path4, origin }, error: error3 } = evt; debuglog( "request to %s %s/%s errored - %s", method, origin, - path3, + path4, error3.message ); }); @@ -1876,9 +1876,9 @@ var require_diagnostics = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path3, origin } + request: { method, path: path4, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path3); + 
debuglog("sending request to %s %s/%s", method, origin, path4); }); } diagnosticsChannel.channel("undici:websocket:open").subscribe((evt) => { @@ -1941,7 +1941,7 @@ var require_request = __commonJS({ var kHandler = /* @__PURE__ */ Symbol("handler"); var Request = class { constructor(origin, { - path: path3, + path: path4, method, body, headers, @@ -1956,11 +1956,11 @@ var require_request = __commonJS({ expectContinue, servername }, handler2) { - if (typeof path3 !== "string") { + if (typeof path4 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path3[0] !== "/" && !(path3.startsWith("http://") || path3.startsWith("https://")) && method !== "CONNECT") { + } else if (path4[0] !== "/" && !(path4.startsWith("http://") || path4.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.test(path3)) { + } else if (invalidPathRegex.test(path4)) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -2023,7 +2023,7 @@ var require_request = __commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? buildURL(path3, query) : path3; + this.path = query ? buildURL(path4, query) : path4; this.origin = origin; this.idempotent = idempotent == null ? method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? false : blocking; @@ -6536,7 +6536,7 @@ var require_client_h1 = __commonJS({ return method !== "GET" && method !== "HEAD" && method !== "OPTIONS" && method !== "TRACE" && method !== "CONNECT"; } function writeH1(client, request2) { - const { method, path: path3, host, upgrade, blocking, reset } = request2; + const { method, path: path4, host, upgrade, blocking, reset } = request2; let { body, headers, contentLength } = request2; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH" || method === "QUERY" || method === "PROPFIND" || method === "PROPPATCH"; if (util.isFormDataLike(body)) { @@ -6602,7 +6602,7 @@ var require_client_h1 = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path3} HTTP/1.1\r + let header = `${method} ${path4} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -7128,7 +7128,7 @@ var require_client_h2 = __commonJS({ } function writeH2(client, request2) { const session = client[kHTTP2Session]; - const { method, path: path3, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; + const { method, path: path4, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; let { body } = request2; if (upgrade) { util.errorRequest(client, request2, new Error("Upgrade not supported for H2")); @@ -7195,7 +7195,7 @@ var require_client_h2 = __commonJS({ }); return true; } - headers[HTTP2_HEADER_PATH] = path3; + headers[HTTP2_HEADER_PATH] = path4; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { @@ -7548,9 +7548,9 @@ var require_redirect_handler = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path3 = search ? `${pathname}${search}` : pathname; + const path4 = search ? 
`${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path3; + this.opts.path = path4; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -8784,10 +8784,10 @@ var require_proxy_agent = __commonJS({ }; const { origin, - path: path3 = "/", + path: path4 = "/", headers = {} } = opts; - opts.path = origin + path3; + opts.path = origin + path4; if (!("host" in headers) && !("Host" in headers)) { const { host } = new URL2(origin); headers.host = host; @@ -10708,20 +10708,20 @@ var require_mock_utils = __commonJS({ } return true; } - function safeUrl(path3) { - if (typeof path3 !== "string") { - return path3; + function safeUrl(path4) { + if (typeof path4 !== "string") { + return path4; } - const pathSegments = path3.split("?"); + const pathSegments = path4.split("?"); if (pathSegments.length !== 2) { - return path3; + return path4; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path3, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path3); + function matchKey(mockDispatch2, { path: path4, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path4); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -10743,7 +10743,7 @@ var require_mock_utils = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path3 }) => matchValue(safeUrl(path3), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path4 }) => matchValue(safeUrl(path4), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -10781,9 +10781,9 @@ var require_mock_utils = __commonJS({ } } function buildKey(opts) { - const { path: path3, method, body, headers, query } = opts; + const { path: path4, method, body, headers, query } = opts; return { - path: path3, + path: path4, method, body, headers, @@ -11246,10 +11246,10 @@ var require_pending_interceptors_formatter = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path3, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path4, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path3, + Path: path4, "Status code": statusCode, Persistent: persist ? 
PERSISTENT : NOT_PERSISTENT, Invocations: timesInvoked, @@ -16130,9 +16130,9 @@ var require_util6 = __commonJS({ } } } - function validateCookiePath(path3) { - for (let i = 0; i < path3.length; ++i) { - const code = path3.charCodeAt(i); + function validateCookiePath(path4) { + for (let i = 0; i < path4.length; ++i) { + const code = path4.charCodeAt(i); if (code < 32 || // exclude CTLs (0-31) code === 127 || // DEL code === 59) { @@ -18726,11 +18726,11 @@ var require_undici = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path3 = opts.path; + let path4 = opts.path; if (!opts.path.startsWith("/")) { - path3 = `/${path3}`; + path4 = `/${path4}`; } - url = new URL(util.parseOrigin(url).origin + path3); + url = new URL(util.parseOrigin(url).origin + path4); } else { if (!opts) { opts = typeof url === "object" ? url : {}; @@ -20033,7 +20033,7 @@ var require_path_utils = __commonJS({ exports2.toPosixPath = toPosixPath; exports2.toWin32Path = toWin32Path; exports2.toPlatformPath = toPlatformPath; - var path3 = __importStar2(require("path")); + var path4 = __importStar2(require("path")); function toPosixPath(pth) { return pth.replace(/[\\]/g, "/"); } @@ -20041,7 +20041,7 @@ var require_path_utils = __commonJS({ return pth.replace(/[/]/g, "\\"); } function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path3.sep); + return pth.replace(/[/\\]/g, path4.sep); } } }); @@ -20123,13 +20123,13 @@ var require_io_util = __commonJS({ exports2.isRooted = isRooted; exports2.tryGetExecutablePath = tryGetExecutablePath; exports2.getCmdPath = getCmdPath; - var fs = __importStar2(require("fs")); - var path3 = __importStar2(require("path")); - _a = fs.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; + var fs2 = __importStar2(require("fs")); + var path4 = __importStar2(require("path")); + _a = fs2.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; exports2.IS_WINDOWS = process.platform === "win32"; function readlink(fsPath) { return __awaiter2(this, void 0, void 0, function* () { - const result = yield fs.promises.readlink(fsPath); + const result = yield fs2.promises.readlink(fsPath); if (exports2.IS_WINDOWS && !result.endsWith("\\")) { return `${result}\\`; } @@ -20137,7 +20137,7 @@ var require_io_util = __commonJS({ }); } exports2.UV_FS_O_EXLOCK = 268435456; - exports2.READONLY = fs.constants.O_RDONLY; + exports2.READONLY = fs2.constants.O_RDONLY; function exists(fsPath) { return __awaiter2(this, void 0, void 0, function* () { try { @@ -20179,7 +20179,7 @@ var require_io_util = __commonJS({ } if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { - const upperExt = path3.extname(filePath).toUpperCase(); + const upperExt = path4.extname(filePath).toUpperCase(); if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { return filePath; } @@ -20203,11 +20203,11 @@ var require_io_util = __commonJS({ if (stats && stats.isFile()) { if (exports2.IS_WINDOWS) { 
try { - const directory = path3.dirname(filePath); - const upperName = path3.basename(filePath).toUpperCase(); + const directory = path4.dirname(filePath); + const upperName = path4.basename(filePath).toUpperCase(); for (const actualName of yield (0, exports2.readdir)(directory)) { if (upperName === actualName.toUpperCase()) { - filePath = path3.join(directory, actualName); + filePath = path4.join(directory, actualName); break; } } @@ -20319,7 +20319,7 @@ var require_io = __commonJS({ exports2.which = which4; exports2.findInPath = findInPath; var assert_1 = require("assert"); - var path3 = __importStar2(require("path")); + var path4 = __importStar2(require("path")); var ioUtil = __importStar2(require_io_util()); function cp(source_1, dest_1) { return __awaiter2(this, arguments, void 0, function* (source, dest, options = {}) { @@ -20328,7 +20328,7 @@ var require_io = __commonJS({ if (destStat && destStat.isFile() && !force) { return; } - const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path3.join(dest, path3.basename(source)) : dest; + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path4.join(dest, path4.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { throw new Error(`no such file or directory: ${source}`); } @@ -20340,7 +20340,7 @@ var require_io = __commonJS({ yield cpDirRecursive(source, newDest, 0, force); } } else { - if (path3.relative(source, newDest) === "") { + if (path4.relative(source, newDest) === "") { throw new Error(`'${newDest}' and '${source}' are the same file`); } yield copyFile2(source, newDest, force); @@ -20352,7 +20352,7 @@ var require_io = __commonJS({ if (yield ioUtil.exists(dest)) { let destExists = true; if (yield ioUtil.isDirectory(dest)) { - dest = path3.join(dest, path3.basename(source)); + dest = path4.join(dest, path4.basename(source)); destExists = yield ioUtil.exists(dest); } if (destExists) { @@ -20363,7 +20363,7 @@ var require_io = __commonJS({ } } } - yield mkdirP(path3.dirname(dest)); + yield mkdirP(path4.dirname(dest)); yield ioUtil.rename(source, dest); }); } @@ -20422,7 +20422,7 @@ var require_io = __commonJS({ } const extensions = []; if (ioUtil.IS_WINDOWS && process.env["PATHEXT"]) { - for (const extension of process.env["PATHEXT"].split(path3.delimiter)) { + for (const extension of process.env["PATHEXT"].split(path4.delimiter)) { if (extension) { extensions.push(extension); } @@ -20435,12 +20435,12 @@ var require_io = __commonJS({ } return []; } - if (tool.includes(path3.sep)) { + if (tool.includes(path4.sep)) { return []; } const directories = []; if (process.env.PATH) { - for (const p of process.env.PATH.split(path3.delimiter)) { + for (const p of process.env.PATH.split(path4.delimiter)) { if (p) { directories.push(p); } @@ -20448,7 +20448,7 @@ var require_io = __commonJS({ } const matches = []; for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path3.join(directory, tool), extensions); + const filePath = yield ioUtil.tryGetExecutablePath(path4.join(directory, tool), extensions); if (filePath) { matches.push(filePath); } @@ -20578,7 +20578,7 @@ var require_toolrunner = __commonJS({ var os2 = __importStar2(require("os")); var events = __importStar2(require("events")); var child = __importStar2(require("child_process")); - var path3 = __importStar2(require("path")); + var path4 = __importStar2(require("path")); var io4 = __importStar2(require_io()); var ioUtil = __importStar2(require_io_util()); var timers_1 = require("timers"); @@ 
-20793,7 +20793,7 @@ var require_toolrunner = __commonJS({ exec() { return __awaiter2(this, void 0, void 0, function* () { if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { - this.toolPath = path3.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + this.toolPath = path4.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } this.toolPath = yield io4.which(this.toolPath, true); return new Promise((resolve2, reject) => __awaiter2(this, void 0, void 0, function* () { @@ -21346,7 +21346,7 @@ var require_core = __commonJS({ var file_command_1 = require_file_command(); var utils_1 = require_utils(); var os2 = __importStar2(require("os")); - var path3 = __importStar2(require("path")); + var path4 = __importStar2(require("path")); var oidc_utils_1 = require_oidc_utils(); var ExitCode; (function(ExitCode2) { @@ -21372,7 +21372,7 @@ var require_core = __commonJS({ } else { (0, command_1.issueCommand)("add-path", {}, inputPath); } - process.env["PATH"] = `${inputPath}${path3.delimiter}${process.env["PATH"]}`; + process.env["PATH"] = `${inputPath}${path4.delimiter}${process.env["PATH"]}`; } function getInput2(name, options) { const val = process.env[`INPUT_${name.replace(/ /g, "_").toUpperCase()}`] || ""; @@ -39293,8 +39293,8 @@ var require_context = __commonJS({ if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); } else { - const path3 = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path3} does not exist${os_1.EOL}`); + const path4 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path4} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; @@ -40119,14 +40119,14 @@ var require_util10 = __commonJS({ } const port = url.port != null ? url.port : url.protocol === "https:" ? 443 : 80; let origin = url.origin != null ? url.origin : `${url.protocol || ""}//${url.hostname || ""}:${port}`; - let path3 = url.path != null ? url.path : `${url.pathname || ""}${url.search || ""}`; + let path4 = url.path != null ? 
url.path : `${url.pathname || ""}${url.search || ""}`; if (origin[origin.length - 1] === "/") { origin = origin.slice(0, origin.length - 1); } - if (path3 && path3[0] !== "/") { - path3 = `/${path3}`; + if (path4 && path4[0] !== "/") { + path4 = `/${path4}`; } - return new URL(`${origin}${path3}`); + return new URL(`${origin}${path4}`); } if (!isHttpOrHttpsPrefixed(url.origin || url.protocol)) { throw new InvalidArgumentError("Invalid URL protocol: the URL must start with `http:` or `https:`."); @@ -40577,39 +40577,39 @@ var require_diagnostics2 = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path3, origin } + request: { method, path: path4, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path3); + debuglog("sending request to %s %s/%s", method, origin, path4); }); diagnosticsChannel.channel("undici:request:headers").subscribe((evt) => { const { - request: { method, path: path3, origin }, + request: { method, path: path4, origin }, response: { statusCode } } = evt; debuglog( "received response to %s %s/%s - HTTP %d", method, origin, - path3, + path4, statusCode ); }); diagnosticsChannel.channel("undici:request:trailers").subscribe((evt) => { const { - request: { method, path: path3, origin } + request: { method, path: path4, origin } } = evt; - debuglog("trailers received from %s %s/%s", method, origin, path3); + debuglog("trailers received from %s %s/%s", method, origin, path4); }); diagnosticsChannel.channel("undici:request:error").subscribe((evt) => { const { - request: { method, path: path3, origin }, + request: { method, path: path4, origin }, error: error3 } = evt; debuglog( "request to %s %s/%s errored - %s", method, origin, - path3, + path4, error3.message ); }); @@ -40658,9 +40658,9 @@ var require_diagnostics2 = __commonJS({ }); diagnosticsChannel.channel("undici:client:sendHeaders").subscribe((evt) => { const { - request: { method, path: path3, origin } + request: { method, path: path4, origin } } = evt; - debuglog("sending request to %s %s/%s", method, origin, path3); + debuglog("sending request to %s %s/%s", method, origin, path4); }); } diagnosticsChannel.channel("undici:websocket:open").subscribe((evt) => { @@ -40723,7 +40723,7 @@ var require_request3 = __commonJS({ var kHandler = /* @__PURE__ */ Symbol("handler"); var Request = class { constructor(origin, { - path: path3, + path: path4, method, body, headers, @@ -40738,11 +40738,11 @@ var require_request3 = __commonJS({ expectContinue, servername }, handler2) { - if (typeof path3 !== "string") { + if (typeof path4 !== "string") { throw new InvalidArgumentError("path must be a string"); - } else if (path3[0] !== "/" && !(path3.startsWith("http://") || path3.startsWith("https://")) && method !== "CONNECT") { + } else if (path4[0] !== "/" && !(path4.startsWith("http://") || path4.startsWith("https://")) && method !== "CONNECT") { throw new InvalidArgumentError("path must be an absolute URL or start with a slash"); - } else if (invalidPathRegex.test(path3)) { + } else if (invalidPathRegex.test(path4)) { throw new InvalidArgumentError("invalid request path"); } if (typeof method !== "string") { @@ -40805,7 +40805,7 @@ var require_request3 = __commonJS({ this.completed = false; this.aborted = false; this.upgrade = upgrade || null; - this.path = query ? buildURL(path3, query) : path3; + this.path = query ? buildURL(path4, query) : path4; this.origin = origin; this.idempotent = idempotent == null ? 
method === "HEAD" || method === "GET" : idempotent; this.blocking = blocking == null ? false : blocking; @@ -45318,7 +45318,7 @@ var require_client_h12 = __commonJS({ return method !== "GET" && method !== "HEAD" && method !== "OPTIONS" && method !== "TRACE" && method !== "CONNECT"; } function writeH1(client, request2) { - const { method, path: path3, host, upgrade, blocking, reset } = request2; + const { method, path: path4, host, upgrade, blocking, reset } = request2; let { body, headers, contentLength } = request2; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH" || method === "QUERY" || method === "PROPFIND" || method === "PROPPATCH"; if (util.isFormDataLike(body)) { @@ -45384,7 +45384,7 @@ var require_client_h12 = __commonJS({ if (blocking) { socket[kBlocking] = true; } - let header = `${method} ${path3} HTTP/1.1\r + let header = `${method} ${path4} HTTP/1.1\r `; if (typeof host === "string") { header += `host: ${host}\r @@ -45910,7 +45910,7 @@ var require_client_h22 = __commonJS({ } function writeH2(client, request2) { const session = client[kHTTP2Session]; - const { method, path: path3, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; + const { method, path: path4, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; let { body } = request2; if (upgrade) { util.errorRequest(client, request2, new Error("Upgrade not supported for H2")); @@ -45977,7 +45977,7 @@ var require_client_h22 = __commonJS({ }); return true; } - headers[HTTP2_HEADER_PATH] = path3; + headers[HTTP2_HEADER_PATH] = path4; headers[HTTP2_HEADER_SCHEME] = "https"; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { @@ -46330,9 +46330,9 @@ var require_redirect_handler2 = __commonJS({ return this.handler.onHeaders(statusCode, headers, resume, statusText); } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))); - const path3 = search ? `${pathname}${search}` : pathname; + const path4 = search ? 
`${pathname}${search}` : pathname; this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin); - this.opts.path = path3; + this.opts.path = path4; this.opts.origin = origin; this.opts.maxRedirections = 0; this.opts.query = null; @@ -47566,10 +47566,10 @@ var require_proxy_agent2 = __commonJS({ }; const { origin, - path: path3 = "/", + path: path4 = "/", headers = {} } = opts; - opts.path = origin + path3; + opts.path = origin + path4; if (!("host" in headers) && !("Host" in headers)) { const { host } = new URL2(origin); headers.host = host; @@ -49490,20 +49490,20 @@ var require_mock_utils2 = __commonJS({ } return true; } - function safeUrl(path3) { - if (typeof path3 !== "string") { - return path3; + function safeUrl(path4) { + if (typeof path4 !== "string") { + return path4; } - const pathSegments = path3.split("?"); + const pathSegments = path4.split("?"); if (pathSegments.length !== 2) { - return path3; + return path4; } const qp = new URLSearchParams(pathSegments.pop()); qp.sort(); return [...pathSegments, qp.toString()].join("?"); } - function matchKey(mockDispatch2, { path: path3, method, body, headers }) { - const pathMatch = matchValue(mockDispatch2.path, path3); + function matchKey(mockDispatch2, { path: path4, method, body, headers }) { + const pathMatch = matchValue(mockDispatch2.path, path4); const methodMatch = matchValue(mockDispatch2.method, method); const bodyMatch = typeof mockDispatch2.body !== "undefined" ? matchValue(mockDispatch2.body, body) : true; const headersMatch = matchHeaders(mockDispatch2, headers); @@ -49525,7 +49525,7 @@ var require_mock_utils2 = __commonJS({ function getMockDispatch(mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path; const resolvedPath = typeof basePath === "string" ? safeUrl(basePath) : basePath; - let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path3 }) => matchValue(safeUrl(path3), resolvedPath)); + let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path: path4 }) => matchValue(safeUrl(path4), resolvedPath)); if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`); } @@ -49563,9 +49563,9 @@ var require_mock_utils2 = __commonJS({ } } function buildKey(opts) { - const { path: path3, method, body, headers, query } = opts; + const { path: path4, method, body, headers, query } = opts; return { - path: path3, + path: path4, method, body, headers, @@ -50028,10 +50028,10 @@ var require_pending_interceptors_formatter2 = __commonJS({ } format(pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( - ({ method, path: path3, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ + ({ method, path: path4, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, - Path: path3, + Path: path4, "Status code": statusCode, Persistent: persist ? 
PERSISTENT : NOT_PERSISTENT, Invocations: timesInvoked, @@ -54912,9 +54912,9 @@ var require_util15 = __commonJS({ } } } - function validateCookiePath(path3) { - for (let i = 0; i < path3.length; ++i) { - const code = path3.charCodeAt(i); + function validateCookiePath(path4) { + for (let i = 0; i < path4.length; ++i) { + const code = path4.charCodeAt(i); if (code < 32 || // exclude CTLs (0-31) code === 127 || // DEL code === 59) { @@ -57508,11 +57508,11 @@ var require_undici2 = __commonJS({ if (typeof opts.path !== "string") { throw new InvalidArgumentError("invalid opts.path"); } - let path3 = opts.path; + let path4 = opts.path; if (!opts.path.startsWith("/")) { - path3 = `/${path3}`; + path4 = `/${path4}`; } - url = new URL(util.parseOrigin(url).origin + path3); + url = new URL(util.parseOrigin(url).origin + path4); } else { if (!opts) { opts = typeof url === "object" ? url : {}; @@ -63873,1491 +63873,669 @@ var require_package = __commonJS({ } }); -// node_modules/@actions/tool-cache/lib/manifest.js -var require_manifest = __commonJS({ - "node_modules/@actions/tool-cache/lib/manifest.js"(exports2, module2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; +// node_modules/bottleneck/light.js +var require_light = __commonJS({ + "node_modules/bottleneck/light.js"(exports2, module2) { + (function(global2, factory) { + typeof exports2 === "object" && typeof module2 !== "undefined" ? module2.exports = factory() : typeof define === "function" && define.amd ? define(factory) : global2.Bottleneck = factory(); + })(exports2, (function() { + "use strict"; + var commonjsGlobal = typeof globalThis !== "undefined" ? globalThis : typeof window !== "undefined" ? window : typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : {}; + function getCjsExportFromNamespace(n) { + return n && n["default"] || n; } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + var load2 = function(received, defaults, onto = {}) { + var k, ref, v; + for (k in defaults) { + v = defaults[k]; + onto[k] = (ref = received[k]) != null ? ref : v; } - __setModuleDefault2(result, mod); - return result; + return onto; }; - })(); - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? 
value : new P(function(resolve2) { - resolve2(value); - }); - } - return new (P || (P = Promise))(function(resolve2, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); + var overwrite = function(received, defaults, onto = {}) { + var k, v; + for (k in received) { + v = received[k]; + if (defaults[k] !== void 0) { + onto[k] = v; } } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } + return onto; + }; + var parser = { + load: load2, + overwrite + }; + var DLList; + DLList = class DLList { + constructor(incr, decr) { + this.incr = incr; + this.decr = decr; + this._first = null; + this._last = null; + this.length = 0; } - function step(result) { - result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + push(value) { + var node; + this.length++; + if (typeof this.incr === "function") { + this.incr(); + } + node = { + value, + prev: this._last, + next: null + }; + if (this._last != null) { + this._last.next = node; + this._last = node; + } else { + this._first = this._last = node; + } + return void 0; } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2._findMatch = _findMatch; - exports2._getOsVersion = _getOsVersion; - exports2._readLinuxVersionFile = _readLinuxVersionFile; - var semver6 = __importStar2(require_semver2()); - var core_1 = require_core(); - var os2 = require("os"); - var cp = require("child_process"); - var fs = require("fs"); - function _findMatch(versionSpec, stable, candidates, archFilter) { - return __awaiter2(this, void 0, void 0, function* () { - const platFilter = os2.platform(); - let result; - let match; - let file; - for (const candidate of candidates) { - const version = candidate.version; - (0, core_1.debug)(`check ${version} satisfies ${versionSpec}`); - if (semver6.satisfies(version, versionSpec) && (!stable || candidate.stable === stable)) { - file = candidate.files.find((item) => { - (0, core_1.debug)(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`); - let chk = item.arch === archFilter && item.platform === platFilter; - if (chk && item.platform_version) { - const osVersion = module2.exports._getOsVersion(); - if (osVersion === item.platform_version) { - chk = true; - } else { - chk = semver6.satisfies(osVersion, item.platform_version); - } - } - return chk; - }); - if (file) { - (0, core_1.debug)(`matched ${candidate.version}`); - match = candidate; - break; + shift() { + var value; + if (this._first == null) { + return; + } else { + this.length--; + if (typeof this.decr === "function") { + this.decr(); } } + value = this._first.value; + if ((this._first = this._first.next) != null) { + this._first.prev = null; + } else { + this._last = null; + } + return value; } - if (match && file) { - result = Object.assign({}, match); - result.files = [file]; - } - return result; - }); - } - function _getOsVersion() { - const plat = os2.platform(); - let version = ""; - if (plat === "darwin") { - version = cp.execSync("sw_vers -productVersion").toString(); - } else if (plat === "linux") { - const lsbContents = module2.exports._readLinuxVersionFile(); - if (lsbContents) { - const lines = lsbContents.split("\n"); - for (const line of lines) { - const parts = line.split("="); - if (parts.length === 2 && (parts[0].trim() === "VERSION_ID" || parts[0].trim() === "DISTRIB_RELEASE")) { - version = 
parts[1].trim().replace(/^"/, "").replace(/"$/, ""); - break; - } + first() { + if (this._first != null) { + return this._first.value; } } - } - return version; - } - function _readLinuxVersionFile() { - const lsbReleaseFile = "/etc/lsb-release"; - const osReleaseFile = "/etc/os-release"; - let contents = ""; - if (fs.existsSync(lsbReleaseFile)) { - contents = fs.readFileSync(lsbReleaseFile).toString(); - } else if (fs.existsSync(osReleaseFile)) { - contents = fs.readFileSync(osReleaseFile).toString(); - } - return contents; - } - } -}); - -// node_modules/@actions/tool-cache/lib/retry-helper.js -var require_retry_helper = __commonJS({ - "node_modules/@actions/tool-cache/lib/retry-helper.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + getArray() { + var node, ref, results; + node = this._first; + results = []; + while (node != null) { + results.push((ref = node, node = node.next, ref.value)); + } + return results; } - __setModuleDefault2(result, mod); - return result; - }; - })(); - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve2) { - resolve2(value); - }); - } - return new (P || (P = Promise))(function(resolve2, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); + forEachShift(cb) { + var node; + node = this.shift(); + while (node != null) { + cb(node), node = this.shift(); } + return void 0; } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); + debug() { + var node, ref, ref1, ref2, results; + node = this._first; + results = []; + while (node != null) { + results.push((ref = node, node = node.next, { + value: ref.value, + prev: (ref1 = ref.prev) != null ? ref1.value : void 0, + next: (ref2 = ref.next) != null ? ref2.value : void 0 + })); } + return results; } - function step(result) { - result.done ? 
resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + }; + var DLList_1 = DLList; + var Events; + Events = class Events { + constructor(instance) { + this.instance = instance; + this._events = {}; + if (this.instance.on != null || this.instance.once != null || this.instance.removeAllListeners != null) { + throw new Error("An Emitter already exists for this object"); + } + this.instance.on = (name, cb) => { + return this._addListener(name, "many", cb); + }; + this.instance.once = (name, cb) => { + return this._addListener(name, "once", cb); + }; + this.instance.removeAllListeners = (name = null) => { + if (name != null) { + return delete this._events[name]; + } else { + return this._events = {}; + } + }; } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.RetryHelper = void 0; - var core12 = __importStar2(require_core()); - var RetryHelper = class { - constructor(maxAttempts, minSeconds, maxSeconds) { - if (maxAttempts < 1) { - throw new Error("max attempts should be greater than or equal to 1"); + _addListener(name, status, cb) { + var base; + if ((base = this._events)[name] == null) { + base[name] = []; + } + this._events[name].push({ cb, status }); + return this.instance; } - this.maxAttempts = maxAttempts; - this.minSeconds = Math.floor(minSeconds); - this.maxSeconds = Math.floor(maxSeconds); - if (this.minSeconds > this.maxSeconds) { - throw new Error("min seconds should be less than or equal to max seconds"); + listenerCount(name) { + if (this._events[name] != null) { + return this._events[name].length; + } else { + return 0; + } } - } - execute(action, isRetryable) { - return __awaiter2(this, void 0, void 0, function* () { - let attempt = 1; - while (attempt < this.maxAttempts) { - try { - return yield action(); - } catch (err) { - if (isRetryable && !isRetryable(err)) { - throw err; + async trigger(name, ...args) { + var e, promises; + try { + if (name !== "debug") { + this.trigger("debug", `Event triggered: ${name}`, args); + } + if (this._events[name] == null) { + return; + } + this._events[name] = this._events[name].filter(function(listener) { + return listener.status !== "none"; + }); + promises = this._events[name].map(async (listener) => { + var e2, returned; + if (listener.status === "none") { + return; } - core12.info(err.message); + if (listener.status === "once") { + listener.status = "none"; + } + try { + returned = typeof listener.cb === "function" ? listener.cb(...args) : void 0; + if (typeof (returned != null ? 
returned.then : void 0) === "function") { + return await returned; + } else { + return returned; + } + } catch (error3) { + e2 = error3; + { + this.trigger("error", e2); + } + return null; + } + }); + return (await Promise.all(promises)).find(function(x) { + return x != null; + }); + } catch (error3) { + e = error3; + { + this.trigger("error", e); } - const seconds = this.getSleepAmount(); - core12.info(`Waiting ${seconds} seconds before trying again`); - yield this.sleep(seconds); - attempt++; + return null; } - return yield action(); - }); - } - getSleepAmount() { - return Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) + this.minSeconds; - } - sleep(seconds) { - return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve2) => setTimeout(resolve2, seconds * 1e3)); - }); - } - }; - exports2.RetryHelper = RetryHelper; - } -}); - -// node_modules/@actions/tool-cache/lib/tool-cache.js -var require_tool_cache = __commonJS({ - "node_modules/@actions/tool-cache/lib/tool-cache.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); } - __setModuleDefault2(result, mod); - return result; }; - })(); - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve2) { - resolve2(value); - }); - } - return new (P || (P = Promise))(function(resolve2, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); + var Events_1 = Events; + var DLList$1, Events$1, Queues; + DLList$1 = DLList_1; + Events$1 = Events_1; + Queues = class Queues { + constructor(num_priorities) { + var i; + this.Events = new Events$1(this); + this._length = 0; + this._lists = (function() { + var j, ref, results; + results = []; + for (i = j = 1, ref = num_priorities; 1 <= ref ? j <= ref : j >= ref; i = 1 <= ref ? 
++j : --j) { + results.push(new DLList$1((() => { + return this.incr(); + }), (() => { + return this.decr(); + }))); + } + return results; + }).call(this); + } + incr() { + if (this._length++ === 0) { + return this.Events.trigger("leftzero"); } } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); + decr() { + if (--this._length === 0) { + return this.Events.trigger("zero"); } } - function step(result) { - result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + push(job) { + return this._lists[job.options.priority].push(job); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.HTTPError = void 0; - exports2.downloadTool = downloadTool2; - exports2.extract7z = extract7z; - exports2.extractTar = extractTar2; - exports2.extractXar = extractXar; - exports2.extractZip = extractZip; - exports2.cacheDir = cacheDir2; - exports2.cacheFile = cacheFile; - exports2.find = find2; - exports2.findAllVersions = findAllVersions; - exports2.getManifestFromRepo = getManifestFromRepo; - exports2.findFromManifest = findFromManifest; - exports2.isExplicitVersion = isExplicitVersion; - exports2.evaluateVersions = evaluateVersions; - var core12 = __importStar2(require_core()); - var io4 = __importStar2(require_io()); - var crypto2 = __importStar2(require("crypto")); - var fs = __importStar2(require("fs")); - var mm = __importStar2(require_manifest()); - var os2 = __importStar2(require("os")); - var path3 = __importStar2(require("path")); - var httpm = __importStar2(require_lib()); - var semver6 = __importStar2(require_semver2()); - var stream = __importStar2(require("stream")); - var util = __importStar2(require("util")); - var assert_1 = require("assert"); - var exec_1 = require_exec(); - var retry_helper_1 = require_retry_helper(); - var HTTPError2 = class extends Error { - constructor(httpStatusCode) { - super(`Unexpected HTTP response: ${httpStatusCode}`); - this.httpStatusCode = httpStatusCode; - Object.setPrototypeOf(this, new.target.prototype); - } - }; - exports2.HTTPError = HTTPError2; - var IS_WINDOWS = process.platform === "win32"; - var IS_MAC = process.platform === "darwin"; - var userAgent2 = "actions/tool-cache"; - function downloadTool2(url, dest, auth2, headers) { - return __awaiter2(this, void 0, void 0, function* () { - dest = dest || path3.join(_getTempDirectory(), crypto2.randomUUID()); - yield io4.mkdirP(path3.dirname(dest)); - core12.debug(`Downloading ${url}`); - core12.debug(`Destination ${dest}`); - const maxAttempts = 3; - const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); - const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); - const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds); - return yield retryHelper.execute(() => __awaiter2(this, void 0, void 0, function* () { - return yield downloadToolAttempt(url, dest || "", auth2, headers); - }), (err) => { - if (err instanceof HTTPError2 && err.httpStatusCode) { - if (err.httpStatusCode < 500 && err.httpStatusCode !== 408 && err.httpStatusCode !== 429) { - return false; + queued(priority) { + if (priority != null) { + return this._lists[priority].length; + } else { + return this._length; + } + } + shiftAll(fn) { + return this._lists.forEach(function(list) { + return list.forEachShift(fn); + }); + } + getFirst(arr = this._lists) { + var j, len, list; + for (j = 0, len = 
arr.length; j < len; j++) { + list = arr[j]; + if (list.length > 0) { + return list; } } - return true; - }); - }); - } - function downloadToolAttempt(url, dest, auth2, headers) { - return __awaiter2(this, void 0, void 0, function* () { - if (fs.existsSync(dest)) { - throw new Error(`Destination file path ${dest} already exists`); + return []; } - const http = new httpm.HttpClient(userAgent2, [], { - allowRetries: false - }); - if (auth2) { - core12.debug("set auth"); - if (headers === void 0) { - headers = {}; + shiftLastFrom(priority) { + return this.getFirst(this._lists.slice(priority).reverse()).shift(); + } + }; + var Queues_1 = Queues; + var BottleneckError; + BottleneckError = class BottleneckError extends Error { + }; + var BottleneckError_1 = BottleneckError; + var BottleneckError$1, DEFAULT_PRIORITY, Job, NUM_PRIORITIES, parser$1; + NUM_PRIORITIES = 10; + DEFAULT_PRIORITY = 5; + parser$1 = parser; + BottleneckError$1 = BottleneckError_1; + Job = class Job { + constructor(task, args, options, jobDefaults, rejectOnDrop, Events2, _states, Promise2) { + this.task = task; + this.args = args; + this.rejectOnDrop = rejectOnDrop; + this.Events = Events2; + this._states = _states; + this.Promise = Promise2; + this.options = parser$1.load(options, jobDefaults); + this.options.priority = this._sanitizePriority(this.options.priority); + if (this.options.id === jobDefaults.id) { + this.options.id = `${this.options.id}-${this._randomIndex()}`; } - headers.authorization = auth2; + this.promise = new this.Promise((_resolve, _reject) => { + this._resolve = _resolve; + this._reject = _reject; + }); + this.retryCount = 0; } - const response = yield http.get(url, headers); - if (response.message.statusCode !== 200) { - const err = new HTTPError2(response.message.statusCode); - core12.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); - throw err; + _sanitizePriority(priority) { + var sProperty; + sProperty = ~~priority !== priority ? DEFAULT_PRIORITY : priority; + if (sProperty < 0) { + return 0; + } else if (sProperty > NUM_PRIORITIES - 1) { + return NUM_PRIORITIES - 1; + } else { + return sProperty; + } } - const pipeline = util.promisify(stream.pipeline); - const responseMessageFactory = _getGlobal("TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY", () => response.message); - const readStream = responseMessageFactory(); - let succeeded = false; - try { - yield pipeline(readStream, fs.createWriteStream(dest)); - core12.debug("download complete"); - succeeded = true; - return dest; - } finally { - if (!succeeded) { - core12.debug("download failed"); - try { - yield io4.rmRF(dest); - } catch (err) { - core12.debug(`Failed to delete '${dest}'. ${err.message}`); + _randomIndex() { + return Math.random().toString(36).slice(2); + } + doDrop({ error: error3, message = "This job has been dropped by Bottleneck" } = {}) { + if (this._states.remove(this.options.id)) { + if (this.rejectOnDrop) { + this._reject(error3 != null ? 
error3 : new BottleneckError$1(message)); } + this.Events.trigger("dropped", { args: this.args, options: this.options, task: this.task, promise: this.promise }); + return true; + } else { + return false; } } - }); - } - function extract7z(file, dest, _7zPath) { - return __awaiter2(this, void 0, void 0, function* () { - (0, assert_1.ok)(IS_WINDOWS, "extract7z() not supported on current OS"); - (0, assert_1.ok)(file, 'parameter "file" is required'); - dest = yield _createExtractFolder(dest); - const originalCwd = process.cwd(); - process.chdir(dest); - if (_7zPath) { - try { - const logLevel = core12.isDebug() ? "-bb1" : "-bb0"; - const args = [ - "x", - // eXtract files with full paths - logLevel, - // -bb[0-3] : set output log level - "-bd", - // disable progress indicator - "-sccUTF-8", - // set charset for for console input/output - file - ]; - const options = { - silent: true - }; - yield (0, exec_1.exec)(`"${_7zPath}"`, args, options); - } finally { - process.chdir(originalCwd); + _assertStatus(expected) { + var status; + status = this._states.jobStatus(this.options.id); + if (!(status === expected || expected === "DONE" && status === null)) { + throw new BottleneckError$1(`Invalid job status ${status}, expected ${expected}. Please open an issue at https://github.com/SGrondin/bottleneck/issues`); } - } else { - const escapedScript = path3.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); - const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); - const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); - const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; - const args = [ - "-NoLogo", - "-Sta", - "-NoProfile", - "-NonInteractive", - "-ExecutionPolicy", - "Unrestricted", - "-Command", - command - ]; - const options = { - silent: true - }; + } + doReceive() { + this._states.start(this.options.id); + return this.Events.trigger("received", { args: this.args, options: this.options }); + } + doQueue(reachedHWM, blocked) { + this._assertStatus("RECEIVED"); + this._states.next(this.options.id); + return this.Events.trigger("queued", { args: this.args, options: this.options, reachedHWM, blocked }); + } + doRun() { + if (this.retryCount === 0) { + this._assertStatus("QUEUED"); + this._states.next(this.options.id); + } else { + this._assertStatus("EXECUTING"); + } + return this.Events.trigger("scheduled", { args: this.args, options: this.options }); + } + async doExecute(chained, clearGlobalState, run2, free) { + var error3, eventInfo, passed; + if (this.retryCount === 0) { + this._assertStatus("RUNNING"); + this._states.next(this.options.id); + } else { + this._assertStatus("EXECUTING"); + } + eventInfo = { args: this.args, options: this.options, retryCount: this.retryCount }; + this.Events.trigger("executing", eventInfo); try { - const powershellPath = yield io4.which("powershell", true); - yield (0, exec_1.exec)(`"${powershellPath}"`, args, options); - } finally { - process.chdir(originalCwd); + passed = await (chained != null ? 
chained.schedule(this.options, this.task, ...this.args) : this.task(...this.args)); + if (clearGlobalState()) { + this.doDone(eventInfo); + await free(this.options, eventInfo); + this._assertStatus("DONE"); + return this._resolve(passed); + } + } catch (error1) { + error3 = error1; + return this._onFailure(error3, eventInfo, clearGlobalState, run2, free); } } - return dest; - }); - } - function extractTar2(file_1, dest_1) { - return __awaiter2(this, arguments, void 0, function* (file, dest, flags = "xz") { - if (!file) { - throw new Error("parameter 'file' is required"); + doExpire(clearGlobalState, run2, free) { + var error3, eventInfo; + if (this._states.jobStatus(this.options.id === "RUNNING")) { + this._states.next(this.options.id); + } + this._assertStatus("EXECUTING"); + eventInfo = { args: this.args, options: this.options, retryCount: this.retryCount }; + error3 = new BottleneckError$1(`This job timed out after ${this.options.expiration} ms.`); + return this._onFailure(error3, eventInfo, clearGlobalState, run2, free); } - dest = yield _createExtractFolder(dest); - core12.debug("Checking tar --version"); - let versionOutput = ""; - yield (0, exec_1.exec)("tar --version", [], { - ignoreReturnCode: true, - silent: true, - listeners: { - stdout: (data) => versionOutput += data.toString(), - stderr: (data) => versionOutput += data.toString() + async _onFailure(error3, eventInfo, clearGlobalState, run2, free) { + var retry2, retryAfter; + if (clearGlobalState()) { + retry2 = await this.Events.trigger("failed", error3, eventInfo); + if (retry2 != null) { + retryAfter = ~~retry2; + this.Events.trigger("retry", `Retrying ${this.options.id} after ${retryAfter} ms`, eventInfo); + this.retryCount++; + return run2(retryAfter); + } else { + this.doDone(eventInfo); + await free(this.options, eventInfo); + this._assertStatus("DONE"); + return this._reject(error3); + } } - }); - core12.debug(versionOutput.trim()); - const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); - let args; - if (flags instanceof Array) { - args = flags; - } else { - args = [flags]; } - if (core12.isDebug() && !flags.includes("v")) { - args.push("-v"); + doDone(eventInfo) { + this._assertStatus("EXECUTING"); + this._states.next(this.options.id); + return this.Events.trigger("done", eventInfo); } - let destArg = dest; - let fileArg = file; - if (IS_WINDOWS && isGnuTar) { - args.push("--force-local"); - destArg = dest.replace(/\\/g, "/"); - fileArg = file.replace(/\\/g, "/"); + }; + var Job_1 = Job; + var BottleneckError$2, LocalDatastore, parser$2; + parser$2 = parser; + BottleneckError$2 = BottleneckError_1; + LocalDatastore = class LocalDatastore { + constructor(instance, storeOptions, storeInstanceOptions) { + this.instance = instance; + this.storeOptions = storeOptions; + this.clientId = this.instance._randomIndex(); + parser$2.load(storeInstanceOptions, storeInstanceOptions, this); + this._nextRequest = this._lastReservoirRefresh = this._lastReservoirIncrease = Date.now(); + this._running = 0; + this._done = 0; + this._unblockTime = 0; + this.ready = this.Promise.resolve(); + this.clients = {}; + this._startHeartbeat(); } - if (isGnuTar) { - args.push("--warning=no-unknown-keyword"); - args.push("--overwrite"); + _startHeartbeat() { + var base; + if (this.heartbeat == null && (this.storeOptions.reservoirRefreshInterval != null && this.storeOptions.reservoirRefreshAmount != null || this.storeOptions.reservoirIncreaseInterval != null && this.storeOptions.reservoirIncreaseAmount != null)) { + return typeof (base 
= this.heartbeat = setInterval(() => { + var amount, incr, maximum, now, reservoir; + now = Date.now(); + if (this.storeOptions.reservoirRefreshInterval != null && now >= this._lastReservoirRefresh + this.storeOptions.reservoirRefreshInterval) { + this._lastReservoirRefresh = now; + this.storeOptions.reservoir = this.storeOptions.reservoirRefreshAmount; + this.instance._drainAll(this.computeCapacity()); + } + if (this.storeOptions.reservoirIncreaseInterval != null && now >= this._lastReservoirIncrease + this.storeOptions.reservoirIncreaseInterval) { + ({ + reservoirIncreaseAmount: amount, + reservoirIncreaseMaximum: maximum, + reservoir + } = this.storeOptions); + this._lastReservoirIncrease = now; + incr = maximum != null ? Math.min(amount, maximum - reservoir) : amount; + if (incr > 0) { + this.storeOptions.reservoir += incr; + return this.instance._drainAll(this.computeCapacity()); + } + } + }, this.heartbeatInterval)).unref === "function" ? base.unref() : void 0; + } else { + return clearInterval(this.heartbeat); + } } - args.push("-C", destArg, "-f", fileArg); - yield (0, exec_1.exec)(`tar`, args); - return dest; - }); - } - function extractXar(file_1, dest_1) { - return __awaiter2(this, arguments, void 0, function* (file, dest, flags = []) { - (0, assert_1.ok)(IS_MAC, "extractXar() not supported on current OS"); - (0, assert_1.ok)(file, 'parameter "file" is required'); - dest = yield _createExtractFolder(dest); - let args; - if (flags instanceof Array) { - args = flags; - } else { - args = [flags]; + async __publish__(message) { + await this.yieldLoop(); + return this.instance.Events.trigger("message", message.toString()); } - args.push("-x", "-C", dest, "-f", file); - if (core12.isDebug()) { - args.push("-v"); + async __disconnect__(flush) { + await this.yieldLoop(); + clearInterval(this.heartbeat); + return this.Promise.resolve(); } - const xarPath = yield io4.which("xar", true); - yield (0, exec_1.exec)(`"${xarPath}"`, _unique(args)); - return dest; - }); - } - function extractZip(file, dest) { - return __awaiter2(this, void 0, void 0, function* () { - if (!file) { - throw new Error("parameter 'file' is required"); + yieldLoop(t = 0) { + return new this.Promise(function(resolve2, reject) { + return setTimeout(resolve2, t); + }); } - dest = yield _createExtractFolder(dest); - if (IS_WINDOWS) { - yield extractZipWin(file, dest); - } else { - yield extractZipNix(file, dest); + computePenalty() { + var ref; + return (ref = this.storeOptions.penalty) != null ? 
ref : 15 * this.storeOptions.minTime || 5e3; } - return dest; - }); - } - function extractZipWin(file, dest) { - return __awaiter2(this, void 0, void 0, function* () { - const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); - const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); - const pwshPath = yield io4.which("pwsh", false); - if (pwshPath) { - const pwshCommand = [ - `$ErrorActionPreference = 'Stop' ;`, - `try { Add-Type -AssemblyName System.IO.Compression.ZipFile } catch { } ;`, - `try { [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }`, - `catch { if (($_.Exception.GetType().FullName -eq 'System.Management.Automation.MethodException') -or ($_.Exception.GetType().FullName -eq 'System.Management.Automation.RuntimeException') ){ Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force } else { throw $_ } } ;` - ].join(" "); - const args = [ - "-NoLogo", - "-NoProfile", - "-NonInteractive", - "-ExecutionPolicy", - "Unrestricted", - "-Command", - pwshCommand - ]; - core12.debug(`Using pwsh at path: ${pwshPath}`); - yield (0, exec_1.exec)(`"${pwshPath}"`, args); - } else { - const powershellCommand = [ - `$ErrorActionPreference = 'Stop' ;`, - `try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ;`, - `if ((Get-Command -Name Expand-Archive -Module Microsoft.PowerShell.Archive -ErrorAction Ignore)) { Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force }`, - `else {[System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }` - ].join(" "); - const args = [ - "-NoLogo", - "-Sta", - "-NoProfile", - "-NonInteractive", - "-ExecutionPolicy", - "Unrestricted", - "-Command", - powershellCommand - ]; - const powershellPath = yield io4.which("powershell", true); - core12.debug(`Using powershell at path: ${powershellPath}`); - yield (0, exec_1.exec)(`"${powershellPath}"`, args); + async __updateSettings__(options) { + await this.yieldLoop(); + parser$2.overwrite(options, options, this.storeOptions); + this._startHeartbeat(); + this.instance._drainAll(this.computeCapacity()); + return true; } - }); - } - function extractZipNix(file, dest) { - return __awaiter2(this, void 0, void 0, function* () { - const unzipPath = yield io4.which("unzip", true); - const args = [file]; - if (!core12.isDebug()) { - args.unshift("-q"); + async __running__() { + await this.yieldLoop(); + return this._running; } - args.unshift("-o"); - yield (0, exec_1.exec)(`"${unzipPath}"`, args, { cwd: dest }); - }); - } - function cacheDir2(sourceDir, tool, version, arch) { - return __awaiter2(this, void 0, void 0, function* () { - version = semver6.clean(version) || version; - arch = arch || os2.arch(); - core12.debug(`Caching tool ${tool} ${version} ${arch}`); - core12.debug(`source dir: ${sourceDir}`); - if (!fs.statSync(sourceDir).isDirectory()) { - throw new Error("sourceDir is not a directory"); + async __queued__() { + await this.yieldLoop(); + return this.instance.queued(); } - const destPath = yield _createToolPath(tool, version, arch); - for (const itemName of fs.readdirSync(sourceDir)) { - const s = path3.join(sourceDir, itemName); - yield io4.cp(s, destPath, { recursive: true }); + async __done__() { + await this.yieldLoop(); + return this._done; } - _completeToolPath(tool, version, arch); - return destPath; - }); - } - function cacheFile(sourceFile, targetFile, tool, version, arch) { - return __awaiter2(this, void 0, void 
0, function* () { - version = semver6.clean(version) || version; - arch = arch || os2.arch(); - core12.debug(`Caching tool ${tool} ${version} ${arch}`); - core12.debug(`source file: ${sourceFile}`); - if (!fs.statSync(sourceFile).isFile()) { - throw new Error("sourceFile is not a file"); + async __groupCheck__(time) { + await this.yieldLoop(); + return this._nextRequest + this.timeout < time; } - const destFolder = yield _createToolPath(tool, version, arch); - const destPath = path3.join(destFolder, targetFile); - core12.debug(`destination file ${destPath}`); - yield io4.cp(sourceFile, destPath); - _completeToolPath(tool, version, arch); - return destFolder; - }); - } - function find2(toolName, versionSpec, arch) { - if (!toolName) { - throw new Error("toolName parameter is required"); - } - if (!versionSpec) { - throw new Error("versionSpec parameter is required"); - } - arch = arch || os2.arch(); - if (!isExplicitVersion(versionSpec)) { - const localVersions = findAllVersions(toolName, arch); - const match = evaluateVersions(localVersions, versionSpec); - versionSpec = match; - } - let toolPath = ""; - if (versionSpec) { - versionSpec = semver6.clean(versionSpec) || ""; - const cachePath = path3.join(_getCacheDirectory(), toolName, versionSpec, arch); - core12.debug(`checking cache: ${cachePath}`); - if (fs.existsSync(cachePath) && fs.existsSync(`${cachePath}.complete`)) { - core12.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); - toolPath = cachePath; - } else { - core12.debug("not found"); - } - } - return toolPath; - } - function findAllVersions(toolName, arch) { - const versions = []; - arch = arch || os2.arch(); - const toolPath = path3.join(_getCacheDirectory(), toolName); - if (fs.existsSync(toolPath)) { - const children = fs.readdirSync(toolPath); - for (const child of children) { - if (isExplicitVersion(child)) { - const fullPath = path3.join(toolPath, child, arch || ""); - if (fs.existsSync(fullPath) && fs.existsSync(`${fullPath}.complete`)) { - versions.push(child); - } - } - } - } - return versions; - } - function getManifestFromRepo(owner_1, repo_1, auth_1) { - return __awaiter2(this, arguments, void 0, function* (owner, repo, auth2, branch = "master") { - let releases = []; - const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`; - const http = new httpm.HttpClient("tool-cache"); - const headers = {}; - if (auth2) { - core12.debug("set auth"); - headers.authorization = auth2; - } - const response = yield http.getJson(treeUrl, headers); - if (!response.result) { - return releases; - } - let manifestUrl = ""; - for (const item of response.result.tree) { - if (item.path === "versions-manifest.json") { - manifestUrl = item.url; - break; - } - } - headers["accept"] = "application/vnd.github.VERSION.raw"; - let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody(); - if (versionsRaw) { - versionsRaw = versionsRaw.replace(/^\uFEFF/, ""); - try { - releases = JSON.parse(versionsRaw); - } catch (_a) { - core12.debug("Invalid json"); + computeCapacity() { + var maxConcurrent, reservoir; + ({ maxConcurrent, reservoir } = this.storeOptions); + if (maxConcurrent != null && reservoir != null) { + return Math.min(maxConcurrent - this._running, reservoir); + } else if (maxConcurrent != null) { + return maxConcurrent - this._running; + } else if (reservoir != null) { + return reservoir; + } else { + return null; } } - return releases; - }); - } - function findFromManifest(versionSpec_1, stable_1, manifest_1) { - return 
__awaiter2(this, arguments, void 0, function* (versionSpec, stable, manifest, archFilter = os2.arch()) { - const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter); - return match; - }); - } - function _createExtractFolder(dest) { - return __awaiter2(this, void 0, void 0, function* () { - if (!dest) { - dest = path3.join(_getTempDirectory(), crypto2.randomUUID()); - } - yield io4.mkdirP(dest); - return dest; - }); - } - function _createToolPath(tool, version, arch) { - return __awaiter2(this, void 0, void 0, function* () { - const folderPath = path3.join(_getCacheDirectory(), tool, semver6.clean(version) || version, arch || ""); - core12.debug(`destination ${folderPath}`); - const markerPath = `${folderPath}.complete`; - yield io4.rmRF(folderPath); - yield io4.rmRF(markerPath); - yield io4.mkdirP(folderPath); - return folderPath; - }); - } - function _completeToolPath(tool, version, arch) { - const folderPath = path3.join(_getCacheDirectory(), tool, semver6.clean(version) || version, arch || ""); - const markerPath = `${folderPath}.complete`; - fs.writeFileSync(markerPath, ""); - core12.debug("finished caching tool"); - } - function isExplicitVersion(versionSpec) { - const c = semver6.clean(versionSpec) || ""; - core12.debug(`isExplicit: ${c}`); - const valid2 = semver6.valid(c) != null; - core12.debug(`explicit? ${valid2}`); - return valid2; - } - function evaluateVersions(versions, versionSpec) { - let version = ""; - core12.debug(`evaluating ${versions.length} versions`); - versions = versions.sort((a, b) => { - if (semver6.gt(a, b)) { - return 1; + conditionsCheck(weight) { + var capacity; + capacity = this.computeCapacity(); + return capacity == null || weight <= capacity; } - return -1; - }); - for (let i = versions.length - 1; i >= 0; i--) { - const potential = versions[i]; - const satisfied = semver6.satisfies(potential, versionSpec); - if (satisfied) { - version = potential; - break; + async __incrementReservoir__(incr) { + var reservoir; + await this.yieldLoop(); + reservoir = this.storeOptions.reservoir += incr; + this.instance._drainAll(this.computeCapacity()); + return reservoir; } - } - if (version) { - core12.debug(`matched: ${version}`); - } else { - core12.debug("match not found"); - } - return version; - } - function _getCacheDirectory() { - const cacheDirectory = process.env["RUNNER_TOOL_CACHE"] || ""; - (0, assert_1.ok)(cacheDirectory, "Expected RUNNER_TOOL_CACHE to be defined"); - return cacheDirectory; - } - function _getTempDirectory() { - const tempDirectory = process.env["RUNNER_TEMP"] || ""; - (0, assert_1.ok)(tempDirectory, "Expected RUNNER_TEMP to be defined"); - return tempDirectory; - } - function _getGlobal(key, defaultValue) { - const value = global[key]; - return value !== void 0 ? value : defaultValue; - } - function _unique(values) { - return Array.from(new Set(values)); - } - } -}); - -// node_modules/bottleneck/light.js -var require_light = __commonJS({ - "node_modules/bottleneck/light.js"(exports2, module2) { - (function(global2, factory) { - typeof exports2 === "object" && typeof module2 !== "undefined" ? module2.exports = factory() : typeof define === "function" && define.amd ? define(factory) : global2.Bottleneck = factory(); - })(exports2, (function() { - "use strict"; - var commonjsGlobal = typeof globalThis !== "undefined" ? globalThis : typeof window !== "undefined" ? window : typeof global !== "undefined" ? global : typeof self !== "undefined" ? 
self : {}; - function getCjsExportFromNamespace(n) { - return n && n["default"] || n; - } - var load2 = function(received, defaults, onto = {}) { - var k, ref, v; - for (k in defaults) { - v = defaults[k]; - onto[k] = (ref = received[k]) != null ? ref : v; + async __currentReservoir__() { + await this.yieldLoop(); + return this.storeOptions.reservoir; } - return onto; - }; - var overwrite = function(received, defaults, onto = {}) { - var k, v; - for (k in received) { - v = received[k]; - if (defaults[k] !== void 0) { - onto[k] = v; - } + isBlocked(now) { + return this._unblockTime >= now; } - return onto; - }; - var parser = { - load: load2, - overwrite - }; - var DLList; - DLList = class DLList { - constructor(incr, decr) { - this.incr = incr; - this.decr = decr; - this._first = null; - this._last = null; - this.length = 0; + check(weight, now) { + return this.conditionsCheck(weight) && this._nextRequest - now <= 0; } - push(value) { - var node; - this.length++; - if (typeof this.incr === "function") { - this.incr(); - } - node = { - value, - prev: this._last, - next: null - }; - if (this._last != null) { - this._last.next = node; - this._last = node; - } else { - this._first = this._last = node; - } - return void 0; + async __check__(weight) { + var now; + await this.yieldLoop(); + now = Date.now(); + return this.check(weight, now); } - shift() { - var value; - if (this._first == null) { - return; - } else { - this.length--; - if (typeof this.decr === "function") { - this.decr(); + async __register__(index, weight, expiration) { + var now, wait; + await this.yieldLoop(); + now = Date.now(); + if (this.conditionsCheck(weight)) { + this._running += weight; + if (this.storeOptions.reservoir != null) { + this.storeOptions.reservoir -= weight; } - } - value = this._first.value; - if ((this._first = this._first.next) != null) { - this._first.prev = null; + wait = Math.max(this._nextRequest - now, 0); + this._nextRequest = now + wait + this.storeOptions.minTime; + return { + success: true, + wait, + reservoir: this.storeOptions.reservoir + }; } else { - this._last = null; + return { + success: false + }; } - return value; } - first() { - if (this._first != null) { - return this._first.value; - } + strategyIsBlock() { + return this.storeOptions.strategy === 3; } - getArray() { - var node, ref, results; - node = this._first; - results = []; - while (node != null) { - results.push((ref = node, node = node.next, ref.value)); + async __submit__(queueLength, weight) { + var blocked, now, reachedHWM; + await this.yieldLoop(); + if (this.storeOptions.maxConcurrent != null && weight > this.storeOptions.maxConcurrent) { + throw new BottleneckError$2(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${this.storeOptions.maxConcurrent}`); } - return results; - } - forEachShift(cb) { - var node; - node = this.shift(); - while (node != null) { - cb(node), node = this.shift(); + now = Date.now(); + reachedHWM = this.storeOptions.highWater != null && queueLength === this.storeOptions.highWater && !this.check(weight, now); + blocked = this.strategyIsBlock() && (reachedHWM || this.isBlocked(now)); + if (blocked) { + this._unblockTime = now + this.computePenalty(); + this._nextRequest = this._unblockTime + this.storeOptions.minTime; + this.instance._dropAllQueued(); } - return void 0; + return { + reachedHWM, + blocked, + strategy: this.storeOptions.strategy + }; } - debug() { - var node, ref, ref1, ref2, results; - node = this._first; - results = []; - while 
(node != null) { - results.push((ref = node, node = node.next, { - value: ref.value, - prev: (ref1 = ref.prev) != null ? ref1.value : void 0, - next: (ref2 = ref.next) != null ? ref2.value : void 0 - })); - } - return results; + async __free__(index, weight) { + await this.yieldLoop(); + this._running -= weight; + this._done += weight; + this.instance._drainAll(this.computeCapacity()); + return { + running: this._running + }; } }; - var DLList_1 = DLList; - var Events; - Events = class Events { - constructor(instance) { - this.instance = instance; - this._events = {}; - if (this.instance.on != null || this.instance.once != null || this.instance.removeAllListeners != null) { - throw new Error("An Emitter already exists for this object"); - } - this.instance.on = (name, cb) => { - return this._addListener(name, "many", cb); - }; - this.instance.once = (name, cb) => { - return this._addListener(name, "once", cb); - }; - this.instance.removeAllListeners = (name = null) => { - if (name != null) { - return delete this._events[name]; - } else { - return this._events = {}; - } - }; + var LocalDatastore_1 = LocalDatastore; + var BottleneckError$3, States; + BottleneckError$3 = BottleneckError_1; + States = class States { + constructor(status1) { + this.status = status1; + this._jobs = {}; + this.counts = this.status.map(function() { + return 0; + }); } - _addListener(name, status, cb) { - var base; - if ((base = this._events)[name] == null) { - base[name] = []; + next(id) { + var current, next; + current = this._jobs[id]; + next = current + 1; + if (current != null && next < this.status.length) { + this.counts[current]--; + this.counts[next]++; + return this._jobs[id]++; + } else if (current != null) { + this.counts[current]--; + return delete this._jobs[id]; } - this._events[name].push({ cb, status }); - return this.instance; } - listenerCount(name) { - if (this._events[name] != null) { - return this._events[name].length; - } else { - return 0; - } + start(id) { + var initial; + initial = 0; + this._jobs[id] = initial; + return this.counts[initial]++; } - async trigger(name, ...args) { - var e, promises; - try { - if (name !== "debug") { - this.trigger("debug", `Event triggered: ${name}`, args); - } - if (this._events[name] == null) { - return; - } - this._events[name] = this._events[name].filter(function(listener) { - return listener.status !== "none"; - }); - promises = this._events[name].map(async (listener) => { - var e2, returned; - if (listener.status === "none") { - return; - } - if (listener.status === "once") { - listener.status = "none"; - } - try { - returned = typeof listener.cb === "function" ? listener.cb(...args) : void 0; - if (typeof (returned != null ? 
returned.then : void 0) === "function") { - return await returned; - } else { - return returned; - } - } catch (error3) { - e2 = error3; - { - this.trigger("error", e2); - } - return null; - } - }); - return (await Promise.all(promises)).find(function(x) { - return x != null; - }); - } catch (error3) { - e = error3; - { - this.trigger("error", e); - } - return null; + remove(id) { + var current; + current = this._jobs[id]; + if (current != null) { + this.counts[current]--; + delete this._jobs[id]; } + return current != null; } - }; - var Events_1 = Events; - var DLList$1, Events$1, Queues; - DLList$1 = DLList_1; - Events$1 = Events_1; - Queues = class Queues { - constructor(num_priorities) { - var i; - this.Events = new Events$1(this); - this._length = 0; - this._lists = (function() { - var j, ref, results; + jobStatus(id) { + var ref; + return (ref = this.status[this._jobs[id]]) != null ? ref : null; + } + statusJobs(status) { + var k, pos, ref, results, v; + if (status != null) { + pos = this.status.indexOf(status); + if (pos < 0) { + throw new BottleneckError$3(`status must be one of ${this.status.join(", ")}`); + } + ref = this._jobs; results = []; - for (i = j = 1, ref = num_priorities; 1 <= ref ? j <= ref : j >= ref; i = 1 <= ref ? ++j : --j) { - results.push(new DLList$1((() => { - return this.incr(); - }), (() => { - return this.decr(); - }))); + for (k in ref) { + v = ref[k]; + if (v === pos) { + results.push(k); + } } return results; - }).call(this); - } - incr() { - if (this._length++ === 0) { - return this.Events.trigger("leftzero"); - } - } - decr() { - if (--this._length === 0) { - return this.Events.trigger("zero"); - } - } - push(job) { - return this._lists[job.options.priority].push(job); - } - queued(priority) { - if (priority != null) { - return this._lists[priority].length; } else { - return this._length; + return Object.keys(this._jobs); } } - shiftAll(fn) { - return this._lists.forEach(function(list) { - return list.forEachShift(fn); - }); + statusCounts() { + return this.counts.reduce(((acc, v, i) => { + acc[this.status[i]] = v; + return acc; + }), {}); } - getFirst(arr = this._lists) { - var j, len, list; - for (j = 0, len = arr.length; j < len; j++) { - list = arr[j]; - if (list.length > 0) { - return list; - } - } - return []; + }; + var States_1 = States; + var DLList$2, Sync; + DLList$2 = DLList_1; + Sync = class Sync { + constructor(name, Promise2) { + this.schedule = this.schedule.bind(this); + this.name = name; + this.Promise = Promise2; + this._running = 0; + this._queue = new DLList$2(); } - shiftLastFrom(priority) { - return this.getFirst(this._lists.slice(priority).reverse()).shift(); - } - }; - var Queues_1 = Queues; - var BottleneckError; - BottleneckError = class BottleneckError extends Error { - }; - var BottleneckError_1 = BottleneckError; - var BottleneckError$1, DEFAULT_PRIORITY, Job, NUM_PRIORITIES, parser$1; - NUM_PRIORITIES = 10; - DEFAULT_PRIORITY = 5; - parser$1 = parser; - BottleneckError$1 = BottleneckError_1; - Job = class Job { - constructor(task, args, options, jobDefaults, rejectOnDrop, Events2, _states, Promise2) { - this.task = task; - this.args = args; - this.rejectOnDrop = rejectOnDrop; - this.Events = Events2; - this._states = _states; - this.Promise = Promise2; - this.options = parser$1.load(options, jobDefaults); - this.options.priority = this._sanitizePriority(this.options.priority); - if (this.options.id === jobDefaults.id) { - this.options.id = `${this.options.id}-${this._randomIndex()}`; - } - this.promise = new 
this.Promise((_resolve, _reject) => { - this._resolve = _resolve; - this._reject = _reject; - }); - this.retryCount = 0; - } - _sanitizePriority(priority) { - var sProperty; - sProperty = ~~priority !== priority ? DEFAULT_PRIORITY : priority; - if (sProperty < 0) { - return 0; - } else if (sProperty > NUM_PRIORITIES - 1) { - return NUM_PRIORITIES - 1; - } else { - return sProperty; - } - } - _randomIndex() { - return Math.random().toString(36).slice(2); - } - doDrop({ error: error3, message = "This job has been dropped by Bottleneck" } = {}) { - if (this._states.remove(this.options.id)) { - if (this.rejectOnDrop) { - this._reject(error3 != null ? error3 : new BottleneckError$1(message)); - } - this.Events.trigger("dropped", { args: this.args, options: this.options, task: this.task, promise: this.promise }); - return true; - } else { - return false; - } - } - _assertStatus(expected) { - var status; - status = this._states.jobStatus(this.options.id); - if (!(status === expected || expected === "DONE" && status === null)) { - throw new BottleneckError$1(`Invalid job status ${status}, expected ${expected}. Please open an issue at https://github.com/SGrondin/bottleneck/issues`); - } - } - doReceive() { - this._states.start(this.options.id); - return this.Events.trigger("received", { args: this.args, options: this.options }); - } - doQueue(reachedHWM, blocked) { - this._assertStatus("RECEIVED"); - this._states.next(this.options.id); - return this.Events.trigger("queued", { args: this.args, options: this.options, reachedHWM, blocked }); - } - doRun() { - if (this.retryCount === 0) { - this._assertStatus("QUEUED"); - this._states.next(this.options.id); - } else { - this._assertStatus("EXECUTING"); - } - return this.Events.trigger("scheduled", { args: this.args, options: this.options }); - } - async doExecute(chained, clearGlobalState, run2, free) { - var error3, eventInfo, passed; - if (this.retryCount === 0) { - this._assertStatus("RUNNING"); - this._states.next(this.options.id); - } else { - this._assertStatus("EXECUTING"); - } - eventInfo = { args: this.args, options: this.options, retryCount: this.retryCount }; - this.Events.trigger("executing", eventInfo); - try { - passed = await (chained != null ? 
chained.schedule(this.options, this.task, ...this.args) : this.task(...this.args)); - if (clearGlobalState()) { - this.doDone(eventInfo); - await free(this.options, eventInfo); - this._assertStatus("DONE"); - return this._resolve(passed); - } - } catch (error1) { - error3 = error1; - return this._onFailure(error3, eventInfo, clearGlobalState, run2, free); - } - } - doExpire(clearGlobalState, run2, free) { - var error3, eventInfo; - if (this._states.jobStatus(this.options.id === "RUNNING")) { - this._states.next(this.options.id); - } - this._assertStatus("EXECUTING"); - eventInfo = { args: this.args, options: this.options, retryCount: this.retryCount }; - error3 = new BottleneckError$1(`This job timed out after ${this.options.expiration} ms.`); - return this._onFailure(error3, eventInfo, clearGlobalState, run2, free); - } - async _onFailure(error3, eventInfo, clearGlobalState, run2, free) { - var retry2, retryAfter; - if (clearGlobalState()) { - retry2 = await this.Events.trigger("failed", error3, eventInfo); - if (retry2 != null) { - retryAfter = ~~retry2; - this.Events.trigger("retry", `Retrying ${this.options.id} after ${retryAfter} ms`, eventInfo); - this.retryCount++; - return run2(retryAfter); - } else { - this.doDone(eventInfo); - await free(this.options, eventInfo); - this._assertStatus("DONE"); - return this._reject(error3); - } - } - } - doDone(eventInfo) { - this._assertStatus("EXECUTING"); - this._states.next(this.options.id); - return this.Events.trigger("done", eventInfo); - } - }; - var Job_1 = Job; - var BottleneckError$2, LocalDatastore, parser$2; - parser$2 = parser; - BottleneckError$2 = BottleneckError_1; - LocalDatastore = class LocalDatastore { - constructor(instance, storeOptions, storeInstanceOptions) { - this.instance = instance; - this.storeOptions = storeOptions; - this.clientId = this.instance._randomIndex(); - parser$2.load(storeInstanceOptions, storeInstanceOptions, this); - this._nextRequest = this._lastReservoirRefresh = this._lastReservoirIncrease = Date.now(); - this._running = 0; - this._done = 0; - this._unblockTime = 0; - this.ready = this.Promise.resolve(); - this.clients = {}; - this._startHeartbeat(); - } - _startHeartbeat() { - var base; - if (this.heartbeat == null && (this.storeOptions.reservoirRefreshInterval != null && this.storeOptions.reservoirRefreshAmount != null || this.storeOptions.reservoirIncreaseInterval != null && this.storeOptions.reservoirIncreaseAmount != null)) { - return typeof (base = this.heartbeat = setInterval(() => { - var amount, incr, maximum, now, reservoir; - now = Date.now(); - if (this.storeOptions.reservoirRefreshInterval != null && now >= this._lastReservoirRefresh + this.storeOptions.reservoirRefreshInterval) { - this._lastReservoirRefresh = now; - this.storeOptions.reservoir = this.storeOptions.reservoirRefreshAmount; - this.instance._drainAll(this.computeCapacity()); - } - if (this.storeOptions.reservoirIncreaseInterval != null && now >= this._lastReservoirIncrease + this.storeOptions.reservoirIncreaseInterval) { - ({ - reservoirIncreaseAmount: amount, - reservoirIncreaseMaximum: maximum, - reservoir - } = this.storeOptions); - this._lastReservoirIncrease = now; - incr = maximum != null ? Math.min(amount, maximum - reservoir) : amount; - if (incr > 0) { - this.storeOptions.reservoir += incr; - return this.instance._drainAll(this.computeCapacity()); - } - } - }, this.heartbeatInterval)).unref === "function" ? 
base.unref() : void 0; - } else { - return clearInterval(this.heartbeat); - } - } - async __publish__(message) { - await this.yieldLoop(); - return this.instance.Events.trigger("message", message.toString()); - } - async __disconnect__(flush) { - await this.yieldLoop(); - clearInterval(this.heartbeat); - return this.Promise.resolve(); - } - yieldLoop(t = 0) { - return new this.Promise(function(resolve2, reject) { - return setTimeout(resolve2, t); - }); - } - computePenalty() { - var ref; - return (ref = this.storeOptions.penalty) != null ? ref : 15 * this.storeOptions.minTime || 5e3; - } - async __updateSettings__(options) { - await this.yieldLoop(); - parser$2.overwrite(options, options, this.storeOptions); - this._startHeartbeat(); - this.instance._drainAll(this.computeCapacity()); - return true; - } - async __running__() { - await this.yieldLoop(); - return this._running; - } - async __queued__() { - await this.yieldLoop(); - return this.instance.queued(); - } - async __done__() { - await this.yieldLoop(); - return this._done; - } - async __groupCheck__(time) { - await this.yieldLoop(); - return this._nextRequest + this.timeout < time; - } - computeCapacity() { - var maxConcurrent, reservoir; - ({ maxConcurrent, reservoir } = this.storeOptions); - if (maxConcurrent != null && reservoir != null) { - return Math.min(maxConcurrent - this._running, reservoir); - } else if (maxConcurrent != null) { - return maxConcurrent - this._running; - } else if (reservoir != null) { - return reservoir; - } else { - return null; - } - } - conditionsCheck(weight) { - var capacity; - capacity = this.computeCapacity(); - return capacity == null || weight <= capacity; - } - async __incrementReservoir__(incr) { - var reservoir; - await this.yieldLoop(); - reservoir = this.storeOptions.reservoir += incr; - this.instance._drainAll(this.computeCapacity()); - return reservoir; - } - async __currentReservoir__() { - await this.yieldLoop(); - return this.storeOptions.reservoir; - } - isBlocked(now) { - return this._unblockTime >= now; - } - check(weight, now) { - return this.conditionsCheck(weight) && this._nextRequest - now <= 0; - } - async __check__(weight) { - var now; - await this.yieldLoop(); - now = Date.now(); - return this.check(weight, now); - } - async __register__(index, weight, expiration) { - var now, wait; - await this.yieldLoop(); - now = Date.now(); - if (this.conditionsCheck(weight)) { - this._running += weight; - if (this.storeOptions.reservoir != null) { - this.storeOptions.reservoir -= weight; - } - wait = Math.max(this._nextRequest - now, 0); - this._nextRequest = now + wait + this.storeOptions.minTime; - return { - success: true, - wait, - reservoir: this.storeOptions.reservoir - }; - } else { - return { - success: false - }; - } - } - strategyIsBlock() { - return this.storeOptions.strategy === 3; - } - async __submit__(queueLength, weight) { - var blocked, now, reachedHWM; - await this.yieldLoop(); - if (this.storeOptions.maxConcurrent != null && weight > this.storeOptions.maxConcurrent) { - throw new BottleneckError$2(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${this.storeOptions.maxConcurrent}`); - } - now = Date.now(); - reachedHWM = this.storeOptions.highWater != null && queueLength === this.storeOptions.highWater && !this.check(weight, now); - blocked = this.strategyIsBlock() && (reachedHWM || this.isBlocked(now)); - if (blocked) { - this._unblockTime = now + this.computePenalty(); - this._nextRequest = this._unblockTime + 
this.storeOptions.minTime; - this.instance._dropAllQueued(); - } - return { - reachedHWM, - blocked, - strategy: this.storeOptions.strategy - }; - } - async __free__(index, weight) { - await this.yieldLoop(); - this._running -= weight; - this._done += weight; - this.instance._drainAll(this.computeCapacity()); - return { - running: this._running - }; - } - }; - var LocalDatastore_1 = LocalDatastore; - var BottleneckError$3, States; - BottleneckError$3 = BottleneckError_1; - States = class States { - constructor(status1) { - this.status = status1; - this._jobs = {}; - this.counts = this.status.map(function() { - return 0; - }); - } - next(id) { - var current, next; - current = this._jobs[id]; - next = current + 1; - if (current != null && next < this.status.length) { - this.counts[current]--; - this.counts[next]++; - return this._jobs[id]++; - } else if (current != null) { - this.counts[current]--; - return delete this._jobs[id]; - } - } - start(id) { - var initial; - initial = 0; - this._jobs[id] = initial; - return this.counts[initial]++; - } - remove(id) { - var current; - current = this._jobs[id]; - if (current != null) { - this.counts[current]--; - delete this._jobs[id]; - } - return current != null; - } - jobStatus(id) { - var ref; - return (ref = this.status[this._jobs[id]]) != null ? ref : null; - } - statusJobs(status) { - var k, pos, ref, results, v; - if (status != null) { - pos = this.status.indexOf(status); - if (pos < 0) { - throw new BottleneckError$3(`status must be one of ${this.status.join(", ")}`); - } - ref = this._jobs; - results = []; - for (k in ref) { - v = ref[k]; - if (v === pos) { - results.push(k); - } - } - return results; - } else { - return Object.keys(this._jobs); - } - } - statusCounts() { - return this.counts.reduce(((acc, v, i) => { - acc[this.status[i]] = v; - return acc; - }), {}); - } - }; - var States_1 = States; - var DLList$2, Sync; - DLList$2 = DLList_1; - Sync = class Sync { - constructor(name, Promise2) { - this.schedule = this.schedule.bind(this); - this.name = name; - this.Promise = Promise2; - this._running = 0; - this._queue = new DLList$2(); - } - isEmpty() { - return this._queue.length === 0; + isEmpty() { + return this._queue.length === 0; } async _tryToRun() { var args, cb, error3, reject, resolve2, returned, task; @@ -66014,1306 +65192,1476 @@ var require_light = __commonJS({ } }); -// node_modules/jsonschema/lib/helpers.js -var require_helpers = __commonJS({ - "node_modules/jsonschema/lib/helpers.js"(exports2, module2) { +// node_modules/@actions/glob/lib/internal-glob-options-helper.js +var require_internal_glob_options_helper = __commonJS({ + "node_modules/@actions/glob/lib/internal-glob-options-helper.js"(exports2) { "use strict"; - var uri = require("url"); - var ValidationError = exports2.ValidationError = function ValidationError2(message, instance, schema2, path3, name, argument) { - if (Array.isArray(path3)) { - this.path = path3; - this.property = path3.reduce(function(sum, item) { - return sum + makeSuffix(item); - }, "instance"); - } else if (path3 !== void 0) { - this.property = path3; - } - if (message) { - this.message = message; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; } - if (schema2) { - var id = schema2.$id || schema2.id; - this.schema = id || schema2; + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + } + __setModuleDefault2(result, mod); + return result; + }; + })(); + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getOptions = getOptions; + var core12 = __importStar2(require_core()); + function getOptions(copy) { + const result = { + followSymbolicLinks: true, + implicitDescendants: true, + matchDirectories: true, + omitBrokenSymbolicLinks: true, + excludeHiddenFiles: false + }; + if (copy) { + if (typeof copy.followSymbolicLinks === "boolean") { + result.followSymbolicLinks = copy.followSymbolicLinks; + core12.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + } + if (typeof copy.implicitDescendants === "boolean") { + result.implicitDescendants = copy.implicitDescendants; + core12.debug(`implicitDescendants '${result.implicitDescendants}'`); + } + if (typeof copy.matchDirectories === "boolean") { + result.matchDirectories = copy.matchDirectories; + core12.debug(`matchDirectories '${result.matchDirectories}'`); + } + if (typeof copy.omitBrokenSymbolicLinks === "boolean") { + result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; + core12.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + } + if (typeof copy.excludeHiddenFiles === "boolean") { + result.excludeHiddenFiles = copy.excludeHiddenFiles; + core12.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + } } - if (instance !== void 0) { - this.instance = instance; + return result; + } + } +}); + +// node_modules/@actions/glob/lib/internal-path-helper.js +var require_internal_path_helper = __commonJS({ + "node_modules/@actions/glob/lib/internal-path-helper.js"(exports2) { + "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; } - this.name = name; - this.argument = argument; - this.stack = this.toString(); - }; - ValidationError.prototype.toString = function toString2() { - return this.property + " " + this.message; - }; - var ValidatorResult = exports2.ValidatorResult = function ValidatorResult2(instance, schema2, options, ctx) { - this.instance = instance; - this.schema = schema2; - this.options = options; - this.path = ctx.path; - this.propertyPath = ctx.propertyPath; - this.errors = []; - this.throwError = options && options.throwError; - this.throwFirst = options && options.throwFirst; - this.throwAll = options && options.throwAll; - this.disableFormat = options && options.disableFormat === true; + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + } + __setModuleDefault2(result, mod); + return result; + }; + })(); + var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { + return mod && mod.__esModule ? 
mod : { "default": mod }; }; - ValidatorResult.prototype.addError = function addError(detail) { - var err; - if (typeof detail == "string") { - err = new ValidationError(detail, this.instance, this.schema, this.path); - } else { - if (!detail) throw new Error("Missing error detail"); - if (!detail.message) throw new Error("Missing error message"); - if (!detail.name) throw new Error("Missing validator type"); - err = new ValidationError(detail.message, this.instance, this.schema, this.path, detail.name, detail.argument); + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.dirname = dirname; + exports2.ensureAbsoluteRoot = ensureAbsoluteRoot; + exports2.hasAbsoluteRoot = hasAbsoluteRoot; + exports2.hasRoot = hasRoot; + exports2.normalizeSeparators = normalizeSeparators; + exports2.safeTrimTrailingSeparator = safeTrimTrailingSeparator; + var path4 = __importStar2(require("path")); + var assert_1 = __importDefault2(require("assert")); + var IS_WINDOWS = process.platform === "win32"; + function dirname(p) { + p = safeTrimTrailingSeparator(p); + if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { + return p; } - this.errors.push(err); - if (this.throwFirst) { - throw new ValidatorResultError(this); - } else if (this.throwError) { - throw err; + let result = path4.dirname(p); + if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { + result = safeTrimTrailingSeparator(result); } - return err; - }; - ValidatorResult.prototype.importErrors = function importErrors(res) { - if (typeof res == "string" || res && res.validatorType) { - this.addError(res); - } else if (res && res.errors) { - this.errors = this.errors.concat(res.errors); + return result; + } + function ensureAbsoluteRoot(root, itemPath) { + (0, assert_1.default)(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); + (0, assert_1.default)(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); + if (hasAbsoluteRoot(itemPath)) { + return itemPath; } - }; - function stringizer(v, i) { - return i + ": " + v.toString() + "\n"; + if (IS_WINDOWS) { + if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { + let cwd = process.cwd(); + (0, assert_1.default)(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { + if (itemPath.length === 2) { + return `${itemPath[0]}:\\${cwd.substr(3)}`; + } else { + if (!cwd.endsWith("\\")) { + cwd += "\\"; + } + return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; + } + } else { + return `${itemPath[0]}:\\${itemPath.substr(2)}`; + } + } else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { + const cwd = process.cwd(); + (0, assert_1.default)(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. 
Actual '${cwd}'`); + return `${cwd[0]}:\\${itemPath.substr(1)}`; + } + } + (0, assert_1.default)(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); + if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { + } else { + root += path4.sep; + } + return root + itemPath; } - ValidatorResult.prototype.toString = function toString2(res) { - return this.errors.map(stringizer).join(""); - }; - Object.defineProperty(ValidatorResult.prototype, "valid", { get: function() { - return !this.errors.length; - } }); - module2.exports.ValidatorResultError = ValidatorResultError; - function ValidatorResultError(result) { - if (Error.captureStackTrace) { - Error.captureStackTrace(this, ValidatorResultError); + function hasAbsoluteRoot(itemPath) { + (0, assert_1.default)(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); + itemPath = normalizeSeparators(itemPath); + if (IS_WINDOWS) { + return itemPath.startsWith("\\\\") || /^[A-Z]:\\/i.test(itemPath); } - this.instance = result.instance; - this.schema = result.schema; - this.options = result.options; - this.errors = result.errors; + return itemPath.startsWith("/"); } - ValidatorResultError.prototype = new Error(); - ValidatorResultError.prototype.constructor = ValidatorResultError; - ValidatorResultError.prototype.name = "Validation Error"; - var SchemaError = exports2.SchemaError = function SchemaError2(msg, schema2) { - this.message = msg; - this.schema = schema2; - Error.call(this, msg); - Error.captureStackTrace(this, SchemaError2); - }; - SchemaError.prototype = Object.create( - Error.prototype, - { - constructor: { value: SchemaError, enumerable: false }, - name: { value: "SchemaError", enumerable: false } + function hasRoot(itemPath) { + (0, assert_1.default)(itemPath, `isRooted parameter 'itemPath' must not be empty`); + itemPath = normalizeSeparators(itemPath); + if (IS_WINDOWS) { + return itemPath.startsWith("\\") || /^[A-Z]:/i.test(itemPath); } - ); - var SchemaContext = exports2.SchemaContext = function SchemaContext2(schema2, options, path3, base, schemas) { - this.schema = schema2; - this.options = options; - if (Array.isArray(path3)) { - this.path = path3; - this.propertyPath = path3.reduce(function(sum, item) { - return sum + makeSuffix(item); - }, "instance"); - } else { - this.propertyPath = path3; + return itemPath.startsWith("/"); + } + function normalizeSeparators(p) { + p = p || ""; + if (IS_WINDOWS) { + p = p.replace(/\//g, "\\"); + const isUnc = /^\\\\+[^\\]/.test(p); + return (isUnc ? "\\" : "") + p.replace(/\\\\+/g, "\\"); } - this.base = base; - this.schemas = schemas; - }; - SchemaContext.prototype.resolve = function resolve2(target) { - return uri.resolve(this.base, target); - }; - SchemaContext.prototype.makeChild = function makeChild(schema2, propertyName) { - var path3 = propertyName === void 0 ? this.path : this.path.concat([propertyName]); - var id = schema2.$id || schema2.id; - var base = uri.resolve(this.base, id || ""); - var ctx = new SchemaContext(schema2, this.options, path3, base, Object.create(this.schemas)); - if (id && !ctx.schemas[base]) { - ctx.schemas[base] = schema2; + return p.replace(/\/\/+/g, "/"); + } + function safeTrimTrailingSeparator(p) { + if (!p) { + return ""; } - return ctx; - }; - var FORMAT_REGEXPS = exports2.FORMAT_REGEXPS = { - // 7.3.1. 
Dates, Times, and Duration - "date-time": /^\d{4}-(?:0[0-9]{1}|1[0-2]{1})-(3[01]|0[1-9]|[12][0-9])[tT ](2[0-4]|[01][0-9]):([0-5][0-9]):(60|[0-5][0-9])(\.\d+)?([zZ]|[+-]([0-5][0-9]):(60|[0-5][0-9]))$/, - "date": /^\d{4}-(?:0[0-9]{1}|1[0-2]{1})-(3[01]|0[1-9]|[12][0-9])$/, - "time": /^(2[0-4]|[01][0-9]):([0-5][0-9]):(60|[0-5][0-9])$/, - "duration": /P(T\d+(H(\d+M(\d+S)?)?|M(\d+S)?|S)|\d+(D|M(\d+D)?|Y(\d+M(\d+D)?)?)(T\d+(H(\d+M(\d+S)?)?|M(\d+S)?|S))?|\d+W)/i, - // 7.3.2. Email Addresses - // TODO: fix the email production - "email": /^(?:[\w\!\#\$\%\&\'\*\+\-\/\=\?\^\`\{\|\}\~]+\.)*[\w\!\#\$\%\&\'\*\+\-\/\=\?\^\`\{\|\}\~]+@(?:(?:(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-](?!\.)){0,61}[a-zA-Z0-9]?\.)+[a-zA-Z0-9](?:[a-zA-Z0-9\-](?!$)){0,61}[a-zA-Z0-9]?)|(?:\[(?:(?:[01]?\d{1,2}|2[0-4]\d|25[0-5])\.){3}(?:[01]?\d{1,2}|2[0-4]\d|25[0-5])\]))$/, - "idn-email": /^("(?:[!#-\[\]-\u{10FFFF}]|\\[\t -\u{10FFFF}])*"|[!#-'*+\-/-9=?A-Z\^-\u{10FFFF}](?:\.?[!#-'*+\-/-9=?A-Z\^-\u{10FFFF}])*)@([!#-'*+\-/-9=?A-Z\^-\u{10FFFF}](?:\.?[!#-'*+\-/-9=?A-Z\^-\u{10FFFF}])*|\[[!-Z\^-\u{10FFFF}]*\])$/u, - // 7.3.3. Hostnames - // 7.3.4. IP Addresses - "ip-address": /^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/, - // FIXME whitespace is invalid - "ipv6": /^\s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?\s*$/, - // 7.3.5. Resource Identifiers - // TODO: A more accurate regular expression for "uri" goes: - // [A-Za-z][+\-.0-9A-Za-z]*:((/(/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?)?)?#(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|(/(/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~])|/?%[0-9A-Fa-f]{2}|[!$&-.0-;=?-Z_a-z~])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*(#(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*)?|/(/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+(:\d*)?|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?:\d*|\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)?)? 
- "uri": /^[a-zA-Z][a-zA-Z0-9+.-]*:[^\s]*$/, - "uri-reference": /^(((([A-Za-z][+\-.0-9A-Za-z]*(:%[0-9A-Fa-f]{2}|:[!$&-.0-;=?-Z_a-z~]|[/?])|\?)(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|([A-Za-z][+\-.0-9A-Za-z]*:?)?)|([A-Za-z][+\-.0-9A-Za-z]*:)?\/((%[0-9A-Fa-f]{2}|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|(\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?)?))#(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|(([A-Za-z][+\-.0-9A-Za-z]*)?%[0-9A-Fa-f]{2}|[!$&-.0-9;=@_~]|[A-Za-z][+\-.0-9A-Za-z]*[!$&-*,;=@_~])(%[0-9A-Fa-f]{2}|[!$&-.0-9;=@-Z_a-z~])*((([/?](%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*)?#|[/?])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*)?|([A-Za-z][+\-.0-9A-Za-z]*(:%[0-9A-Fa-f]{2}|:[!$&-.0-;=?-Z_a-z~]|[/?])|\?)(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|([A-Za-z][+\-.0-9A-Za-z]*:)?\/((%[0-9A-Fa-f]{2}|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+(:\d*)?|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?:\d*|\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)?|[A-Za-z][+\-.0-9A-Za-z]*:?)?$/, - "iri": /^[a-zA-Z][a-zA-Z0-9+.-]*:[^\s]*$/, - "iri-reference": /^(((([A-Za-z][+\-.0-9A-Za-z]*(:%[0-9A-Fa-f]{2}|:[!$&-.0-;=?-Z_a-z~-\u{10FFFF}]|[/?])|\?)(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|([A-Za-z][+\-.0-9A-Za-z]*:?)?)|([A-Za-z][+\-.0-9A-Za-z]*:)?\/((%[0-9A-Fa-f]{2}|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~-\u{10FFFF}])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~-\u{10FFFF}])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|(\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~-\u{10FFFF}])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?)?))#(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|(([A-Za-z][+\-.0-9A-Za-z]*)?%[0-9A-Fa-f]{2}|[!$&-.0-9;=@_~-\u{10FFFF}]|[A-Za-z][+\-.0-9A-Za-z]*[!$&-*,;=@_~-\u{10FFFF}])(%[0-9A-Fa-f]{2}|[!$&-.0-9;=@-Z_a-z~-\u{10FFFF}])*((([/?](%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*)?#|[/?])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*)?|([A-Za-z][+\-.0-9A-Za-z]*(:%[0-9A-Fa-f]{2}|:[!$&-.0-;=?-Z_a-z~-\u{10FFFF}]|[/?])|\?)(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|([A-Za-z][+\-.0-9A-Za-z]*:)?\/((%[0-9A-Fa-f]{2}|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~-\u{10FFFF}])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~-\u{10FFFF}])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~-\u{10FFFF}])+(:\d*)?|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?:\d*|\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?)?|[A-Za-z][+\-.0-9A-Za-z]*:?)?$/u, - "uuid": /^[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}$/i, - // 7.3.6. uri-template - "uri-template": /(%[0-9a-f]{2}|[!#$&(-;=?@\[\]_a-z~]|\{[!#&+,./;=?@|]?(%[0-9a-f]{2}|[0-9_a-z])(\.?(%[0-9a-f]{2}|[0-9_a-z]))*(:[1-9]\d{0,3}|\*)?(,(%[0-9a-f]{2}|[0-9_a-z])(\.?(%[0-9a-f]{2}|[0-9_a-z]))*(:[1-9]\d{0,3}|\*)?)*\})*/iu, - // 7.3.7. 
JSON Pointers - "json-pointer": /^(\/([\x00-\x2e0-@\[-}\x7f]|~[01])*)*$/iu, - "relative-json-pointer": /^\d+(#|(\/([\x00-\x2e0-@\[-}\x7f]|~[01])*)*)$/iu, - // hostname regex from: http://stackoverflow.com/a/1420225/5628 - "hostname": /^(?=.{1,255}$)[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?(?:\.[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?)*\.?$/, - "host-name": /^(?=.{1,255}$)[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?(?:\.[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?)*\.?$/, - "utc-millisec": function(input) { - return typeof input === "string" && parseFloat(input) === parseInt(input, 10) && !isNaN(input); - }, - // 7.3.8. regex - "regex": function(input) { - var result = true; - try { - new RegExp(input); - } catch (e) { - result = false; - } - return result; - }, - // Other definitions - // "style" was removed from JSON Schema in draft-4 and is deprecated - "style": /[\r\n\t ]*[^\r\n\t ][^:]*:[\r\n\t ]*[^\r\n\t ;]*[\r\n\t ]*;?/, - // "color" was removed from JSON Schema in draft-4 and is deprecated - "color": /^(#?([0-9A-Fa-f]{3}){1,2}\b|aqua|black|blue|fuchsia|gray|green|lime|maroon|navy|olive|orange|purple|red|silver|teal|white|yellow|(rgb\(\s*\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\b\s*,\s*\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\b\s*,\s*\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\b\s*\))|(rgb\(\s*(\d?\d%|100%)+\s*,\s*(\d?\d%|100%)+\s*,\s*(\d?\d%|100%)+\s*\)))$/, - "phone": /^\+(?:[0-9] ?){6,14}[0-9]$/, - "alpha": /^[a-zA-Z]+$/, - "alphanumeric": /^[a-zA-Z0-9]+$/ - }; - FORMAT_REGEXPS.regexp = FORMAT_REGEXPS.regex; - FORMAT_REGEXPS.pattern = FORMAT_REGEXPS.regex; - FORMAT_REGEXPS.ipv4 = FORMAT_REGEXPS["ip-address"]; - exports2.isFormat = function isFormat(input, format, validator) { - if (typeof input === "string" && FORMAT_REGEXPS[format] !== void 0) { - if (FORMAT_REGEXPS[format] instanceof RegExp) { - return FORMAT_REGEXPS[format].test(input); - } - if (typeof FORMAT_REGEXPS[format] === "function") { - return FORMAT_REGEXPS[format](input); - } - } else if (validator && validator.customFormats && typeof validator.customFormats[format] === "function") { - return validator.customFormats[format](input); + p = normalizeSeparators(p); + if (!p.endsWith(path4.sep)) { + return p; } - return true; - }; - var makeSuffix = exports2.makeSuffix = function makeSuffix2(key) { - key = key.toString(); - if (!key.match(/[.\s\[\]]/) && !key.match(/^[\d]/)) { - return "." 
+ key; + if (p === path4.sep) { + return p; } - if (key.match(/^\d+$/)) { - return "[" + key + "]"; + if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { + return p; } - return "[" + JSON.stringify(key) + "]"; - }; - exports2.deepCompareStrict = function deepCompareStrict(a, b) { - if (typeof a !== typeof b) { - return false; + return p.substr(0, p.length - 1); + } + } +}); + +// node_modules/@actions/glob/lib/internal-match-kind.js +var require_internal_match_kind = __commonJS({ + "node_modules/@actions/glob/lib/internal-match-kind.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.MatchKind = void 0; + var MatchKind; + (function(MatchKind2) { + MatchKind2[MatchKind2["None"] = 0] = "None"; + MatchKind2[MatchKind2["Directory"] = 1] = "Directory"; + MatchKind2[MatchKind2["File"] = 2] = "File"; + MatchKind2[MatchKind2["All"] = 3] = "All"; + })(MatchKind || (exports2.MatchKind = MatchKind = {})); + } +}); + +// node_modules/@actions/glob/lib/internal-pattern-helper.js +var require_internal_pattern_helper = __commonJS({ + "node_modules/@actions/glob/lib/internal-pattern-helper.js"(exports2) { + "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; } - if (Array.isArray(a)) { - if (!Array.isArray(b)) { - return false; - } - if (a.length !== b.length) { - return false; + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); } - return a.every(function(v, i) { - return deepCompareStrict(a[i], b[i]); - }); + __setModuleDefault2(result, mod); + return result; + }; + })(); + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getSearchPaths = getSearchPaths; + exports2.match = match; + exports2.partialMatch = partialMatch; + var pathHelper = __importStar2(require_internal_path_helper()); + var internal_match_kind_1 = require_internal_match_kind(); + var IS_WINDOWS = process.platform === "win32"; + function getSearchPaths(patterns) { + patterns = patterns.filter((x) => !x.negate); + const searchPathMap = {}; + for (const pattern of patterns) { + const key = IS_WINDOWS ? pattern.searchPath.toUpperCase() : pattern.searchPath; + searchPathMap[key] = "candidate"; } - if (typeof a === "object") { - if (!a || !b) { - return a === b; + const result = []; + for (const pattern of patterns) { + const key = IS_WINDOWS ? 
pattern.searchPath.toUpperCase() : pattern.searchPath; + if (searchPathMap[key] === "included") { + continue; } - var aKeys = Object.keys(a); - var bKeys = Object.keys(b); - if (aKeys.length !== bKeys.length) { - return false; + let foundAncestor = false; + let tempKey = key; + let parent = pathHelper.dirname(tempKey); + while (parent !== tempKey) { + if (searchPathMap[parent]) { + foundAncestor = true; + break; + } + tempKey = parent; + parent = pathHelper.dirname(tempKey); } - return aKeys.every(function(v) { - return deepCompareStrict(a[v], b[v]); - }); - } - return a === b; - }; - function deepMerger(target, dst, e, i) { - if (typeof e === "object") { - dst[i] = deepMerge(target[i], e); - } else { - if (target.indexOf(e) === -1) { - dst.push(e); + if (!foundAncestor) { + result.push(pattern.searchPath); + searchPathMap[key] = "included"; } } + return result; } - function copyist(src, dst, key) { - dst[key] = src[key]; - } - function copyistWithDeepMerge(target, src, dst, key) { - if (typeof src[key] !== "object" || !src[key]) { - dst[key] = src[key]; - } else { - if (!target[key]) { - dst[key] = src[key]; + function match(patterns, itemPath) { + let result = internal_match_kind_1.MatchKind.None; + for (const pattern of patterns) { + if (pattern.negate) { + result &= ~pattern.match(itemPath); } else { - dst[key] = deepMerge(target[key], src[key]); - } - } - } - function deepMerge(target, src) { - var array = Array.isArray(src); - var dst = array && [] || {}; - if (array) { - target = target || []; - dst = dst.concat(target); - src.forEach(deepMerger.bind(null, target, dst)); - } else { - if (target && typeof target === "object") { - Object.keys(target).forEach(copyist.bind(null, target, dst)); + result |= pattern.match(itemPath); } - Object.keys(src).forEach(copyistWithDeepMerge.bind(null, target, src, dst)); } - return dst; + return result; } - module2.exports.deepMerge = deepMerge; - exports2.objectGetPath = function objectGetPath(o, s) { - var parts = s.split("/").slice(1); - var k; - while (typeof (k = parts.shift()) == "string") { - var n = decodeURIComponent(k.replace(/~0/, "~").replace(/~1/g, "/")); - if (!(n in o)) return; - o = o[n]; - } - return o; - }; - function pathEncoder(v) { - return "/" + encodeURIComponent(v).replace(/~/g, "%7E"); + function partialMatch(patterns, itemPath) { + return patterns.some((x) => !x.negate && x.partialMatch(itemPath)); } - exports2.encodePath = function encodePointer(a) { - return a.map(pathEncoder).join(""); - }; - exports2.getDecimalPlaces = function getDecimalPlaces(number) { - var decimalPlaces = 0; - if (isNaN(number)) return decimalPlaces; - if (typeof number !== "number") { - number = Number(number); - } - var parts = number.toString().split("e"); - if (parts.length === 2) { - if (parts[1][0] !== "-") { - return decimalPlaces; - } else { - decimalPlaces = Number(parts[1].slice(1)); - } - } - var decimalParts = parts[0].split("."); - if (decimalParts.length === 2) { - decimalPlaces += decimalParts[1].length; + } +}); + +// node_modules/concat-map/index.js +var require_concat_map = __commonJS({ + "node_modules/concat-map/index.js"(exports2, module2) { + module2.exports = function(xs, fn) { + var res = []; + for (var i = 0; i < xs.length; i++) { + var x = fn(xs[i], i); + if (isArray(x)) res.push.apply(res, x); + else res.push(x); } - return decimalPlaces; + return res; }; - exports2.isSchema = function isSchema(val) { - return typeof val === "object" && val || typeof val === "boolean"; + var isArray = Array.isArray || function(xs) { + 
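/* Editorial note: this vendored concat-map module is a pre-Array.prototype.flatMap
   flat-map helper; the brace-expansion module bundled below uses it to expand each
   alternative and splice the results into one flat list. A minimal sketch of the
   observable behavior, assuming the module is loaded as `concatMap` (illustrative
   only, not part of the bundle):

     const concatMap = require("concat-map");
     concatMap([1, 2, 3], (x) => (x % 2 ? [x, x] : []));  // => [1, 1, 3, 3]
     concatMap(["a", "b"], (x) => x + "!");               // non-array results are pushed as-is => ["a!", "b!"]
*/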
return Object.prototype.toString.call(xs) === "[object Array]"; }; } }); -// node_modules/jsonschema/lib/attribute.js -var require_attribute = __commonJS({ - "node_modules/jsonschema/lib/attribute.js"(exports2, module2) { +// node_modules/balanced-match/index.js +var require_balanced_match = __commonJS({ + "node_modules/balanced-match/index.js"(exports2, module2) { "use strict"; - var helpers = require_helpers(); - var ValidatorResult = helpers.ValidatorResult; - var SchemaError = helpers.SchemaError; - var attribute = {}; - attribute.ignoreProperties = { - // informative properties - "id": true, - "default": true, - "description": true, - "title": true, - // arguments to other properties - "additionalItems": true, - "then": true, - "else": true, - // special-handled properties - "$schema": true, - "$ref": true, - "extends": true - }; - var validators = attribute.validators = {}; - validators.type = function validateType(instance, schema2, options, ctx) { - if (instance === void 0) { - return null; - } - var result = new ValidatorResult(instance, schema2, options, ctx); - var types = Array.isArray(schema2.type) ? schema2.type : [schema2.type]; - if (!types.some(this.testType.bind(this, instance, schema2, options, ctx))) { - var list = types.map(function(v) { - if (!v) return; - var id = v.$id || v.id; - return id ? "<" + id + ">" : v + ""; - }); - result.addError({ - name: "type", - argument: list, - message: "is not of a type(s) " + list - }); - } - return result; - }; - function testSchemaNoThrow(instance, options, ctx, callback, schema2) { - var throwError2 = options.throwError; - var throwAll = options.throwAll; - options.throwError = false; - options.throwAll = false; - var res = this.validateSchema(instance, schema2, options, ctx); - options.throwError = throwError2; - options.throwAll = throwAll; - if (!res.valid && callback instanceof Function) { - callback(res); - } - return res.valid; + module2.exports = balanced; + function balanced(a, b, str2) { + if (a instanceof RegExp) a = maybeMatch(a, str2); + if (b instanceof RegExp) b = maybeMatch(b, str2); + var r = range(a, b, str2); + return r && { + start: r[0], + end: r[1], + pre: str2.slice(0, r[0]), + body: str2.slice(r[0] + a.length, r[1]), + post: str2.slice(r[1] + b.length) + }; } - validators.anyOf = function validateAnyOf(instance, schema2, options, ctx) { - if (instance === void 0) { - return null; - } - var result = new ValidatorResult(instance, schema2, options, ctx); - var inner = new ValidatorResult(instance, schema2, options, ctx); - if (!Array.isArray(schema2.anyOf)) { - throw new SchemaError("anyOf must be an array"); - } - if (!schema2.anyOf.some( - testSchemaNoThrow.bind( - this, - instance, - options, - ctx, - function(res) { - inner.importErrors(res); + function maybeMatch(reg, str2) { + var m = str2.match(reg); + return m ? 
m[0] : null; + } + balanced.range = range; + function range(a, b, str2) { + var begs, beg, left, right, result; + var ai = str2.indexOf(a); + var bi = str2.indexOf(b, ai + 1); + var i = ai; + if (ai >= 0 && bi > 0) { + begs = []; + left = str2.length; + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str2.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [begs.pop(), bi]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + bi = str2.indexOf(b, i + 1); } - ) - )) { - var list = schema2.anyOf.map(function(v, i) { - var id = v.$id || v.id; - if (id) return "<" + id + ">"; - return v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]"; - }); - if (options.nestedErrors) { - result.importErrors(inner); + i = ai < bi && ai >= 0 ? ai : bi; + } + if (begs.length) { + result = [left, right]; } - result.addError({ - name: "anyOf", - argument: list, - message: "is not any of " + list.join(",") - }); } return result; - }; - validators.allOf = function validateAllOf(instance, schema2, options, ctx) { - if (instance === void 0) { - return null; + } + } +}); + +// node_modules/brace-expansion/index.js +var require_brace_expansion = __commonJS({ + "node_modules/brace-expansion/index.js"(exports2, module2) { + var concatMap = require_concat_map(); + var balanced = require_balanced_match(); + module2.exports = expandTop; + var escSlash = "\0SLASH" + Math.random() + "\0"; + var escOpen = "\0OPEN" + Math.random() + "\0"; + var escClose = "\0CLOSE" + Math.random() + "\0"; + var escComma = "\0COMMA" + Math.random() + "\0"; + var escPeriod = "\0PERIOD" + Math.random() + "\0"; + function numeric(str2) { + return parseInt(str2, 10) == str2 ? parseInt(str2, 10) : str2.charCodeAt(0); + } + function escapeBraces(str2) { + return str2.split("\\\\").join(escSlash).split("\\{").join(escOpen).split("\\}").join(escClose).split("\\,").join(escComma).split("\\.").join(escPeriod); + } + function unescapeBraces(str2) { + return str2.split(escSlash).join("\\").split(escOpen).join("{").split(escClose).join("}").split(escComma).join(",").split(escPeriod).join("."); + } + function parseCommaParts(str2) { + if (!str2) + return [""]; + var parts = []; + var m = balanced("{", "}", str2); + if (!m) + return str2.split(","); + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(","); + p[p.length - 1] += "{" + body + "}"; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length - 1] += postParts.shift(); + p.push.apply(p, postParts); } - if (!Array.isArray(schema2.allOf)) { - throw new SchemaError("allOf must be an array"); + parts.push.apply(parts, p); + return parts; + } + function expandTop(str2) { + if (!str2) + return []; + if (str2.substr(0, 2) === "{}") { + str2 = "\\{\\}" + str2.substr(2); } - var result = new ValidatorResult(instance, schema2, options, ctx); - var self2 = this; - schema2.allOf.forEach(function(v, i) { - var valid2 = self2.validateSchema(instance, v, options, ctx); - if (!valid2.valid) { - var id = v.$id || v.id; - var msg = id || v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]"; - result.addError({ - name: "allOf", - argument: { id: msg, length: valid2.errors.length, valid: valid2 }, - message: "does not match allOf schema " + msg + " with " + valid2.errors.length + " error[s]:" - }); - result.importErrors(valid2); + return expand2(escapeBraces(str2), true).map(unescapeBraces); + } + function embrace(str2) 
{ + return "{" + str2 + "}"; + } + function isPadded(el) { + return /^-?0\d/.test(el); + } + function lte(i, y) { + return i <= y; + } + function gte4(i, y) { + return i >= y; + } + function expand2(str2, isTop) { + var expansions = []; + var m = balanced("{", "}", str2); + if (!m || /\$$/.test(m.pre)) return [str2]; + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(",") >= 0; + if (!isSequence && !isOptions) { + if (m.post.match(/,(?!,).*\}/)) { + str2 = m.pre + "{" + m.body + escClose + m.post; + return expand2(str2); } - }); - return result; - }; - validators.oneOf = function validateOneOf(instance, schema2, options, ctx) { - if (instance === void 0) { - return null; - } - if (!Array.isArray(schema2.oneOf)) { - throw new SchemaError("oneOf must be an array"); + return [str2]; } - var result = new ValidatorResult(instance, schema2, options, ctx); - var inner = new ValidatorResult(instance, schema2, options, ctx); - var count = schema2.oneOf.filter( - testSchemaNoThrow.bind( - this, - instance, - options, - ctx, - function(res) { - inner.importErrors(res); + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + n = expand2(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length ? expand2(m.post, false) : [""]; + return post.map(function(p) { + return m.pre + n[0] + p; + }); } - ) - ).length; - var list = schema2.oneOf.map(function(v, i) { - var id = v.$id || v.id; - return id || v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]"; - }); - if (count !== 1) { - if (options.nestedErrors) { - result.importErrors(inner); } - result.addError({ - name: "oneOf", - argument: list, - message: "is not exactly one from " + list.join(",") - }); } - return result; - }; - validators.if = function validateIf(instance, schema2, options, ctx) { - if (instance === void 0) return null; - if (!helpers.isSchema(schema2.if)) throw new Error('Expected "if" keyword to be a schema'); - var ifValid = testSchemaNoThrow.call(this, instance, options, ctx, null, schema2.if); - var result = new ValidatorResult(instance, schema2, options, ctx); - var res; - if (ifValid) { - if (schema2.then === void 0) return; - if (!helpers.isSchema(schema2.then)) throw new Error('Expected "then" keyword to be a schema'); - res = this.validateSchema(instance, schema2.then, options, ctx.makeChild(schema2.then)); - result.importErrors(res); + var pre = m.pre; + var post = m.post.length ? expand2(m.post, false) : [""]; + var N; + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length); + var incr = n.length == 3 ? 
Math.abs(numeric(n[2])) : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte4; + } + var pad = n.some(isPadded); + N = []; + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === "\\") + c = ""; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join("0"); + if (i < 0) + c = "-" + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } } else { - if (schema2.else === void 0) return; - if (!helpers.isSchema(schema2.else)) throw new Error('Expected "else" keyword to be a schema'); - res = this.validateSchema(instance, schema2.else, options, ctx.makeChild(schema2.else)); - result.importErrors(res); + N = concatMap(n, function(el) { + return expand2(el, false); + }); } - return result; - }; - function getEnumerableProperty(object, key) { - if (Object.hasOwnProperty.call(object, key)) return object[key]; - if (!(key in object)) return; - while (object = Object.getPrototypeOf(object)) { - if (Object.propertyIsEnumerable.call(object, key)) return object[key]; + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } } + return expansions; } - validators.propertyNames = function validatePropertyNames(instance, schema2, options, ctx) { - if (!this.types.object(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var subschema = schema2.propertyNames !== void 0 ? schema2.propertyNames : {}; - if (!helpers.isSchema(subschema)) throw new SchemaError('Expected "propertyNames" to be a schema (object or boolean)'); - for (var property in instance) { - if (getEnumerableProperty(instance, property) !== void 0) { - var res = this.validateSchema(property, subschema, options, ctx.makeChild(subschema)); - result.importErrors(res); - } + } +}); + +// node_modules/minimatch/minimatch.js +var require_minimatch = __commonJS({ + "node_modules/minimatch/minimatch.js"(exports2, module2) { + module2.exports = minimatch; + minimatch.Minimatch = Minimatch; + var path4 = (function() { + try { + return require("path"); + } catch (e) { } - return result; + })() || { + sep: "/" }; - validators.properties = function validateProperties(instance, schema2, options, ctx) { - if (!this.types.object(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var properties = schema2.properties || {}; - for (var property in properties) { - var subschema = properties[property]; - if (subschema === void 0) { - continue; - } else if (subschema === null) { - throw new SchemaError('Unexpected null, expected schema in "properties"'); - } - if (typeof options.preValidateProperty == "function") { - options.preValidateProperty(instance, property, subschema, options, ctx); - } - var prop = getEnumerableProperty(instance, property); - var res = this.validateSchema(prop, subschema, options, ctx.makeChild(subschema, property)); - if (res.instance !== result.instance[property]) result.instance[property] = res.instance; - result.importErrors(res); - } - return result; + minimatch.sep = path4.sep; + var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}; + var expand2 = require_brace_expansion(); + var plTypes = { + "!": { open: "(?:(?!(?:", close: "))[^/]*?)" }, + "?": { open: "(?:", close: ")?" 
}, + "+": { open: "(?:", close: ")+" }, + "*": { open: "(?:", close: ")*" }, + "@": { open: "(?:", close: ")" } }; - function testAdditionalProperty(instance, schema2, options, ctx, property, result) { - if (!this.types.object(instance)) return; - if (schema2.properties && schema2.properties[property] !== void 0) { - return; + var qmark = "[^/]"; + var star = qmark + "*?"; + var twoStarDot = "(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?"; + var twoStarNoDot = "(?:(?!(?:\\/|^)\\.).)*?"; + var reSpecials = charSet("().*{}+?[]^$\\!"); + function charSet(s) { + return s.split("").reduce(function(set2, c) { + set2[c] = true; + return set2; + }, {}); + } + var slashSplit = /\/+/; + minimatch.filter = filter; + function filter(pattern, options) { + options = options || {}; + return function(p, i, list) { + return minimatch(p, pattern, options); + }; + } + function ext(a, b) { + b = b || {}; + var t = {}; + Object.keys(a).forEach(function(k) { + t[k] = a[k]; + }); + Object.keys(b).forEach(function(k) { + t[k] = b[k]; + }); + return t; + } + minimatch.defaults = function(def) { + if (!def || typeof def !== "object" || !Object.keys(def).length) { + return minimatch; } - if (schema2.additionalProperties === false) { - result.addError({ - name: "additionalProperties", - argument: property, - message: "is not allowed to have the additional property " + JSON.stringify(property) - }); - } else { - var additionalProperties = schema2.additionalProperties || {}; - if (typeof options.preValidateProperty == "function") { - options.preValidateProperty(instance, property, additionalProperties, options, ctx); - } - var res = this.validateSchema(instance[property], additionalProperties, options, ctx.makeChild(additionalProperties, property)); - if (res.instance !== result.instance[property]) result.instance[property] = res.instance; - result.importErrors(res); + var orig = minimatch; + var m = function minimatch2(p, pattern, options) { + return orig(p, pattern, ext(def, options)); + }; + m.Minimatch = function Minimatch2(pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)); + }; + m.Minimatch.defaults = function defaults(options) { + return orig.defaults(ext(def, options)).Minimatch; + }; + m.filter = function filter2(pattern, options) { + return orig.filter(pattern, ext(def, options)); + }; + m.defaults = function defaults(options) { + return orig.defaults(ext(def, options)); + }; + m.makeRe = function makeRe2(pattern, options) { + return orig.makeRe(pattern, ext(def, options)); + }; + m.braceExpand = function braceExpand2(pattern, options) { + return orig.braceExpand(pattern, ext(def, options)); + }; + m.match = function(list, pattern, options) { + return orig.match(list, pattern, ext(def, options)); + }; + return m; + }; + Minimatch.defaults = function(def) { + return minimatch.defaults(def).Minimatch; + }; + function minimatch(p, pattern, options) { + assertValidPattern(pattern); + if (!options) options = {}; + if (!options.nocomment && pattern.charAt(0) === "#") { + return false; } + return new Minimatch(pattern, options).match(p); } - validators.patternProperties = function validatePatternProperties(instance, schema2, options, ctx) { - if (!this.types.object(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var patternProperties = schema2.patternProperties || {}; - for (var property in instance) { - var test = true; - for (var pattern in patternProperties) { - var subschema = patternProperties[pattern]; - if (subschema === void 0) { - continue; - } 
else if (subschema === null) { - throw new SchemaError('Unexpected null, expected schema in "patternProperties"'); - } - try { - var regexp = new RegExp(pattern, "u"); - } catch (_e) { - regexp = new RegExp(pattern); - } - if (!regexp.test(property)) { - continue; - } - test = false; - if (typeof options.preValidateProperty == "function") { - options.preValidateProperty(instance, property, subschema, options, ctx); - } - var res = this.validateSchema(instance[property], subschema, options, ctx.makeChild(subschema, property)); - if (res.instance !== result.instance[property]) result.instance[property] = res.instance; - result.importErrors(res); - } - if (test) { - testAdditionalProperty.call(this, instance, schema2, options, ctx, property, result); - } + function Minimatch(pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options); } - return result; + assertValidPattern(pattern); + if (!options) options = {}; + pattern = pattern.trim(); + if (!options.allowWindowsEscape && path4.sep !== "/") { + pattern = pattern.split(path4.sep).join("/"); + } + this.options = options; + this.set = []; + this.pattern = pattern; + this.regexp = null; + this.negate = false; + this.comment = false; + this.empty = false; + this.partial = !!options.partial; + this.make(); + } + Minimatch.prototype.debug = function() { }; - validators.additionalProperties = function validateAdditionalProperties(instance, schema2, options, ctx) { - if (!this.types.object(instance)) return; - if (schema2.patternProperties) { - return null; + Minimatch.prototype.make = make; + function make() { + var pattern = this.pattern; + var options = this.options; + if (!options.nocomment && pattern.charAt(0) === "#") { + this.comment = true; + return; } - var result = new ValidatorResult(instance, schema2, options, ctx); - for (var property in instance) { - testAdditionalProperty.call(this, instance, schema2, options, ctx, property, result); + if (!pattern) { + this.empty = true; + return; } - return result; - }; - validators.minProperties = function validateMinProperties(instance, schema2, options, ctx) { - if (!this.types.object(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var keys = Object.keys(instance); - if (!(keys.length >= schema2.minProperties)) { - result.addError({ - name: "minProperties", - argument: schema2.minProperties, - message: "does not meet minimum property length of " + schema2.minProperties - }); + this.parseNegate(); + var set2 = this.globSet = this.braceExpand(); + if (options.debug) this.debug = function debug5() { + console.error.apply(console, arguments); + }; + this.debug(this.pattern, set2); + set2 = this.globParts = set2.map(function(s) { + return s.split(slashSplit); + }); + this.debug(this.pattern, set2); + set2 = set2.map(function(s, si, set3) { + return s.map(this.parse, this); + }, this); + this.debug(this.pattern, set2); + set2 = set2.filter(function(s) { + return s.indexOf(false) === -1; + }); + this.debug(this.pattern, set2); + this.set = set2; + } + Minimatch.prototype.parseNegate = parseNegate; + function parseNegate() { + var pattern = this.pattern; + var negate = false; + var options = this.options; + var negateOffset = 0; + if (options.nonegate) return; + for (var i = 0, l = pattern.length; i < l && pattern.charAt(i) === "!"; i++) { + negate = !negate; + negateOffset++; } - return result; + if (negateOffset) this.pattern = pattern.substr(negateOffset); + this.negate = negate; + } + minimatch.braceExpand = 
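/* Editorial note: the function assigned below delegates to the brace-expansion
   module bundled above. Expansion resolves comma alternatives and numeric or
   alphabetic `..` sequences before any glob parsing happens. A hedged sketch of
   the expected behavior (illustrative calls, assuming the bundle's `minimatch`
   export):

     minimatch.braceExpand("src/{lib,test}/*.js");  // => ["src/lib/*.js", "src/test/*.js"]
     minimatch.braceExpand("v{1..3}");              // => ["v1", "v2", "v3"]
*/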
function(pattern, options) { + return braceExpand(pattern, options); }; - validators.maxProperties = function validateMaxProperties(instance, schema2, options, ctx) { - if (!this.types.object(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var keys = Object.keys(instance); - if (!(keys.length <= schema2.maxProperties)) { - result.addError({ - name: "maxProperties", - argument: schema2.maxProperties, - message: "does not meet maximum property length of " + schema2.maxProperties - }); + Minimatch.prototype.braceExpand = braceExpand; + function braceExpand(pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options; + } else { + options = {}; + } + } + pattern = typeof pattern === "undefined" ? this.pattern : pattern; + assertValidPattern(pattern); + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + return [pattern]; + } + return expand2(pattern); + } + var MAX_PATTERN_LENGTH = 1024 * 64; + var assertValidPattern = function(pattern) { + if (typeof pattern !== "string") { + throw new TypeError("invalid pattern"); + } + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError("pattern is too long"); } - return result; }; - validators.items = function validateItems(instance, schema2, options, ctx) { + Minimatch.prototype.parse = parse2; + var SUBPARSE = {}; + function parse2(pattern, isSub) { + assertValidPattern(pattern); + var options = this.options; + if (pattern === "**") { + if (!options.noglobstar) + return GLOBSTAR; + else + pattern = "*"; + } + if (pattern === "") return ""; + var re = ""; + var hasMagic = !!options.nocase; + var escaping = false; + var patternListStack = []; + var negativeLists = []; + var stateChar; + var inClass = false; + var reClassStart = -1; + var classStart = -1; + var patternStart = pattern.charAt(0) === "." ? "" : options.dot ? "(?!(?:^|\\/)\\.{1,2}(?:$|\\/))" : "(?!\\.)"; var self2 = this; - if (!this.types.array(instance)) return; - if (schema2.items === void 0) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - instance.every(function(value, i) { - if (Array.isArray(schema2.items)) { - var items = schema2.items[i] === void 0 ? schema2.additionalItems : schema2.items[i]; - } else { - var items = schema2.items; + function clearStateChar() { + if (stateChar) { + switch (stateChar) { + case "*": + re += star; + hasMagic = true; + break; + case "?": + re += qmark; + hasMagic = true; + break; + default: + re += "\\" + stateChar; + break; + } + self2.debug("clearStateChar %j %j", stateChar, re); + stateChar = false; } - if (items === void 0) { - return true; + } + for (var i = 0, len = pattern.length, c; i < len && (c = pattern.charAt(i)); i++) { + this.debug("%s %s %s %j", pattern, i, re, c); + if (escaping && reSpecials[c]) { + re += "\\" + c; + escaping = false; + continue; } - if (items === false) { - result.addError({ - name: "items", - message: "additionalItems not permitted" - }); - return false; + switch (c) { + /* istanbul ignore next */ + case "/": { + return false; + } + case "\\": + clearStateChar(); + escaping = true; + continue; + // the various stateChar values + // for the "extglob" stuff. + case "?": + case "*": + case "+": + case "@": + case "!": + this.debug("%s %s %s %j <-- stateChar", pattern, i, re, c); + if (inClass) { + this.debug(" in class"); + if (c === "!" 
&& i === classStart + 1) c = "^"; + re += c; + continue; + } + self2.debug("call clearStateChar %j", stateChar); + clearStateChar(); + stateChar = c; + if (options.noext) clearStateChar(); + continue; + case "(": + if (inClass) { + re += "("; + continue; + } + if (!stateChar) { + re += "\\("; + continue; + } + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }); + re += stateChar === "!" ? "(?:(?!(?:" : "(?:"; + this.debug("plType %j %j", stateChar, re); + stateChar = false; + continue; + case ")": + if (inClass || !patternListStack.length) { + re += "\\)"; + continue; + } + clearStateChar(); + hasMagic = true; + var pl = patternListStack.pop(); + re += pl.close; + if (pl.type === "!") { + negativeLists.push(pl); + } + pl.reEnd = re.length; + continue; + case "|": + if (inClass || !patternListStack.length || escaping) { + re += "\\|"; + escaping = false; + continue; + } + clearStateChar(); + re += "|"; + continue; + // these are mostly the same in regexp and glob + case "[": + clearStateChar(); + if (inClass) { + re += "\\" + c; + continue; + } + inClass = true; + classStart = i; + reClassStart = re.length; + re += c; + continue; + case "]": + if (i === classStart + 1 || !inClass) { + re += "\\" + c; + escaping = false; + continue; + } + var cs = pattern.substring(classStart + 1, i); + try { + RegExp("[" + cs + "]"); + } catch (er) { + var sp = this.parse(cs, SUBPARSE); + re = re.substr(0, reClassStart) + "\\[" + sp[0] + "\\]"; + hasMagic = hasMagic || sp[1]; + inClass = false; + continue; + } + hasMagic = true; + inClass = false; + re += c; + continue; + default: + clearStateChar(); + if (escaping) { + escaping = false; + } else if (reSpecials[c] && !(c === "^" && inClass)) { + re += "\\"; + } + re += c; } - var res = self2.validateSchema(value, items, options, ctx.makeChild(items, i)); - if (res.instance !== result.instance[i]) result.instance[i] = res.instance; - result.importErrors(res); - return true; - }); - return result; - }; - validators.contains = function validateContains(instance, schema2, options, ctx) { - var self2 = this; - if (!this.types.array(instance)) return; - if (schema2.contains === void 0) return; - if (!helpers.isSchema(schema2.contains)) throw new Error('Expected "contains" keyword to be a schema'); - var result = new ValidatorResult(instance, schema2, options, ctx); - var count = instance.some(function(value, i) { - var res = self2.validateSchema(value, schema2.contains, options, ctx.makeChild(schema2.contains, i)); - return res.errors.length === 0; - }); - if (count === false) { - result.addError({ - name: "contains", - argument: schema2.contains, - message: "must contain an item matching given schema" + } + if (inClass) { + cs = pattern.substr(classStart + 1); + sp = this.parse(cs, SUBPARSE); + re = re.substr(0, reClassStart) + "\\[" + sp[0]; + hasMagic = hasMagic || sp[1]; + } + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length); + this.debug("setting tail", re, pl); + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function(_, $1, $2) { + if (!$2) { + $2 = "\\"; + } + return $1 + $1 + $2 + "|"; }); + this.debug("tail=%j\n %s", tail, tail, pl, re); + var t = pl.type === "*" ? star : pl.type === "?" ? 
qmark : "\\" + pl.type; + hasMagic = true; + re = re.slice(0, pl.reStart) + t + "\\(" + tail; } - return result; - }; - validators.minimum = function validateMinimum(instance, schema2, options, ctx) { - if (!this.types.number(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - if (schema2.exclusiveMinimum && schema2.exclusiveMinimum === true) { - if (!(instance > schema2.minimum)) { - result.addError({ - name: "minimum", - argument: schema2.minimum, - message: "must be greater than " + schema2.minimum - }); - } - } else { - if (!(instance >= schema2.minimum)) { - result.addError({ - name: "minimum", - argument: schema2.minimum, - message: "must be greater than or equal to " + schema2.minimum - }); - } + clearStateChar(); + if (escaping) { + re += "\\\\"; } - return result; - }; - validators.maximum = function validateMaximum(instance, schema2, options, ctx) { - if (!this.types.number(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - if (schema2.exclusiveMaximum && schema2.exclusiveMaximum === true) { - if (!(instance < schema2.maximum)) { - result.addError({ - name: "maximum", - argument: schema2.maximum, - message: "must be less than " + schema2.maximum - }); + var addPatternStart = false; + switch (re.charAt(0)) { + case "[": + case ".": + case "(": + addPatternStart = true; + } + for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n]; + var nlBefore = re.slice(0, nl.reStart); + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8); + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd); + var nlAfter = re.slice(nl.reEnd); + nlLast += nlAfter; + var openParensBefore = nlBefore.split("(").length - 1; + var cleanAfter = nlAfter; + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, ""); } - } else { - if (!(instance <= schema2.maximum)) { - result.addError({ - name: "maximum", - argument: schema2.maximum, - message: "must be less than or equal to " + schema2.maximum - }); + nlAfter = cleanAfter; + var dollar = ""; + if (nlAfter === "" && isSub !== SUBPARSE) { + dollar = "$"; } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast; + re = newRe; } - return result; - }; - validators.exclusiveMinimum = function validateExclusiveMinimum(instance, schema2, options, ctx) { - if (typeof schema2.exclusiveMinimum === "boolean") return; - if (!this.types.number(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var valid2 = instance > schema2.exclusiveMinimum; - if (!valid2) { - result.addError({ - name: "exclusiveMinimum", - argument: schema2.exclusiveMinimum, - message: "must be strictly greater than " + schema2.exclusiveMinimum - }); + if (re !== "" && hasMagic) { + re = "(?=.)" + re; } - return result; - }; - validators.exclusiveMaximum = function validateExclusiveMaximum(instance, schema2, options, ctx) { - if (typeof schema2.exclusiveMaximum === "boolean") return; - if (!this.types.number(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var valid2 = instance < schema2.exclusiveMaximum; - if (!valid2) { - result.addError({ - name: "exclusiveMaximum", - argument: schema2.exclusiveMaximum, - message: "must be strictly less than " + schema2.exclusiveMaximum - }); + if (addPatternStart) { + re = patternStart + re; } - return result; - }; - var validateMultipleOfOrDivisbleBy = function validateMultipleOfOrDivisbleBy2(instance, schema2, options, ctx, validationType, errorMessage) { - if 
(!this.types.number(instance)) return; - var validationArgument = schema2[validationType]; - if (validationArgument == 0) { - throw new SchemaError(validationType + " cannot be zero"); + if (isSub === SUBPARSE) { + return [re, hasMagic]; } - var result = new ValidatorResult(instance, schema2, options, ctx); - var instanceDecimals = helpers.getDecimalPlaces(instance); - var divisorDecimals = helpers.getDecimalPlaces(validationArgument); - var maxDecimals = Math.max(instanceDecimals, divisorDecimals); - var multiplier = Math.pow(10, maxDecimals); - if (Math.round(instance * multiplier) % Math.round(validationArgument * multiplier) !== 0) { - result.addError({ - name: validationType, - argument: validationArgument, - message: errorMessage + JSON.stringify(validationArgument) - }); + if (!hasMagic) { + return globUnescape(pattern); } - return result; - }; - validators.multipleOf = function validateMultipleOf(instance, schema2, options, ctx) { - return validateMultipleOfOrDivisbleBy.call(this, instance, schema2, options, ctx, "multipleOf", "is not a multiple of (divisible by) "); - }; - validators.divisibleBy = function validateDivisibleBy(instance, schema2, options, ctx) { - return validateMultipleOfOrDivisbleBy.call(this, instance, schema2, options, ctx, "divisibleBy", "is not divisible by (multiple of) "); - }; - validators.required = function validateRequired(instance, schema2, options, ctx) { - var result = new ValidatorResult(instance, schema2, options, ctx); - if (instance === void 0 && schema2.required === true) { - result.addError({ - name: "required", - message: "is required" - }); - } else if (this.types.object(instance) && Array.isArray(schema2.required)) { - schema2.required.forEach(function(n) { - if (getEnumerableProperty(instance, n) === void 0) { - result.addError({ - name: "required", - argument: n, - message: "requires property " + JSON.stringify(n) - }); - } - }); + var flags = options.nocase ? "i" : ""; + try { + var regExp = new RegExp("^" + re + "$", flags); + } catch (er) { + return new RegExp("$."); } - return result; + regExp._glob = pattern; + regExp._src = re; + return regExp; + } + minimatch.makeRe = function(pattern, options) { + return new Minimatch(pattern, options || {}).makeRe(); }; - validators.pattern = function validatePattern(instance, schema2, options, ctx) { - if (!this.types.string(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var pattern = schema2.pattern; - try { - var regexp = new RegExp(pattern, "u"); - } catch (_e) { - regexp = new RegExp(pattern); + Minimatch.prototype.makeRe = makeRe; + function makeRe() { + if (this.regexp || this.regexp === false) return this.regexp; + var set2 = this.set; + if (!set2.length) { + this.regexp = false; + return this.regexp; } - if (!instance.match(regexp)) { - result.addError({ - name: "pattern", - argument: schema2.pattern, - message: "does not match pattern " + JSON.stringify(schema2.pattern.toString()) - }); + var options = this.options; + var twoStar = options.noglobstar ? star : options.dot ? twoStarDot : twoStarNoDot; + var flags = options.nocase ? "i" : ""; + var re = set2.map(function(pattern) { + return pattern.map(function(p) { + return p === GLOBSTAR ? twoStar : typeof p === "string" ? regExpEscape(p) : p._src; + }).join("\\/"); + }).join("|"); + re = "^(?:" + re + ")$"; + if (this.negate) re = "^(?!" 
+ re + ").*$"; + try { + this.regexp = new RegExp(re, flags); + } catch (ex) { + this.regexp = false; } - return result; - }; - validators.format = function validateFormat(instance, schema2, options, ctx) { - if (instance === void 0) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - if (!result.disableFormat && !helpers.isFormat(instance, schema2.format, this)) { - result.addError({ - name: "format", - argument: schema2.format, - message: "does not conform to the " + JSON.stringify(schema2.format) + " format" - }); + return this.regexp; + } + minimatch.match = function(list, pattern, options) { + options = options || {}; + var mm = new Minimatch(pattern, options); + list = list.filter(function(f) { + return mm.match(f); + }); + if (mm.options.nonull && !list.length) { + list.push(pattern); } - return result; + return list; }; - validators.minLength = function validateMinLength(instance, schema2, options, ctx) { - if (!this.types.string(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var hsp = instance.match(/[\uDC00-\uDFFF]/g); - var length = instance.length - (hsp ? hsp.length : 0); - if (!(length >= schema2.minLength)) { - result.addError({ - name: "minLength", - argument: schema2.minLength, - message: "does not meet minimum length of " + schema2.minLength - }); - } - return result; - }; - validators.maxLength = function validateMaxLength(instance, schema2, options, ctx) { - if (!this.types.string(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - var hsp = instance.match(/[\uDC00-\uDFFF]/g); - var length = instance.length - (hsp ? hsp.length : 0); - if (!(length <= schema2.maxLength)) { - result.addError({ - name: "maxLength", - argument: schema2.maxLength, - message: "does not meet maximum length of " + schema2.maxLength - }); - } - return result; - }; - validators.minItems = function validateMinItems(instance, schema2, options, ctx) { - if (!this.types.array(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - if (!(instance.length >= schema2.minItems)) { - result.addError({ - name: "minItems", - argument: schema2.minItems, - message: "does not meet minimum length of " + schema2.minItems - }); + Minimatch.prototype.match = function match(f, partial) { + if (typeof partial === "undefined") partial = this.partial; + this.debug("match", f, this.pattern); + if (this.comment) return false; + if (this.empty) return f === ""; + if (f === "/" && partial) return true; + var options = this.options; + if (path4.sep !== "/") { + f = f.split(path4.sep).join("/"); } - return result; - }; - validators.maxItems = function validateMaxItems(instance, schema2, options, ctx) { - if (!this.types.array(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - if (!(instance.length <= schema2.maxItems)) { - result.addError({ - name: "maxItems", - argument: schema2.maxItems, - message: "does not meet maximum length of " + schema2.maxItems - }); + f = f.split(slashSplit); + this.debug(this.pattern, "split", f); + var set2 = this.set; + this.debug(this.pattern, "set", set2); + var filename; + var i; + for (i = f.length - 1; i >= 0; i--) { + filename = f[i]; + if (filename) break; } - return result; - }; - function testArrays(v, i, a) { - var j, len = a.length; - for (j = i + 1, len; j < len; j++) { - if (helpers.deepCompareStrict(v, a[j])) { - return false; + for (i = 0; i < set2.length; i++) { + var pattern = set2[i]; + var file = f; 
+ if (options.matchBase && pattern.length === 1) { + file = [filename]; + } + var hit = this.matchOne(file, pattern, partial); + if (hit) { + if (options.flipNegate) return true; + return !this.negate; } } - return true; - } - validators.uniqueItems = function validateUniqueItems(instance, schema2, options, ctx) { - if (schema2.uniqueItems !== true) return; - if (!this.types.array(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - if (!instance.every(testArrays)) { - result.addError({ - name: "uniqueItems", - message: "contains duplicate item" - }); - } - return result; + if (options.flipNegate) return false; + return this.negate; }; - validators.dependencies = function validateDependencies(instance, schema2, options, ctx) { - if (!this.types.object(instance)) return; - var result = new ValidatorResult(instance, schema2, options, ctx); - for (var property in schema2.dependencies) { - if (instance[property] === void 0) { - continue; - } - var dep = schema2.dependencies[property]; - var childContext = ctx.makeChild(dep, property); - if (typeof dep == "string") { - dep = [dep]; - } - if (Array.isArray(dep)) { - dep.forEach(function(prop) { - if (instance[prop] === void 0) { - result.addError({ - // FIXME there's two different "dependencies" errors here with slightly different outputs - // Can we make these the same? Or should we create different error types? - name: "dependencies", - argument: childContext.propertyPath, - message: "property " + prop + " not found, required by " + childContext.propertyPath - }); + Minimatch.prototype.matchOne = function(file, pattern, partial) { + var options = this.options; + this.debug( + "matchOne", + { "this": this, file, pattern } + ); + this.debug("matchOne", file.length, pattern.length); + for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + this.debug("matchOne loop"); + var p = pattern[pi]; + var f = file[fi]; + this.debug(pattern, p, f); + if (p === false) return false; + if (p === GLOBSTAR) { + this.debug("GLOBSTAR", [pattern, p, f]); + var fr = fi; + var pr = pi + 1; + if (pr === pl) { + this.debug("** at the end"); + for (; fi < fl; fi++) { + if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; } - }); - } else { - var res = this.validateSchema(instance, dep, options, childContext); - if (result.instance !== res.instance) result.instance = res.instance; - if (res && res.errors.length) { - result.addError({ - name: "dependencies", - argument: childContext.propertyPath, - message: "does not meet dependency required by " + childContext.propertyPath - }); - result.importErrors(res); + return true; + } + while (fr < fl) { + var swallowee = file[fr]; + this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug("globstar found match!", fr, fl, swallowee); + return true; + } else { + if (swallowee === "." || swallowee === ".." 
|| !options.dot && swallowee.charAt(0) === ".") { + this.debug("dot detected!", file, fr, pattern, pr); + break; + } + this.debug("globstar swallow a segment, and continue"); + fr++; + } + } + if (partial) { + this.debug("\n>>> no match, partial?", file, fr, pattern, pr); + if (fr === fl) return true; } + return false; } + var hit; + if (typeof p === "string") { + hit = f === p; + this.debug("string match", p, f, hit); + } else { + hit = f.match(p); + this.debug("pattern match", p, f, hit); + } + if (!hit) return false; } - return result; - }; - validators["enum"] = function validateEnum(instance, schema2, options, ctx) { - if (instance === void 0) { - return null; - } - if (!Array.isArray(schema2["enum"])) { - throw new SchemaError("enum expects an array", schema2); - } - var result = new ValidatorResult(instance, schema2, options, ctx); - if (!schema2["enum"].some(helpers.deepCompareStrict.bind(null, instance))) { - result.addError({ - name: "enum", - argument: schema2["enum"], - message: "is not one of enum values: " + schema2["enum"].map(String).join(",") - }); - } - return result; - }; - validators["const"] = function validateEnum(instance, schema2, options, ctx) { - if (instance === void 0) { - return null; - } - var result = new ValidatorResult(instance, schema2, options, ctx); - if (!helpers.deepCompareStrict(schema2["const"], instance)) { - result.addError({ - name: "const", - argument: schema2["const"], - message: "does not exactly match expected constant: " + schema2["const"] - }); + if (fi === fl && pi === pl) { + return true; + } else if (fi === fl) { + return partial; + } else if (pi === pl) { + return fi === fl - 1 && file[fi] === ""; } - return result; - }; - validators.not = validators.disallow = function validateNot(instance, schema2, options, ctx) { - var self2 = this; - if (instance === void 0) return null; - var result = new ValidatorResult(instance, schema2, options, ctx); - var notTypes = schema2.not || schema2.disallow; - if (!notTypes) return null; - if (!Array.isArray(notTypes)) notTypes = [notTypes]; - notTypes.forEach(function(type2) { - if (self2.testType(instance, schema2, options, ctx, type2)) { - var id = type2 && (type2.$id || type2.id); - var schemaId = id || type2; - result.addError({ - name: "not", - argument: schemaId, - message: "is of prohibited type " + schemaId - }); - } - }); - return result; + throw new Error("wtf?"); }; - module2.exports = attribute; + function globUnescape(s) { + return s.replace(/\\(.)/g, "$1"); + } + function regExpEscape(s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); + } } }); -// node_modules/jsonschema/lib/scan.js -var require_scan = __commonJS({ - "node_modules/jsonschema/lib/scan.js"(exports2, module2) { +// node_modules/@actions/glob/lib/internal-path.js +var require_internal_path = __commonJS({ + "node_modules/@actions/glob/lib/internal-path.js"(exports2) { "use strict"; - var urilib = require("url"); - var helpers = require_helpers(); - module2.exports.SchemaScanResult = SchemaScanResult; - function SchemaScanResult(found, ref) { - this.id = found; - this.ref = ref; - } - module2.exports.scan = function scan(base, schema2) { - function scanSchema(baseuri, schema3) { - if (!schema3 || typeof schema3 != "object") return; - if (schema3.$ref) { - var resolvedUri = urilib.resolve(baseuri, schema3.$ref); - ref[resolvedUri] = ref[resolvedUri] ? ref[resolvedUri] + 1 : 0; - return; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); } - var id = schema3.$id || schema3.id; - var ourBase = id ? urilib.resolve(baseuri, id) : baseuri; - if (ourBase) { - if (ourBase.indexOf("#") < 0) ourBase += "#"; - if (found[ourBase]) { - if (!helpers.deepCompareStrict(found[ourBase], schema3)) { - throw new Error("Schema <" + ourBase + "> already exists with different definition"); + __setModuleDefault2(result, mod); + return result; + }; + })(); + var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { + return mod && mod.__esModule ? mod : { "default": mod }; + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Path = void 0; + var path4 = __importStar2(require("path")); + var pathHelper = __importStar2(require_internal_path_helper()); + var assert_1 = __importDefault2(require("assert")); + var IS_WINDOWS = process.platform === "win32"; + var Path = class { + /** + * Constructs a Path + * @param itemPath Path or array of segments + */ + constructor(itemPath) { + this.segments = []; + if (typeof itemPath === "string") { + (0, assert_1.default)(itemPath, `Parameter 'itemPath' must not be empty`); + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + if (!pathHelper.hasRoot(itemPath)) { + this.segments = itemPath.split(path4.sep); + } else { + let remaining = itemPath; + let dir = pathHelper.dirname(remaining); + while (dir !== remaining) { + const basename = path4.basename(remaining); + this.segments.unshift(basename); + remaining = dir; + dir = pathHelper.dirname(remaining); } - return found[ourBase]; + this.segments.unshift(remaining); } - found[ourBase] = schema3; - if (ourBase[ourBase.length - 1] == "#") { - found[ourBase.substring(0, ourBase.length - 1)] = schema3; + } else { + (0, assert_1.default)(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); + for (let i = 0; i < itemPath.length; i++) { + let segment = itemPath[i]; + (0, assert_1.default)(segment, `Parameter 'itemPath' must not contain any empty segments`); + segment = pathHelper.normalizeSeparators(itemPath[i]); + if (i === 0 && pathHelper.hasRoot(segment)) { + segment = pathHelper.safeTrimTrailingSeparator(segment); + (0, assert_1.default)(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); + this.segments.push(segment); + } else { + (0, 
assert_1.default)(!segment.includes(path4.sep), `Parameter 'itemPath' contains unexpected path separators`); + this.segments.push(segment); + } } } - scanArray(ourBase + "/items", Array.isArray(schema3.items) ? schema3.items : [schema3.items]); - scanArray(ourBase + "/extends", Array.isArray(schema3.extends) ? schema3.extends : [schema3.extends]); - scanSchema(ourBase + "/additionalItems", schema3.additionalItems); - scanObject(ourBase + "/properties", schema3.properties); - scanSchema(ourBase + "/additionalProperties", schema3.additionalProperties); - scanObject(ourBase + "/definitions", schema3.definitions); - scanObject(ourBase + "/patternProperties", schema3.patternProperties); - scanObject(ourBase + "/dependencies", schema3.dependencies); - scanArray(ourBase + "/disallow", schema3.disallow); - scanArray(ourBase + "/allOf", schema3.allOf); - scanArray(ourBase + "/anyOf", schema3.anyOf); - scanArray(ourBase + "/oneOf", schema3.oneOf); - scanSchema(ourBase + "/not", schema3.not); - } - function scanArray(baseuri, schemas) { - if (!Array.isArray(schemas)) return; - for (var i = 0; i < schemas.length; i++) { - scanSchema(baseuri + "/" + i, schemas[i]); - } } - function scanObject(baseuri, schemas) { - if (!schemas || typeof schemas != "object") return; - for (var p in schemas) { - scanSchema(baseuri + "/" + p, schemas[p]); + /** + * Converts the path to it's string representation + */ + toString() { + let result = this.segments[0]; + let skipSlash = result.endsWith(path4.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); + for (let i = 1; i < this.segments.length; i++) { + if (skipSlash) { + skipSlash = false; + } else { + result += path4.sep; + } + result += this.segments[i]; } + return result; } - var found = {}; - var ref = {}; - scanSchema(base, schema2); - return new SchemaScanResult(found, ref); }; + exports2.Path = Path; } }); -// node_modules/jsonschema/lib/validator.js -var require_validator = __commonJS({ - "node_modules/jsonschema/lib/validator.js"(exports2, module2) { +// node_modules/@actions/glob/lib/internal-pattern.js +var require_internal_pattern = __commonJS({ + "node_modules/@actions/glob/lib/internal-pattern.js"(exports2) { "use strict"; - var urilib = require("url"); - var attribute = require_attribute(); - var helpers = require_helpers(); - var scanSchema = require_scan().scan; - var ValidatorResult = helpers.ValidatorResult; - var ValidatorResultError = helpers.ValidatorResultError; - var SchemaError = helpers.SchemaError; - var SchemaContext = helpers.SchemaContext; - var anonymousBase = "/"; - var Validator2 = function Validator3() { - this.customFormats = Object.create(Validator3.prototype.customFormats); - this.schemas = {}; - this.unresolvedRefs = []; - this.types = Object.create(types); - this.attributes = Object.create(attribute.validators); - }; - Validator2.prototype.customFormats = {}; - Validator2.prototype.schemas = null; - Validator2.prototype.types = null; - Validator2.prototype.attributes = null; - Validator2.prototype.unresolvedRefs = null; - Validator2.prototype.addSchema = function addSchema(schema2, base) { - var self2 = this; - if (!schema2) { - return null; - } - var scan = scanSchema(base || anonymousBase, schema2); - var ourUri = base || schema2.$id || schema2.id; - for (var uri in scan.id) { - this.schemas[uri] = scan.id[uri]; - } - for (var uri in scan.ref) { - this.unresolvedRefs.push(uri); - } - this.unresolvedRefs = this.unresolvedRefs.filter(function(uri2) { - return typeof self2.schemas[uri2] === "undefined"; - }); - return 
this.schemas[ourUri]; - }; - Validator2.prototype.addSubSchemaArray = function addSubSchemaArray(baseuri, schemas) { - if (!Array.isArray(schemas)) return; - for (var i = 0; i < schemas.length; i++) { - this.addSubSchema(baseuri, schemas[i]); - } - }; - Validator2.prototype.addSubSchemaObject = function addSubSchemaArray(baseuri, schemas) { - if (!schemas || typeof schemas != "object") return; - for (var p in schemas) { - this.addSubSchema(baseuri, schemas[p]); - } - }; - Validator2.prototype.setSchemas = function setSchemas(schemas) { - this.schemas = schemas; - }; - Validator2.prototype.getSchema = function getSchema(urn) { - return this.schemas[urn]; - }; - Validator2.prototype.validate = function validate(instance, schema2, options, ctx) { - if (typeof schema2 !== "boolean" && typeof schema2 !== "object" || schema2 === null) { - throw new SchemaError("Expected `schema` to be an object or boolean"); - } - if (!options) { - options = {}; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; } - var id = schema2.$id || schema2.id; - var base = urilib.resolve(options.base || anonymousBase, id || ""); - if (!ctx) { - ctx = new SchemaContext(schema2, options, [], base, Object.create(this.schemas)); - if (!ctx.schemas[base]) { - ctx.schemas[base] = schema2; - } - var found = scanSchema(base, schema2); - for (var n in found.id) { - var sch = found.id[n]; - ctx.schemas[n] = sch; + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); } - } - if (options.required && instance === void 0) { - var result = new ValidatorResult(instance, schema2, options, ctx); - result.addError("is required, but is undefined"); + __setModuleDefault2(result, mod); return result; - } - var result = this.validateSchema(instance, schema2, options, ctx); - if (!result) { - throw new Error("Result undefined"); - } else if (options.throwAll && result.errors.length) { - throw new ValidatorResultError(result); - } - return result; - }; - function shouldResolve(schema2) { - var ref = typeof schema2 === "string" ? 
schema2 : schema2.$ref; - if (typeof ref == "string") return ref; - return false; - } - Validator2.prototype.validateSchema = function validateSchema(instance, schema2, options, ctx) { - var result = new ValidatorResult(instance, schema2, options, ctx); - if (typeof schema2 === "boolean") { - if (schema2 === true) { - schema2 = {}; - } else if (schema2 === false) { - schema2 = { type: [] }; + }; + })(); + var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { + return mod && mod.__esModule ? mod : { "default": mod }; + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Pattern = void 0; + var os2 = __importStar2(require("os")); + var path4 = __importStar2(require("path")); + var pathHelper = __importStar2(require_internal_path_helper()); + var assert_1 = __importDefault2(require("assert")); + var minimatch_1 = require_minimatch(); + var internal_match_kind_1 = require_internal_match_kind(); + var internal_path_1 = require_internal_path(); + var IS_WINDOWS = process.platform === "win32"; + var Pattern = class _Pattern { + constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { + this.negate = false; + let pattern; + if (typeof patternOrNegate === "string") { + pattern = patternOrNegate.trim(); + } else { + segments = segments || []; + (0, assert_1.default)(segments.length, `Parameter 'segments' must not empty`); + const root = _Pattern.getLiteral(segments[0]); + (0, assert_1.default)(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); + pattern = new internal_path_1.Path(segments).toString().trim(); + if (patternOrNegate) { + pattern = `!${pattern}`; + } } - } else if (!schema2) { - throw new Error("schema is undefined"); + while (pattern.startsWith("!")) { + this.negate = !this.negate; + pattern = pattern.substr(1).trim(); + } + pattern = _Pattern.fixupPattern(pattern, homedir); + this.segments = new internal_path_1.Path(pattern).segments; + this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path4.sep); + pattern = pathHelper.safeTrimTrailingSeparator(pattern); + let foundGlob = false; + const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); + this.searchPath = new internal_path_1.Path(searchSegments).toString(); + this.rootRegExp = new RegExp(_Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? "i" : ""); + this.isImplicitPattern = isImplicitPattern; + const minimatchOptions = { + dot: true, + nobrace: true, + nocase: IS_WINDOWS, + nocomment: true, + noext: true, + nonegate: true + }; + pattern = IS_WINDOWS ? 
pattern.replace(/\\/g, "/") : pattern; + this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); } - if (schema2["extends"]) { - if (Array.isArray(schema2["extends"])) { - var schemaobj = { schema: schema2, ctx }; - schema2["extends"].forEach(this.schemaTraverser.bind(this, schemaobj)); - schema2 = schemaobj.schema; - schemaobj.schema = null; - schemaobj.ctx = null; - schemaobj = null; + /** + * Matches the pattern against the specified path + */ + match(itemPath) { + if (this.segments[this.segments.length - 1] === "**") { + itemPath = pathHelper.normalizeSeparators(itemPath); + if (!itemPath.endsWith(path4.sep) && this.isImplicitPattern === false) { + itemPath = `${itemPath}${path4.sep}`; + } } else { - schema2 = helpers.deepMerge(schema2, this.superResolve(schema2["extends"], ctx)); + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + } + if (this.minimatch.match(itemPath)) { + return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; } + return internal_match_kind_1.MatchKind.None; } - var switchSchema = shouldResolve(schema2); - if (switchSchema) { - var resolved = this.resolve(schema2, switchSchema, ctx); - var subctx = new SchemaContext(resolved.subschema, options, ctx.path, resolved.switchSchema, ctx.schemas); - return this.validateSchema(instance, resolved.subschema, options, subctx); + /** + * Indicates whether the pattern may match descendants of the specified path + */ + partialMatch(itemPath) { + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + if (pathHelper.dirname(itemPath) === itemPath) { + return this.rootRegExp.test(itemPath); + } + return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); } - var skipAttributes = options && options.skipAttributes || []; - for (var key in schema2) { - if (!attribute.ignoreProperties[key] && skipAttributes.indexOf(key) < 0) { - var validatorErr = null; - var validator = this.attributes[key]; - if (validator) { - validatorErr = validator.call(this, instance, schema2, options, ctx); - } else if (options.allowUnknownAttributes === false) { - throw new SchemaError("Unsupported attribute: " + key, schema2); + /** + * Escapes glob patterns within a path + */ + static globEscape(s) { + return (IS_WINDOWS ? s : s.replace(/\\/g, "\\\\")).replace(/(\[)(?=[^/]+\])/g, "[[]").replace(/\?/g, "[?]").replace(/\*/g, "[*]"); + } + /** + * Normalizes slashes and ensures absolute root + */ + static fixupPattern(pattern, homedir) { + (0, assert_1.default)(pattern, "pattern cannot be empty"); + const literalSegments = new internal_path_1.Path(pattern).segments.map((x) => _Pattern.getLiteral(x)); + (0, assert_1.default)(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); + (0, assert_1.default)(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); + pattern = pathHelper.normalizeSeparators(pattern); + if (pattern === "." || pattern.startsWith(`.${path4.sep}`)) { + pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); + } else if (pattern === "~" || pattern.startsWith(`~${path4.sep}`)) { + homedir = homedir || os2.homedir(); + (0, assert_1.default)(homedir, "Unable to determine HOME directory"); + (0, assert_1.default)(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. 
Actual '${homedir}'`); + pattern = _Pattern.globEscape(homedir) + pattern.substr(1); + } else if (IS_WINDOWS && (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { + let root = pathHelper.ensureAbsoluteRoot("C:\\dummy-root", pattern.substr(0, 2)); + if (pattern.length > 2 && !root.endsWith("\\")) { + root += "\\"; } - if (validatorErr) { - result.importErrors(validatorErr); + pattern = _Pattern.globEscape(root) + pattern.substr(2); + } else if (IS_WINDOWS && (pattern === "\\" || pattern.match(/^\\[^\\]/))) { + let root = pathHelper.ensureAbsoluteRoot("C:\\dummy-root", "\\"); + if (!root.endsWith("\\")) { + root += "\\"; } + pattern = _Pattern.globEscape(root) + pattern.substr(1); + } else { + pattern = pathHelper.ensureAbsoluteRoot(_Pattern.globEscape(process.cwd()), pattern); } + return pathHelper.normalizeSeparators(pattern); } - if (typeof options.rewrite == "function") { - var value = options.rewrite.call(this, instance, schema2, options, ctx); - result.instance = value; - } - return result; - }; - Validator2.prototype.schemaTraverser = function schemaTraverser(schemaobj, s) { - schemaobj.schema = helpers.deepMerge(schemaobj.schema, this.superResolve(s, schemaobj.ctx)); - }; - Validator2.prototype.superResolve = function superResolve(schema2, ctx) { - var ref = shouldResolve(schema2); - if (ref) { - return this.resolve(schema2, ref, ctx).subschema; - } - return schema2; - }; - Validator2.prototype.resolve = function resolve2(schema2, switchSchema, ctx) { - switchSchema = ctx.resolve(switchSchema); - if (ctx.schemas[switchSchema]) { - return { subschema: ctx.schemas[switchSchema], switchSchema }; - } - var parsed = urilib.parse(switchSchema); - var fragment = parsed && parsed.hash; - var document2 = fragment && fragment.length && switchSchema.substr(0, switchSchema.length - fragment.length); - if (!document2 || !ctx.schemas[document2]) { - throw new SchemaError("no such schema <" + switchSchema + ">", schema2); - } - var subschema = helpers.objectGetPath(ctx.schemas[document2], fragment.substr(1)); - if (subschema === void 0) { - throw new SchemaError("no such schema " + fragment + " located in <" + document2 + ">", schema2); - } - return { subschema, switchSchema }; - }; - Validator2.prototype.testType = function validateType(instance, schema2, options, ctx, type2) { - if (type2 === void 0) { - return; - } else if (type2 === null) { - throw new SchemaError('Unexpected null in "type" keyword'); - } - if (typeof this.types[type2] == "function") { - return this.types[type2].call(this, instance); + /** + * Attempts to unescape a pattern segment to create a literal path segment. + * Otherwise returns empty string. 
+ */ + static getLiteral(segment) { + let literal = ""; + for (let i = 0; i < segment.length; i++) { + const c = segment[i]; + if (c === "\\" && !IS_WINDOWS && i + 1 < segment.length) { + literal += segment[++i]; + continue; + } else if (c === "*" || c === "?") { + return ""; + } else if (c === "[" && i + 1 < segment.length) { + let set2 = ""; + let closed = -1; + for (let i2 = i + 1; i2 < segment.length; i2++) { + const c2 = segment[i2]; + if (c2 === "\\" && !IS_WINDOWS && i2 + 1 < segment.length) { + set2 += segment[++i2]; + continue; + } else if (c2 === "]") { + closed = i2; + break; + } else { + set2 += c2; + } + } + if (closed >= 0) { + if (set2.length > 1) { + return ""; + } + if (set2) { + literal += set2; + i = closed; + continue; + } + } + } + literal += c; + } + return literal; } - if (type2 && typeof type2 == "object") { - var res = this.validateSchema(instance, type2, options, ctx); - return res === void 0 || !(res && res.errors.length); + /** + * Escapes regexp special characters + * https://javascript.info/regexp-escaping + */ + static regExpEscape(s) { + return s.replace(/[[\\^$.|?*+()]/g, "\\$&"); } - return true; - }; - var types = Validator2.prototype.types = {}; - types.string = function testString(instance) { - return typeof instance == "string"; - }; - types.number = function testNumber(instance) { - return typeof instance == "number" && isFinite(instance); - }; - types.integer = function testInteger(instance) { - return typeof instance == "number" && instance % 1 === 0; - }; - types.boolean = function testBoolean(instance) { - return typeof instance == "boolean"; - }; - types.array = function testArray(instance) { - return Array.isArray(instance); - }; - types["null"] = function testNull(instance) { - return instance === null; }; - types.date = function testDate(instance) { - return instance instanceof Date; - }; - types.any = function testAny(instance) { - return true; - }; - types.object = function testObject(instance) { - return instance && typeof instance === "object" && !Array.isArray(instance) && !(instance instanceof Date); - }; - module2.exports = Validator2; + exports2.Pattern = Pattern; } }); -// node_modules/jsonschema/lib/index.js -var require_lib3 = __commonJS({ - "node_modules/jsonschema/lib/index.js"(exports2, module2) { +// node_modules/@actions/glob/lib/internal-search-state.js +var require_internal_search_state = __commonJS({ + "node_modules/@actions/glob/lib/internal-search-state.js"(exports2) { "use strict"; - var Validator2 = module2.exports.Validator = require_validator(); - module2.exports.ValidatorResult = require_helpers().ValidatorResult; - module2.exports.ValidatorResultError = require_helpers().ValidatorResultError; - module2.exports.ValidationError = require_helpers().ValidationError; - module2.exports.SchemaError = require_helpers().SchemaError; - module2.exports.SchemaScanResult = require_scan().SchemaScanResult; - module2.exports.scan = require_scan().scan; - module2.exports.validate = function(instance, schema2, options) { - var v = new Validator2(); - return v.validate(instance, schema2, options); + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.SearchState = void 0; + var SearchState = class { + constructor(path4, level) { + this.path = path4; + this.level = level; + } }; + exports2.SearchState = SearchState; } }); -// node_modules/@actions/glob/lib/internal-glob-options-helper.js -var require_internal_glob_options_helper = __commonJS({ - 
"node_modules/@actions/glob/lib/internal-glob-options-helper.js"(exports2) { +// node_modules/@actions/glob/lib/internal-globber.js +var require_internal_globber = __commonJS({ + "node_modules/@actions/glob/lib/internal-globber.js"(exports2) { "use strict"; var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { if (k2 === void 0) k2 = k; @@ -67352,207 +66700,258 @@ var require_internal_glob_options_helper = __commonJS({ return result; }; })(); - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getOptions = getOptions; - var core12 = __importStar2(require_core()); - function getOptions(copy) { - const result = { - followSymbolicLinks: true, - implicitDescendants: true, - matchDirectories: true, - omitBrokenSymbolicLinks: true, - excludeHiddenFiles: false - }; - if (copy) { - if (typeof copy.followSymbolicLinks === "boolean") { - result.followSymbolicLinks = copy.followSymbolicLinks; - core12.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); - } - if (typeof copy.implicitDescendants === "boolean") { - result.implicitDescendants = copy.implicitDescendants; - core12.debug(`implicitDescendants '${result.implicitDescendants}'`); - } - if (typeof copy.matchDirectories === "boolean") { - result.matchDirectories = copy.matchDirectories; - core12.debug(`matchDirectories '${result.matchDirectories}'`); + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve2) { + resolve2(value); + }); + } + return new (P || (P = Promise))(function(resolve2, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } } - if (typeof copy.omitBrokenSymbolicLinks === "boolean") { - result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core12.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } } - if (typeof copy.excludeHiddenFiles === "boolean") { - result.excludeHiddenFiles = copy.excludeHiddenFiles; - core12.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); + function step(result) { + result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + var __asyncValues2 = exports2 && exports2.__asyncValues || function(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { + return this; + }, i); + function verb(n) { + i[n] = o[n] && function(v) { + return new Promise(function(resolve2, reject) { + v = o[n](v), settle(resolve2, reject, v.done, v.value); + }); + }; } - return result; - } - } -}); - -// node_modules/@actions/glob/lib/internal-path-helper.js -var require_internal_path_helper = __commonJS({ - "node_modules/@actions/glob/lib/internal-path-helper.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; + function settle(resolve2, reject, d, v) { + Promise.resolve(v).then(function(v2) { + resolve2({ value: v2, done: d }); + }, reject); } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); - } - __setModuleDefault2(result, mod); - return result; - }; - })(); - var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { - return mod && mod.__esModule ? mod : { "default": mod }; }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.dirname = dirname; - exports2.ensureAbsoluteRoot = ensureAbsoluteRoot; - exports2.hasAbsoluteRoot = hasAbsoluteRoot; - exports2.hasRoot = hasRoot; - exports2.normalizeSeparators = normalizeSeparators; - exports2.safeTrimTrailingSeparator = safeTrimTrailingSeparator; - var path3 = __importStar2(require("path")); - var assert_1 = __importDefault2(require("assert")); - var IS_WINDOWS = process.platform === "win32"; - function dirname(p) { - p = safeTrimTrailingSeparator(p); - if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { - return p; - } - let result = path3.dirname(p); - if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { - result = safeTrimTrailingSeparator(result); + var __await2 = exports2 && exports2.__await || function(v) { + return this instanceof __await2 ? (this.v = v, this) : new __await2(v); + }; + var __asyncGenerator2 = exports2 && exports2.__asyncGenerator || function(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = Object.create((typeof AsyncIterator === "function" ? 
AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function() { + return this; + }, i; + function awaitReturn(f) { + return function(v) { + return Promise.resolve(v).then(f, reject); + }; } - return result; - } - function ensureAbsoluteRoot(root, itemPath) { - (0, assert_1.default)(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); - (0, assert_1.default)(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); - if (hasAbsoluteRoot(itemPath)) { - return itemPath; + function verb(n, f) { + if (g[n]) { + i[n] = function(v) { + return new Promise(function(a, b) { + q.push([n, v, a, b]) > 1 || resume(n, v); + }); + }; + if (f) i[n] = f(i[n]); + } } - if (IS_WINDOWS) { - if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { - let cwd = process.cwd(); - (0, assert_1.default)(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); - if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { - if (itemPath.length === 2) { - return `${itemPath[0]}:\\${cwd.substr(3)}`; - } else { - if (!cwd.endsWith("\\")) { - cwd += "\\"; - } - return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; - } - } else { - return `${itemPath[0]}:\\${itemPath.substr(2)}`; - } - } else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { - const cwd = process.cwd(); - (0, assert_1.default)(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); - return `${cwd[0]}:\\${itemPath.substr(1)}`; + function resume(n, v) { + try { + step(g[n](v)); + } catch (e) { + settle(q[0][3], e); } } - (0, assert_1.default)(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); - if (root.endsWith("/") || IS_WINDOWS && root.endsWith("\\")) { - } else { - root += path3.sep; + function step(r) { + r.value instanceof __await2 ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - return root + itemPath; - } - function hasAbsoluteRoot(itemPath) { - (0, assert_1.default)(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); - itemPath = normalizeSeparators(itemPath); - if (IS_WINDOWS) { - return itemPath.startsWith("\\\\") || /^[A-Z]:\\/i.test(itemPath); + function fulfill(value) { + resume("next", value); } - return itemPath.startsWith("/"); - } - function hasRoot(itemPath) { - (0, assert_1.default)(itemPath, `isRooted parameter 'itemPath' must not be empty`); - itemPath = normalizeSeparators(itemPath); - if (IS_WINDOWS) { - return itemPath.startsWith("\\") || /^[A-Z]:/i.test(itemPath); + function reject(value) { + resume("throw", value); } - return itemPath.startsWith("/"); - } - function normalizeSeparators(p) { - p = p || ""; - if (IS_WINDOWS) { - p = p.replace(/\//g, "\\"); - const isUnc = /^\\\\+[^\\]/.test(p); - return (isUnc ? 
"\\" : "") + p.replace(/\\\\+/g, "\\"); + function settle(f, v) { + if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } - return p.replace(/\/\/+/g, "/"); - } - function safeTrimTrailingSeparator(p) { - if (!p) { - return ""; + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.DefaultGlobber = void 0; + var core12 = __importStar2(require_core()); + var fs2 = __importStar2(require("fs")); + var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); + var path4 = __importStar2(require("path")); + var patternHelper = __importStar2(require_internal_pattern_helper()); + var internal_match_kind_1 = require_internal_match_kind(); + var internal_pattern_1 = require_internal_pattern(); + var internal_search_state_1 = require_internal_search_state(); + var IS_WINDOWS = process.platform === "win32"; + var DefaultGlobber = class _DefaultGlobber { + constructor(options) { + this.patterns = []; + this.searchPaths = []; + this.options = globOptionsHelper.getOptions(options); } - p = normalizeSeparators(p); - if (!p.endsWith(path3.sep)) { - return p; + getSearchPaths() { + return this.searchPaths.slice(); } - if (p === path3.sep) { - return p; + glob() { + return __awaiter2(this, void 0, void 0, function* () { + var _a, e_1, _b, _c; + const result = []; + try { + for (var _d = true, _e = __asyncValues2(this.globGenerator()), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const itemPath = _c; + result.push(itemPath); + } + } catch (e_1_1) { + e_1 = { error: e_1_1 }; + } finally { + try { + if (!_d && !_a && (_b = _e.return)) yield _b.call(_e); + } finally { + if (e_1) throw e_1.error; + } + } + return result; + }); } - if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { - return p; + globGenerator() { + return __asyncGenerator2(this, arguments, function* globGenerator_1() { + const options = globOptionsHelper.getOptions(this.options); + const patterns = []; + for (const pattern of this.patterns) { + patterns.push(pattern); + if (options.implicitDescendants && (pattern.trailingSeparator || pattern.segments[pattern.segments.length - 1] !== "**")) { + patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat("**"))); + } + } + const stack = []; + for (const searchPath of patternHelper.getSearchPaths(patterns)) { + core12.debug(`Search path '${searchPath}'`); + try { + yield __await2(fs2.promises.lstat(searchPath)); + } catch (err) { + if (err.code === "ENOENT") { + continue; + } + throw err; + } + stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); + } + const traversalChain = []; + while (stack.length) { + const item = stack.pop(); + const match = patternHelper.match(patterns, item.path); + const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); + if (!match && !partialMatch) { + continue; + } + const stats = yield __await2( + _DefaultGlobber.stat(item, options, traversalChain) + // Broken symlink, or symlink cycle detected, or no longer exists + ); + if (!stats) { + continue; + } + if (options.excludeHiddenFiles && path4.basename(item.path).match(/^\./)) { + continue; + } + if (stats.isDirectory()) { + if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) { + yield yield __await2(item.path); + } else if (!partialMatch) { + continue; + } + const childLevel = item.level + 1; + const childItems = (yield __await2(fs2.promises.readdir(item.path))).map((x) => new 
internal_search_state_1.SearchState(path4.join(item.path, x), childLevel)); + stack.push(...childItems.reverse()); + } else if (match & internal_match_kind_1.MatchKind.File) { + yield yield __await2(item.path); + } + } + }); } - return p.substr(0, p.length - 1); - } - } -}); - -// node_modules/@actions/glob/lib/internal-match-kind.js -var require_internal_match_kind = __commonJS({ - "node_modules/@actions/glob/lib/internal-match-kind.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.MatchKind = void 0; - var MatchKind; - (function(MatchKind2) { - MatchKind2[MatchKind2["None"] = 0] = "None"; - MatchKind2[MatchKind2["Directory"] = 1] = "Directory"; - MatchKind2[MatchKind2["File"] = 2] = "File"; - MatchKind2[MatchKind2["All"] = 3] = "All"; - })(MatchKind || (exports2.MatchKind = MatchKind = {})); + /** + * Constructs a DefaultGlobber + */ + static create(patterns, options) { + return __awaiter2(this, void 0, void 0, function* () { + const result = new _DefaultGlobber(options); + if (IS_WINDOWS) { + patterns = patterns.replace(/\r\n/g, "\n"); + patterns = patterns.replace(/\r/g, "\n"); + } + const lines = patterns.split("\n").map((x) => x.trim()); + for (const line of lines) { + if (!line || line.startsWith("#")) { + continue; + } else { + result.patterns.push(new internal_pattern_1.Pattern(line)); + } + } + result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); + return result; + }); + } + static stat(item, options, traversalChain) { + return __awaiter2(this, void 0, void 0, function* () { + let stats; + if (options.followSymbolicLinks) { + try { + stats = yield fs2.promises.stat(item.path); + } catch (err) { + if (err.code === "ENOENT") { + if (options.omitBrokenSymbolicLinks) { + core12.debug(`Broken symlink '${item.path}'`); + return void 0; + } + throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); + } + throw err; + } + } else { + stats = yield fs2.promises.lstat(item.path); + } + if (stats.isDirectory() && options.followSymbolicLinks) { + const realPath = yield fs2.promises.realpath(item.path); + while (traversalChain.length >= item.level) { + traversalChain.pop(); + } + if (traversalChain.some((x) => x === realPath)) { + core12.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + return void 0; + } + traversalChain.push(realPath); + } + return stats; + }); + } + }; + exports2.DefaultGlobber = DefaultGlobber; } }); -// node_modules/@actions/glob/lib/internal-pattern-helper.js -var require_internal_pattern_helper = __commonJS({ - "node_modules/@actions/glob/lib/internal-pattern-helper.js"(exports2) { +// node_modules/@actions/glob/lib/internal-hash-files.js +var require_internal_hash_files = __commonJS({ + "node_modules/@actions/glob/lib/internal-hash-files.js"(exports2) { "use strict"; var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? 
(function(o, m, k, k2) { if (k2 === void 0) k2 = k; @@ -67591,1196 +66990,1360 @@ var require_internal_pattern_helper = __commonJS({ return result; }; })(); - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getSearchPaths = getSearchPaths; - exports2.match = match; - exports2.partialMatch = partialMatch; - var pathHelper = __importStar2(require_internal_path_helper()); - var internal_match_kind_1 = require_internal_match_kind(); - var IS_WINDOWS = process.platform === "win32"; - function getSearchPaths(patterns) { - patterns = patterns.filter((x) => !x.negate); - const searchPathMap = {}; - for (const pattern of patterns) { - const key = IS_WINDOWS ? pattern.searchPath.toUpperCase() : pattern.searchPath; - searchPathMap[key] = "candidate"; + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve2) { + resolve2(value); + }); } - const result = []; - for (const pattern of patterns) { - const key = IS_WINDOWS ? pattern.searchPath.toUpperCase() : pattern.searchPath; - if (searchPathMap[key] === "included") { - continue; + return new (P || (P = Promise))(function(resolve2, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } } - let foundAncestor = false; - let tempKey = key; - let parent = pathHelper.dirname(tempKey); - while (parent !== tempKey) { - if (searchPathMap[parent]) { - foundAncestor = true; - break; + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); } - tempKey = parent; - parent = pathHelper.dirname(tempKey); } - if (!foundAncestor) { - result.push(pattern.searchPath); - searchPathMap[key] = "included"; + function step(result) { + result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + var __asyncValues2 = exports2 && exports2.__asyncValues || function(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { + return this; + }, i); + function verb(n) { + i[n] = o[n] && function(v) { + return new Promise(function(resolve2, reject) { + v = o[n](v), settle(resolve2, reject, v.done, v.value); + }); + }; } - return result; - } - function match(patterns, itemPath) { - let result = internal_match_kind_1.MatchKind.None; - for (const pattern of patterns) { - if (pattern.negate) { - result &= ~pattern.match(itemPath); + function settle(resolve2, reject, d, v) { + Promise.resolve(v).then(function(v2) { + resolve2({ value: v2, done: d }); + }, reject); + } + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.hashFiles = hashFiles; + var crypto2 = __importStar2(require("crypto")); + var core12 = __importStar2(require_core()); + var fs2 = __importStar2(require("fs")); + var stream = __importStar2(require("stream")); + var util = __importStar2(require("util")); + var path4 = __importStar2(require("path")); + function hashFiles(globber_1, currentWorkspace_1) { + return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { + var _a, e_1, _b, _c; + var _d; + const writeDelegate = verbose ? 
core12.info : core12.debug; + let hasMatch = false; + const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd(); + const result = crypto2.createHash("sha256"); + let count = 0; + try { + for (var _e = true, _f = __asyncValues2(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) { + _c = _g.value; + _e = false; + const file = _c; + writeDelegate(file); + if (!file.startsWith(`${githubWorkspace}${path4.sep}`)) { + writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); + continue; + } + if (fs2.statSync(file).isDirectory()) { + writeDelegate(`Skip directory '${file}'.`); + continue; + } + const hash = crypto2.createHash("sha256"); + const pipeline = util.promisify(stream.pipeline); + yield pipeline(fs2.createReadStream(file), hash); + result.write(hash.digest()); + count++; + if (!hasMatch) { + hasMatch = true; + } + } + } catch (e_1_1) { + e_1 = { error: e_1_1 }; + } finally { + try { + if (!_e && !_a && (_b = _f.return)) yield _b.call(_f); + } finally { + if (e_1) throw e_1.error; + } + } + result.end(); + if (hasMatch) { + writeDelegate(`Found ${count} files to hash.`); + return result.digest("hex"); } else { - result |= pattern.match(itemPath); + writeDelegate(`No matches found for glob`); + return ""; } - } - return result; - } - function partialMatch(patterns, itemPath) { - return patterns.some((x) => !x.negate && x.partialMatch(itemPath)); + }); } } }); -// node_modules/concat-map/index.js -var require_concat_map = __commonJS({ - "node_modules/concat-map/index.js"(exports2, module2) { - module2.exports = function(xs, fn) { - var res = []; - for (var i = 0; i < xs.length; i++) { - var x = fn(xs[i], i); - if (isArray(x)) res.push.apply(res, x); - else res.push(x); - } - return res; - }; - var isArray = Array.isArray || function(xs) { - return Object.prototype.toString.call(xs) === "[object Array]"; - }; - } -}); - -// node_modules/balanced-match/index.js -var require_balanced_match = __commonJS({ - "node_modules/balanced-match/index.js"(exports2, module2) { +// node_modules/@actions/glob/lib/glob.js +var require_glob = __commonJS({ + "node_modules/@actions/glob/lib/glob.js"(exports2) { "use strict"; - module2.exports = balanced; - function balanced(a, b, str2) { - if (a instanceof RegExp) a = maybeMatch(a, str2); - if (b instanceof RegExp) b = maybeMatch(b, str2); - var r = range(a, b, str2); - return r && { - start: r[0], - end: r[1], - pre: str2.slice(0, r[0]), - body: str2.slice(r[0] + a.length, r[1]), - post: str2.slice(r[1] + b.length) - }; - } - function maybeMatch(reg, str2) { - var m = str2.match(reg); - return m ? m[0] : null; - } - balanced.range = range; - function range(a, b, str2) { - var begs, beg, left, right, result; - var ai = str2.indexOf(a); - var bi = str2.indexOf(b, ai + 1); - var i = ai; - if (ai >= 0 && bi > 0) { - begs = []; - left = str2.length; - while (i >= 0 && !result) { - if (i == ai) { - begs.push(i); - ai = str2.indexOf(a, i + 1); - } else if (begs.length == 1) { - result = [begs.pop(), bi]; - } else { - beg = begs.pop(); - if (beg < left) { - left = beg; - right = bi; - } - bi = str2.indexOf(b, i + 1); + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? 
value : new P(function(resolve2) { + resolve2(value); + }); + } + return new (P || (P = Promise))(function(resolve2, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } - i = ai < bi && ai >= 0 ? ai : bi; } - if (begs.length) { - result = [left, right]; + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } } - } - return result; + function step(result) { + result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.create = create; + exports2.hashFiles = hashFiles; + var internal_globber_1 = require_internal_globber(); + var internal_hash_files_1 = require_internal_hash_files(); + function create(patterns, options) { + return __awaiter2(this, void 0, void 0, function* () { + return yield internal_globber_1.DefaultGlobber.create(patterns, options); + }); + } + function hashFiles(patterns_1) { + return __awaiter2(this, arguments, void 0, function* (patterns, currentWorkspace = "", options, verbose = false) { + let followSymbolicLinks = true; + if (options && typeof options.followSymbolicLinks === "boolean") { + followSymbolicLinks = options.followSymbolicLinks; + } + const globber = yield create(patterns, { followSymbolicLinks }); + return (0, internal_hash_files_1.hashFiles)(globber, currentWorkspace, verbose); + }); } } }); -// node_modules/brace-expansion/index.js -var require_brace_expansion = __commonJS({ - "node_modules/brace-expansion/index.js"(exports2, module2) { - var concatMap = require_concat_map(); - var balanced = require_balanced_match(); - module2.exports = expandTop; - var escSlash = "\0SLASH" + Math.random() + "\0"; - var escOpen = "\0OPEN" + Math.random() + "\0"; - var escClose = "\0CLOSE" + Math.random() + "\0"; - var escComma = "\0COMMA" + Math.random() + "\0"; - var escPeriod = "\0PERIOD" + Math.random() + "\0"; - function numeric(str2) { - return parseInt(str2, 10) == str2 ? 
parseInt(str2, 10) : str2.charCodeAt(0); - } - function escapeBraces(str2) { - return str2.split("\\\\").join(escSlash).split("\\{").join(escOpen).split("\\}").join(escClose).split("\\,").join(escComma).split("\\.").join(escPeriod); - } - function unescapeBraces(str2) { - return str2.split(escSlash).join("\\").split(escOpen).join("{").split(escClose).join("}").split(escComma).join(",").split(escPeriod).join("."); +// node_modules/@actions/cache/node_modules/semver/semver.js +var require_semver3 = __commonJS({ + "node_modules/@actions/cache/node_modules/semver/semver.js"(exports2, module2) { + exports2 = module2.exports = SemVer; + var debug5; + if (typeof process === "object" && process.env && process.env.NODE_DEBUG && /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug5 = function() { + var args = Array.prototype.slice.call(arguments, 0); + args.unshift("SEMVER"); + console.log.apply(console, args); + }; + } else { + debug5 = function() { + }; } - function parseCommaParts(str2) { - if (!str2) - return [""]; - var parts = []; - var m = balanced("{", "}", str2); - if (!m) - return str2.split(","); - var pre = m.pre; - var body = m.body; - var post = m.post; - var p = pre.split(","); - p[p.length - 1] += "{" + body + "}"; - var postParts = parseCommaParts(post); - if (post.length) { - p[p.length - 1] += postParts.shift(); - p.push.apply(p, postParts); - } - parts.push.apply(parts, p); - return parts; + exports2.SEMVER_SPEC_VERSION = "2.0.0"; + var MAX_LENGTH = 256; + var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || /* istanbul ignore next */ + 9007199254740991; + var MAX_SAFE_COMPONENT_LENGTH = 16; + var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6; + var re = exports2.re = []; + var safeRe = exports2.safeRe = []; + var src = exports2.src = []; + var t = exports2.tokens = {}; + var R = 0; + function tok(n) { + t[n] = R++; } - function expandTop(str2) { - if (!str2) - return []; - if (str2.substr(0, 2) === "{}") { - str2 = "\\{\\}" + str2.substr(2); + var LETTERDASHNUMBER = "[a-zA-Z0-9-]"; + var safeRegexReplacements = [ + ["\\s", 1], + ["\\d", MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH] + ]; + function makeSafeRe(value) { + for (var i2 = 0; i2 < safeRegexReplacements.length; i2++) { + var token = safeRegexReplacements[i2][0]; + var max = safeRegexReplacements[i2][1]; + value = value.split(token + "*").join(token + "{0," + max + "}").split(token + "+").join(token + "{1," + max + "}"); } - return expand2(escapeBraces(str2), true).map(unescapeBraces); - } - function embrace(str2) { - return "{" + str2 + "}"; - } - function isPadded(el) { - return /^-?0\d/.test(el); + return value; } - function lte(i, y) { - return i <= y; - } - function gte4(i, y) { - return i >= y; + tok("NUMERICIDENTIFIER"); + src[t.NUMERICIDENTIFIER] = "0|[1-9]\\d*"; + tok("NUMERICIDENTIFIERLOOSE"); + src[t.NUMERICIDENTIFIERLOOSE] = "\\d+"; + tok("NONNUMERICIDENTIFIER"); + src[t.NONNUMERICIDENTIFIER] = "\\d*[a-zA-Z-]" + LETTERDASHNUMBER + "*"; + tok("MAINVERSION"); + src[t.MAINVERSION] = "(" + src[t.NUMERICIDENTIFIER] + ")\\.(" + src[t.NUMERICIDENTIFIER] + ")\\.(" + src[t.NUMERICIDENTIFIER] + ")"; + tok("MAINVERSIONLOOSE"); + src[t.MAINVERSIONLOOSE] = "(" + src[t.NUMERICIDENTIFIERLOOSE] + ")\\.(" + src[t.NUMERICIDENTIFIERLOOSE] + ")\\.(" + src[t.NUMERICIDENTIFIERLOOSE] + ")"; + tok("PRERELEASEIDENTIFIER"); + src[t.PRERELEASEIDENTIFIER] = "(?:" + src[t.NUMERICIDENTIFIER] + "|" + src[t.NONNUMERICIDENTIFIER] + ")"; + tok("PRERELEASEIDENTIFIERLOOSE"); + src[t.PRERELEASEIDENTIFIERLOOSE] = "(?:" + 
src[t.NUMERICIDENTIFIERLOOSE] + "|" + src[t.NONNUMERICIDENTIFIER] + ")"; + tok("PRERELEASE"); + src[t.PRERELEASE] = "(?:-(" + src[t.PRERELEASEIDENTIFIER] + "(?:\\." + src[t.PRERELEASEIDENTIFIER] + ")*))"; + tok("PRERELEASELOOSE"); + src[t.PRERELEASELOOSE] = "(?:-?(" + src[t.PRERELEASEIDENTIFIERLOOSE] + "(?:\\." + src[t.PRERELEASEIDENTIFIERLOOSE] + ")*))"; + tok("BUILDIDENTIFIER"); + src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + "+"; + tok("BUILD"); + src[t.BUILD] = "(?:\\+(" + src[t.BUILDIDENTIFIER] + "(?:\\." + src[t.BUILDIDENTIFIER] + ")*))"; + tok("FULL"); + tok("FULLPLAIN"); + src[t.FULLPLAIN] = "v?" + src[t.MAINVERSION] + src[t.PRERELEASE] + "?" + src[t.BUILD] + "?"; + src[t.FULL] = "^" + src[t.FULLPLAIN] + "$"; + tok("LOOSEPLAIN"); + src[t.LOOSEPLAIN] = "[v=\\s]*" + src[t.MAINVERSIONLOOSE] + src[t.PRERELEASELOOSE] + "?" + src[t.BUILD] + "?"; + tok("LOOSE"); + src[t.LOOSE] = "^" + src[t.LOOSEPLAIN] + "$"; + tok("GTLT"); + src[t.GTLT] = "((?:<|>)?=?)"; + tok("XRANGEIDENTIFIERLOOSE"); + src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + "|x|X|\\*"; + tok("XRANGEIDENTIFIER"); + src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + "|x|X|\\*"; + tok("XRANGEPLAIN"); + src[t.XRANGEPLAIN] = "[v=\\s]*(" + src[t.XRANGEIDENTIFIER] + ")(?:\\.(" + src[t.XRANGEIDENTIFIER] + ")(?:\\.(" + src[t.XRANGEIDENTIFIER] + ")(?:" + src[t.PRERELEASE] + ")?" + src[t.BUILD] + "?)?)?"; + tok("XRANGEPLAINLOOSE"); + src[t.XRANGEPLAINLOOSE] = "[v=\\s]*(" + src[t.XRANGEIDENTIFIERLOOSE] + ")(?:\\.(" + src[t.XRANGEIDENTIFIERLOOSE] + ")(?:\\.(" + src[t.XRANGEIDENTIFIERLOOSE] + ")(?:" + src[t.PRERELEASELOOSE] + ")?" + src[t.BUILD] + "?)?)?"; + tok("XRANGE"); + src[t.XRANGE] = "^" + src[t.GTLT] + "\\s*" + src[t.XRANGEPLAIN] + "$"; + tok("XRANGELOOSE"); + src[t.XRANGELOOSE] = "^" + src[t.GTLT] + "\\s*" + src[t.XRANGEPLAINLOOSE] + "$"; + tok("COERCE"); + src[t.COERCE] = "(^|[^\\d])(\\d{1," + MAX_SAFE_COMPONENT_LENGTH + "})(?:\\.(\\d{1," + MAX_SAFE_COMPONENT_LENGTH + "}))?(?:\\.(\\d{1," + MAX_SAFE_COMPONENT_LENGTH + "}))?(?:$|[^\\d])"; + tok("COERCERTL"); + re[t.COERCERTL] = new RegExp(src[t.COERCE], "g"); + safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), "g"); + tok("LONETILDE"); + src[t.LONETILDE] = "(?:~>?)"; + tok("TILDETRIM"); + src[t.TILDETRIM] = "(\\s*)" + src[t.LONETILDE] + "\\s+"; + re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], "g"); + safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), "g"); + var tildeTrimReplace = "$1~"; + tok("TILDE"); + src[t.TILDE] = "^" + src[t.LONETILDE] + src[t.XRANGEPLAIN] + "$"; + tok("TILDELOOSE"); + src[t.TILDELOOSE] = "^" + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + "$"; + tok("LONECARET"); + src[t.LONECARET] = "(?:\\^)"; + tok("CARETTRIM"); + src[t.CARETTRIM] = "(\\s*)" + src[t.LONECARET] + "\\s+"; + re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], "g"); + safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), "g"); + var caretTrimReplace = "$1^"; + tok("CARET"); + src[t.CARET] = "^" + src[t.LONECARET] + src[t.XRANGEPLAIN] + "$"; + tok("CARETLOOSE"); + src[t.CARETLOOSE] = "^" + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + "$"; + tok("COMPARATORLOOSE"); + src[t.COMPARATORLOOSE] = "^" + src[t.GTLT] + "\\s*(" + src[t.LOOSEPLAIN] + ")$|^$"; + tok("COMPARATOR"); + src[t.COMPARATOR] = "^" + src[t.GTLT] + "\\s*(" + src[t.FULLPLAIN] + ")$|^$"; + tok("COMPARATORTRIM"); + src[t.COMPARATORTRIM] = "(\\s*)" + src[t.GTLT] + "\\s*(" + src[t.LOOSEPLAIN] + "|" + src[t.XRANGEPLAIN] + ")"; + re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], "g"); + 
safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), "g"); + var comparatorTrimReplace = "$1$2$3"; + tok("HYPHENRANGE"); + src[t.HYPHENRANGE] = "^\\s*(" + src[t.XRANGEPLAIN] + ")\\s+-\\s+(" + src[t.XRANGEPLAIN] + ")\\s*$"; + tok("HYPHENRANGELOOSE"); + src[t.HYPHENRANGELOOSE] = "^\\s*(" + src[t.XRANGEPLAINLOOSE] + ")\\s+-\\s+(" + src[t.XRANGEPLAINLOOSE] + ")\\s*$"; + tok("STAR"); + src[t.STAR] = "(<|>)?=?\\s*\\*"; + for (i = 0; i < R; i++) { + debug5(i, src[i]); + if (!re[i]) { + re[i] = new RegExp(src[i]); + safeRe[i] = new RegExp(makeSafeRe(src[i])); + } } - function expand2(str2, isTop) { - var expansions = []; - var m = balanced("{", "}", str2); - if (!m || /\$$/.test(m.pre)) return [str2]; - var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); - var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); - var isSequence = isNumericSequence || isAlphaSequence; - var isOptions = m.body.indexOf(",") >= 0; - if (!isSequence && !isOptions) { - if (m.post.match(/,(?!,).*\}/)) { - str2 = m.pre + "{" + m.body + escClose + m.post; - return expand2(str2); - } - return [str2]; + var i; + exports2.parse = parse2; + function parse2(version, options) { + if (!options || typeof options !== "object") { + options = { + loose: !!options, + includePrerelease: false + }; } - var n; - if (isSequence) { - n = m.body.split(/\.\./); - } else { - n = parseCommaParts(m.body); - if (n.length === 1) { - n = expand2(n[0], false).map(embrace); - if (n.length === 1) { - var post = m.post.length ? expand2(m.post, false) : [""]; - return post.map(function(p) { - return m.pre + n[0] + p; - }); - } - } + if (version instanceof SemVer) { + return version; } - var pre = m.pre; - var post = m.post.length ? expand2(m.post, false) : [""]; - var N; - if (isSequence) { - var x = numeric(n[0]); - var y = numeric(n[1]); - var width = Math.max(n[0].length, n[1].length); - var incr = n.length == 3 ? Math.abs(numeric(n[2])) : 1; - var test = lte; - var reverse = y < x; - if (reverse) { - incr *= -1; - test = gte4; - } - var pad = n.some(isPadded); - N = []; - for (var i = x; test(i, y); i += incr) { - var c; - if (isAlphaSequence) { - c = String.fromCharCode(i); - if (c === "\\") - c = ""; - } else { - c = String(i); - if (pad) { - var need = width - c.length; - if (need > 0) { - var z = new Array(need + 1).join("0"); - if (i < 0) - c = "-" + z + c.slice(1); - else - c = z + c; - } - } - } - N.push(c); - } - } else { - N = concatMap(n, function(el) { - return expand2(el, false); - }); + if (typeof version !== "string") { + return null; } - for (var j = 0; j < N.length; j++) { - for (var k = 0; k < post.length; k++) { - var expansion = pre + N[j] + post[k]; - if (!isTop || isSequence || expansion) - expansions.push(expansion); - } + if (version.length > MAX_LENGTH) { + return null; + } + var r = options.loose ? 
safeRe[t.LOOSE] : safeRe[t.FULL]; + if (!r.test(version)) { + return null; } - return expansions; - } - } -}); - -// node_modules/minimatch/minimatch.js -var require_minimatch = __commonJS({ - "node_modules/minimatch/minimatch.js"(exports2, module2) { - module2.exports = minimatch; - minimatch.Minimatch = Minimatch; - var path3 = (function() { try { - return require("path"); - } catch (e) { + return new SemVer(version, options); + } catch (er) { + return null; } - })() || { - sep: "/" - }; - minimatch.sep = path3.sep; - var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}; - var expand2 = require_brace_expansion(); - var plTypes = { - "!": { open: "(?:(?!(?:", close: "))[^/]*?)" }, - "?": { open: "(?:", close: ")?" }, - "+": { open: "(?:", close: ")+" }, - "*": { open: "(?:", close: ")*" }, - "@": { open: "(?:", close: ")" } - }; - var qmark = "[^/]"; - var star = qmark + "*?"; - var twoStarDot = "(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?"; - var twoStarNoDot = "(?:(?!(?:\\/|^)\\.).)*?"; - var reSpecials = charSet("().*{}+?[]^$\\!"); - function charSet(s) { - return s.split("").reduce(function(set2, c) { - set2[c] = true; - return set2; - }, {}); } - var slashSplit = /\/+/; - minimatch.filter = filter; - function filter(pattern, options) { - options = options || {}; - return function(p, i, list) { - return minimatch(p, pattern, options); - }; + exports2.valid = valid2; + function valid2(version, options) { + var v = parse2(version, options); + return v ? v.version : null; } - function ext(a, b) { - b = b || {}; - var t = {}; - Object.keys(a).forEach(function(k) { - t[k] = a[k]; - }); - Object.keys(b).forEach(function(k) { - t[k] = b[k]; - }); - return t; + exports2.clean = clean; + function clean(version, options) { + var s = parse2(version.trim().replace(/^[=v]+/, ""), options); + return s ? 
s.version : null; } - minimatch.defaults = function(def) { - if (!def || typeof def !== "object" || !Object.keys(def).length) { - return minimatch; + exports2.SemVer = SemVer; + function SemVer(version, options) { + if (!options || typeof options !== "object") { + options = { + loose: !!options, + includePrerelease: false + }; } - var orig = minimatch; - var m = function minimatch2(p, pattern, options) { - return orig(p, pattern, ext(def, options)); - }; - m.Minimatch = function Minimatch2(pattern, options) { - return new orig.Minimatch(pattern, ext(def, options)); - }; - m.Minimatch.defaults = function defaults(options) { - return orig.defaults(ext(def, options)).Minimatch; - }; - m.filter = function filter2(pattern, options) { - return orig.filter(pattern, ext(def, options)); - }; - m.defaults = function defaults(options) { - return orig.defaults(ext(def, options)); - }; - m.makeRe = function makeRe2(pattern, options) { - return orig.makeRe(pattern, ext(def, options)); - }; - m.braceExpand = function braceExpand2(pattern, options) { - return orig.braceExpand(pattern, ext(def, options)); - }; - m.match = function(list, pattern, options) { - return orig.match(list, pattern, ext(def, options)); - }; - return m; - }; - Minimatch.defaults = function(def) { - return minimatch.defaults(def).Minimatch; - }; - function minimatch(p, pattern, options) { - assertValidPattern(pattern); - if (!options) options = {}; - if (!options.nocomment && pattern.charAt(0) === "#") { - return false; + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version; + } else { + version = version.version; + } + } else if (typeof version !== "string") { + throw new TypeError("Invalid Version: " + version); } - return new Minimatch(pattern, options).match(p); - } - function Minimatch(pattern, options) { - if (!(this instanceof Minimatch)) { - return new Minimatch(pattern, options); + if (version.length > MAX_LENGTH) { + throw new TypeError("version is longer than " + MAX_LENGTH + " characters"); } - assertValidPattern(pattern); - if (!options) options = {}; - pattern = pattern.trim(); - if (!options.allowWindowsEscape && path3.sep !== "/") { - pattern = pattern.split(path3.sep).join("/"); + if (!(this instanceof SemVer)) { + return new SemVer(version, options); } + debug5("SemVer", version, options); this.options = options; - this.set = []; - this.pattern = pattern; - this.regexp = null; - this.negate = false; - this.comment = false; - this.empty = false; - this.partial = !!options.partial; - this.make(); - } - Minimatch.prototype.debug = function() { - }; - Minimatch.prototype.make = make; - function make() { - var pattern = this.pattern; - var options = this.options; - if (!options.nocomment && pattern.charAt(0) === "#") { - this.comment = true; - return; + this.loose = !!options.loose; + var m = version.trim().match(options.loose ? 
safeRe[t.LOOSE] : safeRe[t.FULL]); + if (!m) { + throw new TypeError("Invalid Version: " + version); } - if (!pattern) { - this.empty = true; - return; + this.raw = version; + this.major = +m[1]; + this.minor = +m[2]; + this.patch = +m[3]; + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError("Invalid major version"); } - this.parseNegate(); - var set2 = this.globSet = this.braceExpand(); - if (options.debug) this.debug = function debug5() { - console.error.apply(console, arguments); - }; - this.debug(this.pattern, set2); - set2 = this.globParts = set2.map(function(s) { - return s.split(slashSplit); - }); - this.debug(this.pattern, set2); - set2 = set2.map(function(s, si, set3) { - return s.map(this.parse, this); - }, this); - this.debug(this.pattern, set2); - set2 = set2.filter(function(s) { - return s.indexOf(false) === -1; - }); - this.debug(this.pattern, set2); - this.set = set2; - } - Minimatch.prototype.parseNegate = parseNegate; - function parseNegate() { - var pattern = this.pattern; - var negate = false; - var options = this.options; - var negateOffset = 0; - if (options.nonegate) return; - for (var i = 0, l = pattern.length; i < l && pattern.charAt(i) === "!"; i++) { - negate = !negate; - negateOffset++; + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError("Invalid minor version"); } - if (negateOffset) this.pattern = pattern.substr(negateOffset); - this.negate = negate; - } - minimatch.braceExpand = function(pattern, options) { - return braceExpand(pattern, options); - }; - Minimatch.prototype.braceExpand = braceExpand; - function braceExpand(pattern, options) { - if (!options) { - if (this instanceof Minimatch) { - options = this.options; - } else { - options = {}; - } + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError("Invalid patch version"); } - pattern = typeof pattern === "undefined" ? this.pattern : pattern; - assertValidPattern(pattern); - if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { - return [pattern]; + if (!m[4]) { + this.prerelease = []; + } else { + this.prerelease = m[4].split(".").map(function(id) { + if (/^[0-9]+$/.test(id)) { + var num = +id; + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num; + } + } + return id; + }); } - return expand2(pattern); + this.build = m[5] ? m[5].split(".") : []; + this.format(); } - var MAX_PATTERN_LENGTH = 1024 * 64; - var assertValidPattern = function(pattern) { - if (typeof pattern !== "string") { - throw new TypeError("invalid pattern"); + SemVer.prototype.format = function() { + this.version = this.major + "." + this.minor + "." 
+ this.patch; + if (this.prerelease.length) { + this.version += "-" + this.prerelease.join("."); } - if (pattern.length > MAX_PATTERN_LENGTH) { - throw new TypeError("pattern is too long"); + return this.version; + }; + SemVer.prototype.toString = function() { + return this.version; + }; + SemVer.prototype.compare = function(other) { + debug5("SemVer.compare", this.version, this.options, other); + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options); } + return this.compareMain(other) || this.comparePre(other); }; - Minimatch.prototype.parse = parse2; - var SUBPARSE = {}; - function parse2(pattern, isSub) { - assertValidPattern(pattern); - var options = this.options; - if (pattern === "**") { - if (!options.noglobstar) - return GLOBSTAR; - else - pattern = "*"; + SemVer.prototype.compareMain = function(other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options); } - if (pattern === "") return ""; - var re = ""; - var hasMagic = !!options.nocase; - var escaping = false; - var patternListStack = []; - var negativeLists = []; - var stateChar; - var inClass = false; - var reClassStart = -1; - var classStart = -1; - var patternStart = pattern.charAt(0) === "." ? "" : options.dot ? "(?!(?:^|\\/)\\.{1,2}(?:$|\\/))" : "(?!\\.)"; - var self2 = this; - function clearStateChar() { - if (stateChar) { - switch (stateChar) { - case "*": - re += star; - hasMagic = true; - break; - case "?": - re += qmark; - hasMagic = true; - break; - default: - re += "\\" + stateChar; - break; - } - self2.debug("clearStateChar %j %j", stateChar, re); - stateChar = false; - } + return compareIdentifiers(this.major, other.major) || compareIdentifiers(this.minor, other.minor) || compareIdentifiers(this.patch, other.patch); + }; + SemVer.prototype.comparePre = function(other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options); } - for (var i = 0, len = pattern.length, c; i < len && (c = pattern.charAt(i)); i++) { - this.debug("%s %s %s %j", pattern, i, re, c); - if (escaping && reSpecials[c]) { - re += "\\" + c; - escaping = false; + if (this.prerelease.length && !other.prerelease.length) { + return -1; + } else if (!this.prerelease.length && other.prerelease.length) { + return 1; + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0; + } + var i2 = 0; + do { + var a = this.prerelease[i2]; + var b = other.prerelease[i2]; + debug5("prerelease compare", i2, a, b); + if (a === void 0 && b === void 0) { + return 0; + } else if (b === void 0) { + return 1; + } else if (a === void 0) { + return -1; + } else if (a === b) { continue; + } else { + return compareIdentifiers(a, b); } - switch (c) { - /* istanbul ignore next */ - case "/": { - return false; + } while (++i2); + }; + SemVer.prototype.compareBuild = function(other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options); + } + var i2 = 0; + do { + var a = this.build[i2]; + var b = other.build[i2]; + debug5("prerelease compare", i2, a, b); + if (a === void 0 && b === void 0) { + return 0; + } else if (b === void 0) { + return 1; + } else if (a === void 0) { + return -1; + } else if (a === b) { + continue; + } else { + return compareIdentifiers(a, b); + } + } while (++i2); + }; + SemVer.prototype.inc = function(release2, identifier) { + switch (release2) { + case "premajor": + this.prerelease.length = 0; + this.patch = 0; + this.minor = 0; + this.major++; + this.inc("pre", identifier); + break; + case "preminor": + this.prerelease.length = 0; + 
this.patch = 0; + this.minor++; + this.inc("pre", identifier); + break; + case "prepatch": + this.prerelease.length = 0; + this.inc("patch", identifier); + this.inc("pre", identifier); + break; + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case "prerelease": + if (this.prerelease.length === 0) { + this.inc("patch", identifier); } - case "\\": - clearStateChar(); - escaping = true; - continue; - // the various stateChar values - // for the "extglob" stuff. - case "?": - case "*": - case "+": - case "@": - case "!": - this.debug("%s %s %s %j <-- stateChar", pattern, i, re, c); - if (inClass) { - this.debug(" in class"); - if (c === "!" && i === classStart + 1) c = "^"; - re += c; - continue; - } - self2.debug("call clearStateChar %j", stateChar); - clearStateChar(); - stateChar = c; - if (options.noext) clearStateChar(); - continue; - case "(": - if (inClass) { - re += "("; - continue; - } - if (!stateChar) { - re += "\\("; - continue; - } - patternListStack.push({ - type: stateChar, - start: i - 1, - reStart: re.length, - open: plTypes[stateChar].open, - close: plTypes[stateChar].close - }); - re += stateChar === "!" ? "(?:(?!(?:" : "(?:"; - this.debug("plType %j %j", stateChar, re); - stateChar = false; - continue; - case ")": - if (inClass || !patternListStack.length) { - re += "\\)"; - continue; - } - clearStateChar(); - hasMagic = true; - var pl = patternListStack.pop(); - re += pl.close; - if (pl.type === "!") { - negativeLists.push(pl); - } - pl.reEnd = re.length; - continue; - case "|": - if (inClass || !patternListStack.length || escaping) { - re += "\\|"; - escaping = false; - continue; - } - clearStateChar(); - re += "|"; - continue; - // these are mostly the same in regexp and glob - case "[": - clearStateChar(); - if (inClass) { - re += "\\" + c; - continue; - } - inClass = true; - classStart = i; - reClassStart = re.length; - re += c; - continue; - case "]": - if (i === classStart + 1 || !inClass) { - re += "\\" + c; - escaping = false; - continue; + this.inc("pre", identifier); + break; + case "major": + if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) { + this.major++; + } + this.minor = 0; + this.patch = 0; + this.prerelease = []; + break; + case "minor": + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++; + } + this.patch = 0; + this.prerelease = []; + break; + case "patch": + if (this.prerelease.length === 0) { + this.patch++; + } + this.prerelease = []; + break; + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
+ case "pre": + if (this.prerelease.length === 0) { + this.prerelease = [0]; + } else { + var i2 = this.prerelease.length; + while (--i2 >= 0) { + if (typeof this.prerelease[i2] === "number") { + this.prerelease[i2]++; + i2 = -2; + } } - var cs = pattern.substring(classStart + 1, i); - try { - RegExp("[" + cs + "]"); - } catch (er) { - var sp = this.parse(cs, SUBPARSE); - re = re.substr(0, reClassStart) + "\\[" + sp[0] + "\\]"; - hasMagic = hasMagic || sp[1]; - inClass = false; - continue; + if (i2 === -1) { + this.prerelease.push(0); } - hasMagic = true; - inClass = false; - re += c; - continue; - default: - clearStateChar(); - if (escaping) { - escaping = false; - } else if (reSpecials[c] && !(c === "^" && inClass)) { - re += "\\"; + } + if (identifier) { + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0]; + } + } else { + this.prerelease = [identifier, 0]; } - re += c; - } - } - if (inClass) { - cs = pattern.substr(classStart + 1); - sp = this.parse(cs, SUBPARSE); - re = re.substr(0, reClassStart) + "\\[" + sp[0]; - hasMagic = hasMagic || sp[1]; - } - for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { - var tail = re.slice(pl.reStart + pl.open.length); - this.debug("setting tail", re, pl); - tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function(_, $1, $2) { - if (!$2) { - $2 = "\\"; } - return $1 + $1 + $2 + "|"; - }); - this.debug("tail=%j\n %s", tail, tail, pl, re); - var t = pl.type === "*" ? star : pl.type === "?" ? qmark : "\\" + pl.type; - hasMagic = true; - re = re.slice(0, pl.reStart) + t + "\\(" + tail; + break; + default: + throw new Error("invalid increment argument: " + release2); } - clearStateChar(); - if (escaping) { - re += "\\\\"; + this.format(); + this.raw = this.version; + return this; + }; + exports2.inc = inc; + function inc(version, release2, loose, identifier) { + if (typeof loose === "string") { + identifier = loose; + loose = void 0; } - var addPatternStart = false; - switch (re.charAt(0)) { - case "[": - case ".": - case "(": - addPatternStart = true; + try { + return new SemVer(version, loose).inc(release2, identifier).version; + } catch (er) { + return null; } - for (var n = negativeLists.length - 1; n > -1; n--) { - var nl = negativeLists[n]; - var nlBefore = re.slice(0, nl.reStart); - var nlFirst = re.slice(nl.reStart, nl.reEnd - 8); - var nlLast = re.slice(nl.reEnd - 8, nl.reEnd); - var nlAfter = re.slice(nl.reEnd); - nlLast += nlAfter; - var openParensBefore = nlBefore.split("(").length - 1; - var cleanAfter = nlAfter; - for (i = 0; i < openParensBefore; i++) { - cleanAfter = cleanAfter.replace(/\)[+*?]?/, ""); + } + exports2.diff = diff; + function diff(version1, version2) { + if (eq(version1, version2)) { + return null; + } else { + var v1 = parse2(version1); + var v2 = parse2(version2); + var prefix = ""; + if (v1.prerelease.length || v2.prerelease.length) { + prefix = "pre"; + var defaultResult = "prerelease"; } - nlAfter = cleanAfter; - var dollar = ""; - if (nlAfter === "" && isSub !== SUBPARSE) { - dollar = "$"; + for (var key in v1) { + if (key === "major" || key === "minor" || key === "patch") { + if (v1[key] !== v2[key]) { + return prefix + key; + } + } } - var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast; - re = newRe; + return defaultResult; } - if (re !== "" && hasMagic) { - re = "(?=.)" + re; + } + exports2.compareIdentifiers = compareIdentifiers; + var numeric = /^[0-9]+$/; + function compareIdentifiers(a, b) { + var anum = numeric.test(a); + var 
bnum = numeric.test(b); + if (anum && bnum) { + a = +a; + b = +b; } - if (addPatternStart) { - re = patternStart + re; + return a === b ? 0 : anum && !bnum ? -1 : bnum && !anum ? 1 : a < b ? -1 : 1; + } + exports2.rcompareIdentifiers = rcompareIdentifiers; + function rcompareIdentifiers(a, b) { + return compareIdentifiers(b, a); + } + exports2.major = major; + function major(a, loose) { + return new SemVer(a, loose).major; + } + exports2.minor = minor; + function minor(a, loose) { + return new SemVer(a, loose).minor; + } + exports2.patch = patch; + function patch(a, loose) { + return new SemVer(a, loose).patch; + } + exports2.compare = compare; + function compare(a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)); + } + exports2.compareLoose = compareLoose; + function compareLoose(a, b) { + return compare(a, b, true); + } + exports2.compareBuild = compareBuild; + function compareBuild(a, b, loose) { + var versionA = new SemVer(a, loose); + var versionB = new SemVer(b, loose); + return versionA.compare(versionB) || versionA.compareBuild(versionB); + } + exports2.rcompare = rcompare; + function rcompare(a, b, loose) { + return compare(b, a, loose); + } + exports2.sort = sort; + function sort(list, loose) { + return list.sort(function(a, b) { + return exports2.compareBuild(a, b, loose); + }); + } + exports2.rsort = rsort; + function rsort(list, loose) { + return list.sort(function(a, b) { + return exports2.compareBuild(b, a, loose); + }); + } + exports2.gt = gt; + function gt(a, b, loose) { + return compare(a, b, loose) > 0; + } + exports2.lt = lt; + function lt(a, b, loose) { + return compare(a, b, loose) < 0; + } + exports2.eq = eq; + function eq(a, b, loose) { + return compare(a, b, loose) === 0; + } + exports2.neq = neq; + function neq(a, b, loose) { + return compare(a, b, loose) !== 0; + } + exports2.gte = gte4; + function gte4(a, b, loose) { + return compare(a, b, loose) >= 0; + } + exports2.lte = lte; + function lte(a, b, loose) { + return compare(a, b, loose) <= 0; + } + exports2.cmp = cmp; + function cmp(a, op, b, loose) { + switch (op) { + case "===": + if (typeof a === "object") + a = a.version; + if (typeof b === "object") + b = b.version; + return a === b; + case "!==": + if (typeof a === "object") + a = a.version; + if (typeof b === "object") + b = b.version; + return a !== b; + case "": + case "=": + case "==": + return eq(a, b, loose); + case "!=": + return neq(a, b, loose); + case ">": + return gt(a, b, loose); + case ">=": + return gte4(a, b, loose); + case "<": + return lt(a, b, loose); + case "<=": + return lte(a, b, loose); + default: + throw new TypeError("Invalid operator: " + op); } - if (isSub === SUBPARSE) { - return [re, hasMagic]; + } + exports2.Comparator = Comparator; + function Comparator(comp, options) { + if (!options || typeof options !== "object") { + options = { + loose: !!options, + includePrerelease: false + }; } - if (!hasMagic) { - return globUnescape(pattern); + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp; + } else { + comp = comp.value; + } } - var flags = options.nocase ? 
"i" : ""; - try { - var regExp = new RegExp("^" + re + "$", flags); - } catch (er) { - return new RegExp("$."); + if (!(this instanceof Comparator)) { + return new Comparator(comp, options); } - regExp._glob = pattern; - regExp._src = re; - return regExp; + comp = comp.trim().split(/\s+/).join(" "); + debug5("comparator", comp, options); + this.options = options; + this.loose = !!options.loose; + this.parse(comp); + if (this.semver === ANY) { + this.value = ""; + } else { + this.value = this.operator + this.semver.version; + } + debug5("comp", this); } - minimatch.makeRe = function(pattern, options) { - return new Minimatch(pattern, options || {}).makeRe(); - }; - Minimatch.prototype.makeRe = makeRe; - function makeRe() { - if (this.regexp || this.regexp === false) return this.regexp; - var set2 = this.set; - if (!set2.length) { - this.regexp = false; - return this.regexp; + var ANY = {}; + Comparator.prototype.parse = function(comp) { + var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]; + var m = comp.match(r); + if (!m) { + throw new TypeError("Invalid comparator: " + comp); } - var options = this.options; - var twoStar = options.noglobstar ? star : options.dot ? twoStarDot : twoStarNoDot; - var flags = options.nocase ? "i" : ""; - var re = set2.map(function(pattern) { - return pattern.map(function(p) { - return p === GLOBSTAR ? twoStar : typeof p === "string" ? regExpEscape(p) : p._src; - }).join("\\/"); - }).join("|"); - re = "^(?:" + re + ")$"; - if (this.negate) re = "^(?!" + re + ").*$"; - try { - this.regexp = new RegExp(re, flags); - } catch (ex) { - this.regexp = false; + this.operator = m[1] !== void 0 ? m[1] : ""; + if (this.operator === "=") { + this.operator = ""; } - return this.regexp; - } - minimatch.match = function(list, pattern, options) { - options = options || {}; - var mm = new Minimatch(pattern, options); - list = list.filter(function(f) { - return mm.match(f); - }); - if (mm.options.nonull && !list.length) { - list.push(pattern); + if (!m[2]) { + this.semver = ANY; + } else { + this.semver = new SemVer(m[2], this.options.loose); } - return list; }; - Minimatch.prototype.match = function match(f, partial) { - if (typeof partial === "undefined") partial = this.partial; - this.debug("match", f, this.pattern); - if (this.comment) return false; - if (this.empty) return f === ""; - if (f === "/" && partial) return true; - var options = this.options; - if (path3.sep !== "/") { - f = f.split(path3.sep).join("/"); + Comparator.prototype.toString = function() { + return this.value; + }; + Comparator.prototype.test = function(version) { + debug5("Comparator.test", version, this.options.loose); + if (this.semver === ANY || version === ANY) { + return true; } - f = f.split(slashSplit); - this.debug(this.pattern, "split", f); - var set2 = this.set; - this.debug(this.pattern, "set", set2); - var filename; - var i; - for (i = f.length - 1; i >= 0; i--) { - filename = f[i]; - if (filename) break; + if (typeof version === "string") { + try { + version = new SemVer(version, this.options); + } catch (er) { + return false; + } } - for (i = 0; i < set2.length; i++) { - var pattern = set2[i]; - var file = f; - if (options.matchBase && pattern.length === 1) { - file = [filename]; + return cmp(version, this.operator, this.semver, this.options); + }; + Comparator.prototype.intersects = function(comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError("a Comparator is required"); + } + if (!options || typeof options !== "object") { + options = 
{ + loose: !!options, + includePrerelease: false + }; + } + var rangeTmp; + if (this.operator === "") { + if (this.value === "") { + return true; } - var hit = this.matchOne(file, pattern, partial); - if (hit) { - if (options.flipNegate) return true; - return !this.negate; + rangeTmp = new Range2(comp.value, options); + return satisfies2(this.value, rangeTmp, options); + } else if (comp.operator === "") { + if (comp.value === "") { + return true; } + rangeTmp = new Range2(this.value, options); + return satisfies2(comp.semver, rangeTmp, options); } - if (options.flipNegate) return false; - return this.negate; + var sameDirectionIncreasing = (this.operator === ">=" || this.operator === ">") && (comp.operator === ">=" || comp.operator === ">"); + var sameDirectionDecreasing = (this.operator === "<=" || this.operator === "<") && (comp.operator === "<=" || comp.operator === "<"); + var sameSemVer = this.semver.version === comp.semver.version; + var differentDirectionsInclusive = (this.operator === ">=" || this.operator === "<=") && (comp.operator === ">=" || comp.operator === "<="); + var oppositeDirectionsLessThan = cmp(this.semver, "<", comp.semver, options) && ((this.operator === ">=" || this.operator === ">") && (comp.operator === "<=" || comp.operator === "<")); + var oppositeDirectionsGreaterThan = cmp(this.semver, ">", comp.semver, options) && ((this.operator === "<=" || this.operator === "<") && (comp.operator === ">=" || comp.operator === ">")); + return sameDirectionIncreasing || sameDirectionDecreasing || sameSemVer && differentDirectionsInclusive || oppositeDirectionsLessThan || oppositeDirectionsGreaterThan; }; - Minimatch.prototype.matchOne = function(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } - ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { - this.debug("matchOne loop"); - var p = pattern[pi]; - var f = file[fi]; - this.debug(pattern, p, f); - if (p === false) return false; - if (p === GLOBSTAR) { - this.debug("GLOBSTAR", [pattern, p, f]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." 
|| !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } - } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) return true; - } - return false; - } - var hit; - if (typeof p === "string") { - hit = f === p; - this.debug("string match", p, f, hit); + exports2.Range = Range2; + function Range2(range, options) { + if (!options || typeof options !== "object") { + options = { + loose: !!options, + includePrerelease: false + }; + } + if (range instanceof Range2) { + if (range.loose === !!options.loose && range.includePrerelease === !!options.includePrerelease) { + return range; } else { - hit = f.match(p); - this.debug("pattern match", p, f, hit); + return new Range2(range.raw, options); } - if (!hit) return false; } - if (fi === fl && pi === pl) { - return true; - } else if (fi === fl) { - return partial; - } else if (pi === pl) { - return fi === fl - 1 && file[fi] === ""; + if (range instanceof Comparator) { + return new Range2(range.value, options); } - throw new Error("wtf?"); + if (!(this instanceof Range2)) { + return new Range2(range, options); + } + this.options = options; + this.loose = !!options.loose; + this.includePrerelease = !!options.includePrerelease; + this.raw = range.trim().split(/\s+/).join(" "); + this.set = this.raw.split("||").map(function(range2) { + return this.parseRange(range2.trim()); + }, this).filter(function(c) { + return c.length; + }); + if (!this.set.length) { + throw new TypeError("Invalid SemVer Range: " + this.raw); + } + this.format(); + } + Range2.prototype.format = function() { + this.range = this.set.map(function(comps) { + return comps.join(" ").trim(); + }).join("||").trim(); + return this.range; }; - function globUnescape(s) { - return s.replace(/\\(.)/g, "$1"); + Range2.prototype.toString = function() { + return this.range; + }; + Range2.prototype.parseRange = function(range) { + var loose = this.options.loose; + var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]; + range = range.replace(hr, hyphenReplace); + debug5("hyphen replace", range); + range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace); + debug5("comparator trim", range, safeRe[t.COMPARATORTRIM]); + range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace); + range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace); + range = range.split(/\s+/).join(" "); + var compRe = loose ? 
safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]; + var set2 = range.split(" ").map(function(comp) { + return parseComparator(comp, this.options); + }, this).join(" ").split(/\s+/); + if (this.options.loose) { + set2 = set2.filter(function(comp) { + return !!comp.match(compRe); + }); + } + set2 = set2.map(function(comp) { + return new Comparator(comp, this.options); + }, this); + return set2; + }; + Range2.prototype.intersects = function(range, options) { + if (!(range instanceof Range2)) { + throw new TypeError("a Range is required"); + } + return this.set.some(function(thisComparators) { + return isSatisfiable(thisComparators, options) && range.set.some(function(rangeComparators) { + return isSatisfiable(rangeComparators, options) && thisComparators.every(function(thisComparator) { + return rangeComparators.every(function(rangeComparator) { + return thisComparator.intersects(rangeComparator, options); + }); + }); + }); + }); + }; + function isSatisfiable(comparators, options) { + var result = true; + var remainingComparators = comparators.slice(); + var testComparator = remainingComparators.pop(); + while (result && remainingComparators.length) { + result = remainingComparators.every(function(otherComparator) { + return testComparator.intersects(otherComparator, options); + }); + testComparator = remainingComparators.pop(); + } + return result; } - function regExpEscape(s) { - return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); + exports2.toComparators = toComparators; + function toComparators(range, options) { + return new Range2(range, options).set.map(function(comp) { + return comp.map(function(c) { + return c.value; + }).join(" ").trim().split(" "); + }); } - } -}); - -// node_modules/@actions/glob/lib/internal-path.js -var require_internal_path = __commonJS({ - "node_modules/@actions/glob/lib/internal-path.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + function parseComparator(comp, options) { + debug5("comp", comp, options); + comp = replaceCarets(comp, options); + debug5("caret", comp); + comp = replaceTildes(comp, options); + debug5("tildes", comp); + comp = replaceXRanges(comp, options); + debug5("xrange", comp); + comp = replaceStars(comp, options); + debug5("stars", comp); + return comp; + } + function isX(id) { + return !id || id.toLowerCase() === "x" || id === "*"; + } + function replaceTildes(comp, options) { + return comp.trim().split(/\s+/).map(function(comp2) { + return replaceTilde(comp2, options); + }).join(" "); + } + function replaceTilde(comp, options) { + var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]; + return comp.replace(r, function(_, M, m, p, pr) { + debug5("tilde", comp, _, M, m, p, pr); + var ret; + if (isX(M)) { + ret = ""; + } else if (isX(m)) { + ret = ">=" + M + ".0.0 <" + (+M + 1) + ".0.0"; + } else if (isX(p)) { + ret = ">=" + M + "." + m + ".0 <" + M + "." + (+m + 1) + ".0"; + } else if (pr) { + debug5("replaceTilde pr", pr); + ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + M + "." + (+m + 1) + ".0"; + } else { + ret = ">=" + M + "." + m + "." + p + " <" + M + "." + (+m + 1) + ".0"; } - __setModuleDefault2(result, mod); - return result; - }; - })(); - var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { - return mod && mod.__esModule ? mod : { "default": mod }; - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Path = void 0; - var path3 = __importStar2(require("path")); - var pathHelper = __importStar2(require_internal_path_helper()); - var assert_1 = __importDefault2(require("assert")); - var IS_WINDOWS = process.platform === "win32"; - var Path = class { - /** - * Constructs a Path - * @param itemPath Path or array of segments - */ - constructor(itemPath) { - this.segments = []; - if (typeof itemPath === "string") { - (0, assert_1.default)(itemPath, `Parameter 'itemPath' must not be empty`); - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path3.sep); + debug5("tilde return", ret); + return ret; + }); + } + function replaceCarets(comp, options) { + return comp.trim().split(/\s+/).map(function(comp2) { + return replaceCaret(comp2, options); + }).join(" "); + } + function replaceCaret(comp, options) { + debug5("caret", comp, options); + var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]; + return comp.replace(r, function(_, M, m, p, pr) { + debug5("caret", comp, _, M, m, p, pr); + var ret; + if (isX(M)) { + ret = ""; + } else if (isX(m)) { + ret = ">=" + M + ".0.0 <" + (+M + 1) + ".0.0"; + } else if (isX(p)) { + if (M === "0") { + ret = ">=" + M + "." + m + ".0 <" + M + "." 
+ (+m + 1) + ".0"; } else { - let remaining = itemPath; - let dir = pathHelper.dirname(remaining); - while (dir !== remaining) { - const basename = path3.basename(remaining); - this.segments.unshift(basename); - remaining = dir; - dir = pathHelper.dirname(remaining); + ret = ">=" + M + "." + m + ".0 <" + (+M + 1) + ".0.0"; + } + } else if (pr) { + debug5("replaceCaret pr", pr); + if (M === "0") { + if (m === "0") { + ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + M + "." + m + "." + (+p + 1); + } else { + ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + M + "." + (+m + 1) + ".0"; } - this.segments.unshift(remaining); + } else { + ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + (+M + 1) + ".0.0"; } } else { - (0, assert_1.default)(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); - for (let i = 0; i < itemPath.length; i++) { - let segment = itemPath[i]; - (0, assert_1.default)(segment, `Parameter 'itemPath' must not contain any empty segments`); - segment = pathHelper.normalizeSeparators(itemPath[i]); - if (i === 0 && pathHelper.hasRoot(segment)) { - segment = pathHelper.safeTrimTrailingSeparator(segment); - (0, assert_1.default)(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); - this.segments.push(segment); + debug5("no pr"); + if (M === "0") { + if (m === "0") { + ret = ">=" + M + "." + m + "." + p + " <" + M + "." + m + "." + (+p + 1); } else { - (0, assert_1.default)(!segment.includes(path3.sep), `Parameter 'itemPath' contains unexpected path separators`); - this.segments.push(segment); + ret = ">=" + M + "." + m + "." + p + " <" + M + "." + (+m + 1) + ".0"; } + } else { + ret = ">=" + M + "." + m + "." + p + " <" + (+M + 1) + ".0.0"; } } - } - /** - * Converts the path to it's string representation - */ - toString() { - let result = this.segments[0]; - let skipSlash = result.endsWith(path3.sep) || IS_WINDOWS && /^[A-Z]:$/i.test(result); - for (let i = 1; i < this.segments.length; i++) { - if (skipSlash) { - skipSlash = false; + debug5("caret return", ret); + return ret; + }); + } + function replaceXRanges(comp, options) { + debug5("replaceXRanges", comp, options); + return comp.split(/\s+/).map(function(comp2) { + return replaceXRange(comp2, options); + }).join(" "); + } + function replaceXRange(comp, options) { + comp = comp.trim(); + var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]; + return comp.replace(r, function(ret, gtlt, M, m, p, pr) { + debug5("xRange", comp, ret, gtlt, M, m, p, pr); + var xM = isX(M); + var xm = xM || isX(m); + var xp = xm || isX(p); + var anyX = xp; + if (gtlt === "=" && anyX) { + gtlt = ""; + } + pr = options.includePrerelease ? "-0" : ""; + if (xM) { + if (gtlt === ">" || gtlt === "<") { + ret = "<0.0.0-0"; } else { - result += path3.sep; + ret = "*"; } - result += this.segments[i]; + } else if (gtlt && anyX) { + if (xm) { + m = 0; + } + p = 0; + if (gtlt === ">") { + gtlt = ">="; + if (xm) { + M = +M + 1; + m = 0; + p = 0; + } else { + m = +m + 1; + p = 0; + } + } else if (gtlt === "<=") { + gtlt = "<"; + if (xm) { + M = +M + 1; + } else { + m = +m + 1; + } + } + ret = gtlt + M + "." + m + "." + p + pr; + } else if (xm) { + ret = ">=" + M + ".0.0" + pr + " <" + (+M + 1) + ".0.0" + pr; + } else if (xp) { + ret = ">=" + M + "." + m + ".0" + pr + " <" + M + "." 
+ (+m + 1) + ".0" + pr; } - return result; + debug5("xRange return", ret); + return ret; + }); + } + function replaceStars(comp, options) { + debug5("replaceStars", comp, options); + return comp.trim().replace(safeRe[t.STAR], ""); + } + function hyphenReplace($0, from, fM, fm, fp, fpr, fb, to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = ""; + } else if (isX(fm)) { + from = ">=" + fM + ".0.0"; + } else if (isX(fp)) { + from = ">=" + fM + "." + fm + ".0"; + } else { + from = ">=" + from; } - }; - exports2.Path = Path; - } -}); - -// node_modules/@actions/glob/lib/internal-pattern.js -var require_internal_pattern = __commonJS({ - "node_modules/@actions/glob/lib/internal-pattern.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; + if (isX(tM)) { + to = ""; + } else if (isX(tm)) { + to = "<" + (+tM + 1) + ".0.0"; + } else if (isX(tp)) { + to = "<" + tM + "." + (+tm + 1) + ".0"; + } else if (tpr) { + to = "<=" + tM + "." + tm + "." + tp + "-" + tpr; + } else { + to = "<=" + to; } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + return (from + " " + to).trim(); + } + Range2.prototype.test = function(version) { + if (!version) { + return false; + } + if (typeof version === "string") { + try { + version = new SemVer(version, this.options); + } catch (er) { + return false; } - __setModuleDefault2(result, mod); - return result; - }; - })(); - var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { - return mod && mod.__esModule ? 
mod : { "default": mod }; + } + for (var i2 = 0; i2 < this.set.length; i2++) { + if (testSet(this.set[i2], version, this.options)) { + return true; + } + } + return false; }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Pattern = void 0; - var os2 = __importStar2(require("os")); - var path3 = __importStar2(require("path")); - var pathHelper = __importStar2(require_internal_path_helper()); - var assert_1 = __importDefault2(require("assert")); - var minimatch_1 = require_minimatch(); - var internal_match_kind_1 = require_internal_match_kind(); - var internal_path_1 = require_internal_path(); - var IS_WINDOWS = process.platform === "win32"; - var Pattern = class _Pattern { - constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { - this.negate = false; - let pattern; - if (typeof patternOrNegate === "string") { - pattern = patternOrNegate.trim(); - } else { - segments = segments || []; - (0, assert_1.default)(segments.length, `Parameter 'segments' must not empty`); - const root = _Pattern.getLiteral(segments[0]); - (0, assert_1.default)(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); - pattern = new internal_path_1.Path(segments).toString().trim(); - if (patternOrNegate) { - pattern = `!${pattern}`; - } - } - while (pattern.startsWith("!")) { - this.negate = !this.negate; - pattern = pattern.substr(1).trim(); + function testSet(set2, version, options) { + for (var i2 = 0; i2 < set2.length; i2++) { + if (!set2[i2].test(version)) { + return false; } - pattern = _Pattern.fixupPattern(pattern, homedir); - this.segments = new internal_path_1.Path(pattern).segments; - this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path3.sep); - pattern = pathHelper.safeTrimTrailingSeparator(pattern); - let foundGlob = false; - const searchSegments = this.segments.map((x) => _Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); - this.searchPath = new internal_path_1.Path(searchSegments).toString(); - this.rootRegExp = new RegExp(_Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? "i" : ""); - this.isImplicitPattern = isImplicitPattern; - const minimatchOptions = { - dot: true, - nobrace: true, - nocase: IS_WINDOWS, - nocomment: true, - noext: true, - nonegate: true - }; - pattern = IS_WINDOWS ? pattern.replace(/\\/g, "/") : pattern; - this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); } - /** - * Matches the pattern against the specified path - */ - match(itemPath) { - if (this.segments[this.segments.length - 1] === "**") { - itemPath = pathHelper.normalizeSeparators(itemPath); - if (!itemPath.endsWith(path3.sep) && this.isImplicitPattern === false) { - itemPath = `${itemPath}${path3.sep}`; + if (version.prerelease.length && !options.includePrerelease) { + for (i2 = 0; i2 < set2.length; i2++) { + debug5(set2[i2].semver); + if (set2[i2].semver === ANY) { + continue; + } + if (set2[i2].semver.prerelease.length > 0) { + var allowed = set2[i2].semver; + if (allowed.major === version.major && allowed.minor === version.minor && allowed.patch === version.patch) { + return true; + } } - } else { - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - } - if (this.minimatch.match(itemPath)) { - return this.trailingSeparator ? 
internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; } - return internal_match_kind_1.MatchKind.None; + return false; } - /** - * Indicates whether the pattern may match descendants of the specified path - */ - partialMatch(itemPath) { - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - if (pathHelper.dirname(itemPath) === itemPath) { - return this.rootRegExp.test(itemPath); - } - return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); + return true; + } + exports2.satisfies = satisfies2; + function satisfies2(version, range, options) { + try { + range = new Range2(range, options); + } catch (er) { + return false; } - /** - * Escapes glob patterns within a path - */ - static globEscape(s) { - return (IS_WINDOWS ? s : s.replace(/\\/g, "\\\\")).replace(/(\[)(?=[^/]+\])/g, "[[]").replace(/\?/g, "[?]").replace(/\*/g, "[*]"); + return range.test(version); + } + exports2.maxSatisfying = maxSatisfying; + function maxSatisfying(versions, range, options) { + var max = null; + var maxSV = null; + try { + var rangeObj = new Range2(range, options); + } catch (er) { + return null; } - /** - * Normalizes slashes and ensures absolute root - */ - static fixupPattern(pattern, homedir) { - (0, assert_1.default)(pattern, "pattern cannot be empty"); - const literalSegments = new internal_path_1.Path(pattern).segments.map((x) => _Pattern.getLiteral(x)); - (0, assert_1.default)(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); - (0, assert_1.default)(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); - pattern = pathHelper.normalizeSeparators(pattern); - if (pattern === "." || pattern.startsWith(`.${path3.sep}`)) { - pattern = _Pattern.globEscape(process.cwd()) + pattern.substr(1); - } else if (pattern === "~" || pattern.startsWith(`~${path3.sep}`)) { - homedir = homedir || os2.homedir(); - (0, assert_1.default)(homedir, "Unable to determine HOME directory"); - (0, assert_1.default)(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. 
Actual '${homedir}'`); - pattern = _Pattern.globEscape(homedir) + pattern.substr(1); - } else if (IS_WINDOWS && (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { - let root = pathHelper.ensureAbsoluteRoot("C:\\dummy-root", pattern.substr(0, 2)); - if (pattern.length > 2 && !root.endsWith("\\")) { - root += "\\"; + versions.forEach(function(v) { + if (rangeObj.test(v)) { + if (!max || maxSV.compare(v) === -1) { + max = v; + maxSV = new SemVer(max, options); } - pattern = _Pattern.globEscape(root) + pattern.substr(2); - } else if (IS_WINDOWS && (pattern === "\\" || pattern.match(/^\\[^\\]/))) { - let root = pathHelper.ensureAbsoluteRoot("C:\\dummy-root", "\\"); - if (!root.endsWith("\\")) { - root += "\\"; + } + }); + return max; + } + exports2.minSatisfying = minSatisfying; + function minSatisfying(versions, range, options) { + var min = null; + var minSV = null; + try { + var rangeObj = new Range2(range, options); + } catch (er) { + return null; + } + versions.forEach(function(v) { + if (rangeObj.test(v)) { + if (!min || minSV.compare(v) === 1) { + min = v; + minSV = new SemVer(min, options); } - pattern = _Pattern.globEscape(root) + pattern.substr(1); - } else { - pattern = pathHelper.ensureAbsoluteRoot(_Pattern.globEscape(process.cwd()), pattern); } - return pathHelper.normalizeSeparators(pattern); + }); + return min; + } + exports2.minVersion = minVersion; + function minVersion(range, loose) { + range = new Range2(range, loose); + var minver = new SemVer("0.0.0"); + if (range.test(minver)) { + return minver; } - /** - * Attempts to unescape a pattern segment to create a literal path segment. - * Otherwise returns empty string. - */ - static getLiteral(segment) { - let literal = ""; - for (let i = 0; i < segment.length; i++) { - const c = segment[i]; - if (c === "\\" && !IS_WINDOWS && i + 1 < segment.length) { - literal += segment[++i]; - continue; - } else if (c === "*" || c === "?") { - return ""; - } else if (c === "[" && i + 1 < segment.length) { - let set2 = ""; - let closed = -1; - for (let i2 = i + 1; i2 < segment.length; i2++) { - const c2 = segment[i2]; - if (c2 === "\\" && !IS_WINDOWS && i2 + 1 < segment.length) { - set2 += segment[++i2]; - continue; - } else if (c2 === "]") { - closed = i2; - break; + minver = new SemVer("0.0.0-0"); + if (range.test(minver)) { + return minver; + } + minver = null; + for (var i2 = 0; i2 < range.set.length; ++i2) { + var comparators = range.set[i2]; + comparators.forEach(function(comparator) { + var compver = new SemVer(comparator.semver.version); + switch (comparator.operator) { + case ">": + if (compver.prerelease.length === 0) { + compver.patch++; } else { - set2 += c2; - } - } - if (closed >= 0) { - if (set2.length > 1) { - return ""; + compver.prerelease.push(0); } - if (set2) { - literal += set2; - i = closed; - continue; + compver.raw = compver.format(); + /* fallthrough */ + case "": + case ">=": + if (!minver || gt(minver, compver)) { + minver = compver; } - } + break; + case "<": + case "<=": + break; + /* istanbul ignore next */ + default: + throw new Error("Unexpected operation: " + comparator.operator); } - literal += c; + }); + } + if (minver && range.test(minver)) { + return minver; + } + return null; + } + exports2.validRange = validRange; + function validRange(range, options) { + try { + return new Range2(range, options).range || "*"; + } catch (er) { + return null; + } + } + exports2.ltr = ltr; + function ltr(version, range, options) { + return outside(version, range, "<", options); + } + exports2.gtr = gtr; + 
function gtr(version, range, options) { + return outside(version, range, ">", options); + } + exports2.outside = outside; + function outside(version, range, hilo, options) { + version = new SemVer(version, options); + range = new Range2(range, options); + var gtfn, ltefn, ltfn, comp, ecomp; + switch (hilo) { + case ">": + gtfn = gt; + ltefn = lte; + ltfn = lt; + comp = ">"; + ecomp = ">="; + break; + case "<": + gtfn = lt; + ltefn = gte4; + ltfn = gt; + comp = "<"; + ecomp = "<="; + break; + default: + throw new TypeError('Must provide a hilo val of "<" or ">"'); + } + if (satisfies2(version, range, options)) { + return false; + } + for (var i2 = 0; i2 < range.set.length; ++i2) { + var comparators = range.set[i2]; + var high = null; + var low = null; + comparators.forEach(function(comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator(">=0.0.0"); + } + high = high || comparator; + low = low || comparator; + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator; + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator; + } + }); + if (high.operator === comp || high.operator === ecomp) { + return false; + } + if ((!low.operator || low.operator === comp) && ltefn(version, low.semver)) { + return false; + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false; } - return literal; } - /** - * Escapes regexp special characters - * https://javascript.info/regexp-escaping - */ - static regExpEscape(s) { - return s.replace(/[[\\^$.|?*+()]/g, "\\$&"); + return true; + } + exports2.prerelease = prerelease; + function prerelease(version, options) { + var parsed = parse2(version, options); + return parsed && parsed.prerelease.length ? parsed.prerelease : null; + } + exports2.intersects = intersects; + function intersects(r1, r2, options) { + r1 = new Range2(r1, options); + r2 = new Range2(r2, options); + return r1.intersects(r2); + } + exports2.coerce = coerce2; + function coerce2(version, options) { + if (version instanceof SemVer) { + return version; } - }; - exports2.Pattern = Pattern; + if (typeof version === "number") { + version = String(version); + } + if (typeof version !== "string") { + return null; + } + options = options || {}; + var match = null; + if (!options.rtl) { + match = version.match(safeRe[t.COERCE]); + } else { + var next; + while ((next = safeRe[t.COERCERTL].exec(version)) && (!match || match.index + match[0].length !== version.length)) { + if (!match || next.index + next[0].length !== match.index + match[0].length) { + match = next; + } + safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length; + } + safeRe[t.COERCERTL].lastIndex = -1; + } + if (match === null) { + return null; + } + return parse2(match[2] + "." + (match[3] || "0") + "." 
+ (match[4] || "0"), options); + } } }); -// node_modules/@actions/glob/lib/internal-search-state.js -var require_internal_search_state = __commonJS({ - "node_modules/@actions/glob/lib/internal-search-state.js"(exports2) { +// node_modules/@actions/cache/lib/internal/constants.js +var require_constants12 = __commonJS({ + "node_modules/@actions/cache/lib/internal/constants.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.SearchState = void 0; - var SearchState = class { - constructor(path3, level) { - this.path = path3; - this.level = level; - } - }; - exports2.SearchState = SearchState; + exports2.CacheFileSizeLimit = exports2.ManifestFilename = exports2.TarFilename = exports2.SystemTarPathOnWindows = exports2.GnuTarPathOnWindows = exports2.SocketTimeout = exports2.DefaultRetryDelay = exports2.DefaultRetryAttempts = exports2.ArchiveToolType = exports2.CompressionMethod = exports2.CacheFilename = void 0; + var CacheFilename; + (function(CacheFilename2) { + CacheFilename2["Gzip"] = "cache.tgz"; + CacheFilename2["Zstd"] = "cache.tzst"; + })(CacheFilename || (exports2.CacheFilename = CacheFilename = {})); + var CompressionMethod; + (function(CompressionMethod2) { + CompressionMethod2["Gzip"] = "gzip"; + CompressionMethod2["ZstdWithoutLong"] = "zstd-without-long"; + CompressionMethod2["Zstd"] = "zstd"; + })(CompressionMethod || (exports2.CompressionMethod = CompressionMethod = {})); + var ArchiveToolType; + (function(ArchiveToolType2) { + ArchiveToolType2["GNU"] = "gnu"; + ArchiveToolType2["BSD"] = "bsd"; + })(ArchiveToolType || (exports2.ArchiveToolType = ArchiveToolType = {})); + exports2.DefaultRetryAttempts = 2; + exports2.DefaultRetryDelay = 5e3; + exports2.SocketTimeout = 5e3; + exports2.GnuTarPathOnWindows = `${process.env["PROGRAMFILES"]}\\Git\\usr\\bin\\tar.exe`; + exports2.SystemTarPathOnWindows = `${process.env["SYSTEMDRIVE"]}\\Windows\\System32\\tar.exe`; + exports2.TarFilename = "cache.tar"; + exports2.ManifestFilename = "manifest.txt"; + exports2.CacheFileSizeLimit = 10 * Math.pow(1024, 3); } }); -// node_modules/@actions/glob/lib/internal-globber.js -var require_internal_globber = __commonJS({ - "node_modules/@actions/glob/lib/internal-globber.js"(exports2) { +// node_modules/@actions/cache/lib/internal/cacheUtils.js +var require_cacheUtils = __commonJS({ + "node_modules/@actions/cache/lib/internal/cacheUtils.js"(exports2) { "use strict"; var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { if (k2 === void 0) k2 = k; @@ -68865,334 +68428,73 @@ var require_internal_globber = __commonJS({ }, reject); } }; - var __await2 = exports2 && exports2.__await || function(v) { - return this instanceof __await2 ? (this.v = v, this) : new __await2(v); - }; - var __asyncGenerator2 = exports2 && exports2.__asyncGenerator || function(thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = Object.create((typeof AsyncIterator === "function" ? 
AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function() { - return this; - }, i; - function awaitReturn(f) { - return function(v) { - return Promise.resolve(v).then(f, reject); - }; - } - function verb(n, f) { - if (g[n]) { - i[n] = function(v) { - return new Promise(function(a, b) { - q.push([n, v, a, b]) > 1 || resume(n, v); - }); - }; - if (f) i[n] = f(i[n]); - } - } - function resume(n, v) { - try { - step(g[n](v)); - } catch (e) { - settle(q[0][3], e); - } - } - function step(r) { - r.value instanceof __await2 ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); - } - function fulfill(value) { - resume("next", value); - } - function reject(value) { - resume("throw", value); - } - function settle(f, v) { - if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); - } - }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.DefaultGlobber = void 0; + exports2.createTempDirectory = createTempDirectory; + exports2.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; + exports2.resolvePaths = resolvePaths; + exports2.unlinkFile = unlinkFile; + exports2.getCompressionMethod = getCompressionMethod; + exports2.getCacheFileName = getCacheFileName; + exports2.getGnuTarPathOnWindows = getGnuTarPathOnWindows; + exports2.assertDefined = assertDefined; + exports2.getCacheVersion = getCacheVersion; + exports2.getRuntimeToken = getRuntimeToken; var core12 = __importStar2(require_core()); - var fs = __importStar2(require("fs")); - var globOptionsHelper = __importStar2(require_internal_glob_options_helper()); - var path3 = __importStar2(require("path")); - var patternHelper = __importStar2(require_internal_pattern_helper()); - var internal_match_kind_1 = require_internal_match_kind(); - var internal_pattern_1 = require_internal_pattern(); - var internal_search_state_1 = require_internal_search_state(); - var IS_WINDOWS = process.platform === "win32"; - var DefaultGlobber = class _DefaultGlobber { - constructor(options) { - this.patterns = []; - this.searchPaths = []; - this.options = globOptionsHelper.getOptions(options); - } - getSearchPaths() { - return this.searchPaths.slice(); - } - glob() { - return __awaiter2(this, void 0, void 0, function* () { - var _a, e_1, _b, _c; - const result = []; - try { - for (var _d = true, _e = __asyncValues2(this.globGenerator()), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) { - _c = _f.value; - _d = false; - const itemPath = _c; - result.push(itemPath); - } - } catch (e_1_1) { - e_1 = { error: e_1_1 }; - } finally { - try { - if (!_d && !_a && (_b = _e.return)) yield _b.call(_e); - } finally { - if (e_1) throw e_1.error; + var exec3 = __importStar2(require_exec()); + var glob = __importStar2(require_glob()); + var io4 = __importStar2(require_io()); + var crypto2 = __importStar2(require("crypto")); + var fs2 = __importStar2(require("fs")); + var path4 = __importStar2(require("path")); + var semver6 = __importStar2(require_semver3()); + var util = __importStar2(require("util")); + var constants_1 = require_constants12(); + var versionSalt = "1.0"; + function createTempDirectory() { + return __awaiter2(this, void 0, void 0, function* () { + const IS_WINDOWS = process.platform === "win32"; + let tempDirectory = process.env["RUNNER_TEMP"] || ""; + if (!tempDirectory) { + let baseLocation; + if (IS_WINDOWS) { + baseLocation = process.env["USERPROFILE"] || "C:\\"; + } else { + if (process.platform === "darwin") { + baseLocation = "/Users"; + } 
else { + baseLocation = "/home"; } } - return result; - }); - } - globGenerator() { - return __asyncGenerator2(this, arguments, function* globGenerator_1() { - const options = globOptionsHelper.getOptions(this.options); - const patterns = []; - for (const pattern of this.patterns) { - patterns.push(pattern); - if (options.implicitDescendants && (pattern.trailingSeparator || pattern.segments[pattern.segments.length - 1] !== "**")) { - patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat("**"))); - } - } - const stack = []; - for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core12.debug(`Search path '${searchPath}'`); - try { - yield __await2(fs.promises.lstat(searchPath)); - } catch (err) { - if (err.code === "ENOENT") { - continue; - } - throw err; - } - stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); - } - const traversalChain = []; - while (stack.length) { - const item = stack.pop(); - const match = patternHelper.match(patterns, item.path); - const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); - if (!match && !partialMatch) { - continue; - } - const stats = yield __await2( - _DefaultGlobber.stat(item, options, traversalChain) - // Broken symlink, or symlink cycle detected, or no longer exists - ); - if (!stats) { - continue; - } - if (options.excludeHiddenFiles && path3.basename(item.path).match(/^\./)) { - continue; - } - if (stats.isDirectory()) { - if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) { - yield yield __await2(item.path); - } else if (!partialMatch) { - continue; - } - const childLevel = item.level + 1; - const childItems = (yield __await2(fs.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path3.join(item.path, x), childLevel)); - stack.push(...childItems.reverse()); - } else if (match & internal_match_kind_1.MatchKind.File) { - yield yield __await2(item.path); - } - } - }); - } - /** - * Constructs a DefaultGlobber - */ - static create(patterns, options) { - return __awaiter2(this, void 0, void 0, function* () { - const result = new _DefaultGlobber(options); - if (IS_WINDOWS) { - patterns = patterns.replace(/\r\n/g, "\n"); - patterns = patterns.replace(/\r/g, "\n"); - } - const lines = patterns.split("\n").map((x) => x.trim()); - for (const line of lines) { - if (!line || line.startsWith("#")) { - continue; - } else { - result.patterns.push(new internal_pattern_1.Pattern(line)); - } - } - result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); - return result; - }); - } - static stat(item, options, traversalChain) { - return __awaiter2(this, void 0, void 0, function* () { - let stats; - if (options.followSymbolicLinks) { - try { - stats = yield fs.promises.stat(item.path); - } catch (err) { - if (err.code === "ENOENT") { - if (options.omitBrokenSymbolicLinks) { - core12.debug(`Broken symlink '${item.path}'`); - return void 0; - } - throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); - } - throw err; - } - } else { - stats = yield fs.promises.lstat(item.path); - } - if (stats.isDirectory() && options.followSymbolicLinks) { - const realPath = yield fs.promises.realpath(item.path); - while (traversalChain.length >= item.level) { - traversalChain.pop(); - } - if (traversalChain.some((x) => x === realPath)) { - core12.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); - return void 0; - } - traversalChain.push(realPath); - } - return stats; - }); - } - }; - exports2.DefaultGlobber = DefaultGlobber; - } -}); - -// node_modules/@actions/glob/lib/internal-hash-files.js -var require_internal_hash_files = __commonJS({ - "node_modules/@actions/glob/lib/internal-hash-files.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); - } - __setModuleDefault2(result, mod); - return result; - }; - })(); - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve2) { - resolve2(value); - }); - } - return new (P || (P = Promise))(function(resolve2, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + tempDirectory = path4.join(baseLocation, "actions", "temp"); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); + const dest = path4.join(tempDirectory, crypto2.randomUUID()); + yield io4.mkdirP(dest); + return dest; }); - }; - var __asyncValues2 = exports2 && exports2.__asyncValues || function(o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { - return this; - }, i); - function verb(n) { - i[n] = o[n] && function(v) { - return new Promise(function(resolve2, reject) { - v = o[n](v), settle(resolve2, reject, v.done, v.value); - }); - }; - } - function settle(resolve2, reject, d, v) { - Promise.resolve(v).then(function(v2) { - resolve2({ value: v2, done: d }); - }, reject); - } - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.hashFiles = hashFiles; - var crypto2 = __importStar2(require("crypto")); - var core12 = __importStar2(require_core()); - var fs = __importStar2(require("fs")); - var stream = __importStar2(require("stream")); - var util = __importStar2(require("util")); - var path3 = __importStar2(require("path")); - function hashFiles(globber_1, currentWorkspace_1) { - return __awaiter2(this, arguments, void 0, function* (globber, currentWorkspace, verbose = false) { + } + function getArchiveFileSizeInBytes(filePath) { + return fs2.statSync(filePath).size; + } + function resolvePaths(patterns) { + return __awaiter2(this, void 0, void 0, function* () { var _a, e_1, _b, _c; var _d; - const writeDelegate = verbose ? core12.info : core12.debug; - let hasMatch = false; - const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd(); - const result = crypto2.createHash("sha256"); - let count = 0; + const paths = []; + const workspace = (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd(); + const globber = yield glob.create(patterns.join("\n"), { + implicitDescendants: false + }); try { for (var _e = true, _f = __asyncValues2(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) { _c = _g.value; _e = false; const file = _c; - writeDelegate(file); - if (!file.startsWith(`${githubWorkspace}${path3.sep}`)) { - writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); - continue; - } - if (fs.statSync(file).isDirectory()) { - writeDelegate(`Skip directory '${file}'.`); - continue; - } - const hash = crypto2.createHash("sha256"); - const pipeline = util.promisify(stream.pipeline); - yield pipeline(fs.createReadStream(file), hash); - result.write(hash.digest()); - count++; - if (!hasMatch) { - hasMatch = true; + const relativeFile = path4.relative(workspace, file).replace(new RegExp(`\\${path4.sep}`, "g"), "/"); + core12.debug(`Matched: ${relativeFile}`); + if (relativeFile === "") { + paths.push("."); + } else { + paths.push(`${relativeFile}`); } } } catch (e_1_1) { @@ -69204,4676 +68506,3577 @@ var require_internal_hash_files = __commonJS({ if (e_1) throw e_1.error; } } - result.end(); - if (hasMatch) { - writeDelegate(`Found ${count} files to hash.`); - return result.digest("hex"); - } else { - writeDelegate(`No matches found for glob`); - return ""; - } + return paths; }); } - } -}); - -// node_modules/@actions/glob/lib/glob.js -var require_glob = __commonJS({ - "node_modules/@actions/glob/lib/glob.js"(exports2) { - "use strict"; - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? 
value : new P(function(resolve2) { - resolve2(value); - }); - } - return new (P || (P = Promise))(function(resolve2, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); - } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.create = create; - exports2.hashFiles = hashFiles; - var internal_globber_1 = require_internal_globber(); - var internal_hash_files_1 = require_internal_hash_files(); - function create(patterns, options) { + function unlinkFile(filePath) { return __awaiter2(this, void 0, void 0, function* () { - return yield internal_globber_1.DefaultGlobber.create(patterns, options); + return util.promisify(fs2.unlink)(filePath); }); } - function hashFiles(patterns_1) { - return __awaiter2(this, arguments, void 0, function* (patterns, currentWorkspace = "", options, verbose = false) { - let followSymbolicLinks = true; - if (options && typeof options.followSymbolicLinks === "boolean") { - followSymbolicLinks = options.followSymbolicLinks; + function getVersion(app_1) { + return __awaiter2(this, arguments, void 0, function* (app, additionalArgs = []) { + let versionOutput = ""; + additionalArgs.push("--version"); + core12.debug(`Checking ${app} ${additionalArgs.join(" ")}`); + try { + yield exec3.exec(`${app}`, additionalArgs, { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data) => versionOutput += data.toString(), + stderr: (data) => versionOutput += data.toString() + } + }); + } catch (err) { + core12.debug(err.message); } - const globber = yield create(patterns, { followSymbolicLinks }); - return (0, internal_hash_files_1.hashFiles)(globber, currentWorkspace, verbose); + versionOutput = versionOutput.trim(); + core12.debug(versionOutput); + return versionOutput; }); } - } -}); - -// node_modules/@actions/cache/node_modules/semver/semver.js -var require_semver3 = __commonJS({ - "node_modules/@actions/cache/node_modules/semver/semver.js"(exports2, module2) { - exports2 = module2.exports = SemVer; - var debug5; - if (typeof process === "object" && process.env && process.env.NODE_DEBUG && /\bsemver\b/i.test(process.env.NODE_DEBUG)) { - debug5 = function() { - var args = Array.prototype.slice.call(arguments, 0); - args.unshift("SEMVER"); - console.log.apply(console, args); - }; - } else { - debug5 = function() { - }; + function getCompressionMethod() { + return __awaiter2(this, void 0, void 0, function* () { + const versionOutput = yield getVersion("zstd", ["--quiet"]); + const version = semver6.clean(versionOutput); + core12.debug(`zstd version: ${version}`); + if (versionOutput === "") { + return constants_1.CompressionMethod.Gzip; + } else { + return constants_1.CompressionMethod.ZstdWithoutLong; + } + }); } - exports2.SEMVER_SPEC_VERSION = "2.0.0"; - var MAX_LENGTH = 256; - var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || /* istanbul ignore next */ - 9007199254740991; - var MAX_SAFE_COMPONENT_LENGTH = 16; - var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6; - var re = exports2.re = []; - var safeRe = exports2.safeRe = []; - var src = exports2.src = []; - var t = exports2.tokens = {}; - var R = 0; - function tok(n) { - t[n] = R++; + function getCacheFileName(compressionMethod) { + 
return compressionMethod === constants_1.CompressionMethod.Gzip ? constants_1.CacheFilename.Gzip : constants_1.CacheFilename.Zstd; } - var LETTERDASHNUMBER = "[a-zA-Z0-9-]"; - var safeRegexReplacements = [ - ["\\s", 1], - ["\\d", MAX_LENGTH], - [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH] - ]; - function makeSafeRe(value) { - for (var i2 = 0; i2 < safeRegexReplacements.length; i2++) { - var token = safeRegexReplacements[i2][0]; - var max = safeRegexReplacements[i2][1]; - value = value.split(token + "*").join(token + "{0," + max + "}").split(token + "+").join(token + "{1," + max + "}"); - } - return value; + function getGnuTarPathOnWindows() { + return __awaiter2(this, void 0, void 0, function* () { + if (fs2.existsSync(constants_1.GnuTarPathOnWindows)) { + return constants_1.GnuTarPathOnWindows; + } + const versionOutput = yield getVersion("tar"); + return versionOutput.toLowerCase().includes("gnu tar") ? io4.which("tar") : ""; + }); } - tok("NUMERICIDENTIFIER"); - src[t.NUMERICIDENTIFIER] = "0|[1-9]\\d*"; - tok("NUMERICIDENTIFIERLOOSE"); - src[t.NUMERICIDENTIFIERLOOSE] = "\\d+"; - tok("NONNUMERICIDENTIFIER"); - src[t.NONNUMERICIDENTIFIER] = "\\d*[a-zA-Z-]" + LETTERDASHNUMBER + "*"; - tok("MAINVERSION"); - src[t.MAINVERSION] = "(" + src[t.NUMERICIDENTIFIER] + ")\\.(" + src[t.NUMERICIDENTIFIER] + ")\\.(" + src[t.NUMERICIDENTIFIER] + ")"; - tok("MAINVERSIONLOOSE"); - src[t.MAINVERSIONLOOSE] = "(" + src[t.NUMERICIDENTIFIERLOOSE] + ")\\.(" + src[t.NUMERICIDENTIFIERLOOSE] + ")\\.(" + src[t.NUMERICIDENTIFIERLOOSE] + ")"; - tok("PRERELEASEIDENTIFIER"); - src[t.PRERELEASEIDENTIFIER] = "(?:" + src[t.NUMERICIDENTIFIER] + "|" + src[t.NONNUMERICIDENTIFIER] + ")"; - tok("PRERELEASEIDENTIFIERLOOSE"); - src[t.PRERELEASEIDENTIFIERLOOSE] = "(?:" + src[t.NUMERICIDENTIFIERLOOSE] + "|" + src[t.NONNUMERICIDENTIFIER] + ")"; - tok("PRERELEASE"); - src[t.PRERELEASE] = "(?:-(" + src[t.PRERELEASEIDENTIFIER] + "(?:\\." + src[t.PRERELEASEIDENTIFIER] + ")*))"; - tok("PRERELEASELOOSE"); - src[t.PRERELEASELOOSE] = "(?:-?(" + src[t.PRERELEASEIDENTIFIERLOOSE] + "(?:\\." + src[t.PRERELEASEIDENTIFIERLOOSE] + ")*))"; - tok("BUILDIDENTIFIER"); - src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + "+"; - tok("BUILD"); - src[t.BUILD] = "(?:\\+(" + src[t.BUILDIDENTIFIER] + "(?:\\." + src[t.BUILDIDENTIFIER] + ")*))"; - tok("FULL"); - tok("FULLPLAIN"); - src[t.FULLPLAIN] = "v?" + src[t.MAINVERSION] + src[t.PRERELEASE] + "?" + src[t.BUILD] + "?"; - src[t.FULL] = "^" + src[t.FULLPLAIN] + "$"; - tok("LOOSEPLAIN"); - src[t.LOOSEPLAIN] = "[v=\\s]*" + src[t.MAINVERSIONLOOSE] + src[t.PRERELEASELOOSE] + "?" + src[t.BUILD] + "?"; - tok("LOOSE"); - src[t.LOOSE] = "^" + src[t.LOOSEPLAIN] + "$"; - tok("GTLT"); - src[t.GTLT] = "((?:<|>)?=?)"; - tok("XRANGEIDENTIFIERLOOSE"); - src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + "|x|X|\\*"; - tok("XRANGEIDENTIFIER"); - src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + "|x|X|\\*"; - tok("XRANGEPLAIN"); - src[t.XRANGEPLAIN] = "[v=\\s]*(" + src[t.XRANGEIDENTIFIER] + ")(?:\\.(" + src[t.XRANGEIDENTIFIER] + ")(?:\\.(" + src[t.XRANGEIDENTIFIER] + ")(?:" + src[t.PRERELEASE] + ")?" + src[t.BUILD] + "?)?)?"; - tok("XRANGEPLAINLOOSE"); - src[t.XRANGEPLAINLOOSE] = "[v=\\s]*(" + src[t.XRANGEIDENTIFIERLOOSE] + ")(?:\\.(" + src[t.XRANGEIDENTIFIERLOOSE] + ")(?:\\.(" + src[t.XRANGEIDENTIFIERLOOSE] + ")(?:" + src[t.PRERELEASELOOSE] + ")?" 
+ src[t.BUILD] + "?)?)?"; - tok("XRANGE"); - src[t.XRANGE] = "^" + src[t.GTLT] + "\\s*" + src[t.XRANGEPLAIN] + "$"; - tok("XRANGELOOSE"); - src[t.XRANGELOOSE] = "^" + src[t.GTLT] + "\\s*" + src[t.XRANGEPLAINLOOSE] + "$"; - tok("COERCE"); - src[t.COERCE] = "(^|[^\\d])(\\d{1," + MAX_SAFE_COMPONENT_LENGTH + "})(?:\\.(\\d{1," + MAX_SAFE_COMPONENT_LENGTH + "}))?(?:\\.(\\d{1," + MAX_SAFE_COMPONENT_LENGTH + "}))?(?:$|[^\\d])"; - tok("COERCERTL"); - re[t.COERCERTL] = new RegExp(src[t.COERCE], "g"); - safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), "g"); - tok("LONETILDE"); - src[t.LONETILDE] = "(?:~>?)"; - tok("TILDETRIM"); - src[t.TILDETRIM] = "(\\s*)" + src[t.LONETILDE] + "\\s+"; - re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], "g"); - safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), "g"); - var tildeTrimReplace = "$1~"; - tok("TILDE"); - src[t.TILDE] = "^" + src[t.LONETILDE] + src[t.XRANGEPLAIN] + "$"; - tok("TILDELOOSE"); - src[t.TILDELOOSE] = "^" + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + "$"; - tok("LONECARET"); - src[t.LONECARET] = "(?:\\^)"; - tok("CARETTRIM"); - src[t.CARETTRIM] = "(\\s*)" + src[t.LONECARET] + "\\s+"; - re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], "g"); - safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), "g"); - var caretTrimReplace = "$1^"; - tok("CARET"); - src[t.CARET] = "^" + src[t.LONECARET] + src[t.XRANGEPLAIN] + "$"; - tok("CARETLOOSE"); - src[t.CARETLOOSE] = "^" + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + "$"; - tok("COMPARATORLOOSE"); - src[t.COMPARATORLOOSE] = "^" + src[t.GTLT] + "\\s*(" + src[t.LOOSEPLAIN] + ")$|^$"; - tok("COMPARATOR"); - src[t.COMPARATOR] = "^" + src[t.GTLT] + "\\s*(" + src[t.FULLPLAIN] + ")$|^$"; - tok("COMPARATORTRIM"); - src[t.COMPARATORTRIM] = "(\\s*)" + src[t.GTLT] + "\\s*(" + src[t.LOOSEPLAIN] + "|" + src[t.XRANGEPLAIN] + ")"; - re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], "g"); - safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), "g"); - var comparatorTrimReplace = "$1$2$3"; - tok("HYPHENRANGE"); - src[t.HYPHENRANGE] = "^\\s*(" + src[t.XRANGEPLAIN] + ")\\s+-\\s+(" + src[t.XRANGEPLAIN] + ")\\s*$"; - tok("HYPHENRANGELOOSE"); - src[t.HYPHENRANGELOOSE] = "^\\s*(" + src[t.XRANGEPLAINLOOSE] + ")\\s+-\\s+(" + src[t.XRANGEPLAINLOOSE] + ")\\s*$"; - tok("STAR"); - src[t.STAR] = "(<|>)?=?\\s*\\*"; - for (i = 0; i < R; i++) { - debug5(i, src[i]); - if (!re[i]) { - re[i] = new RegExp(src[i]); - safeRe[i] = new RegExp(makeSafeRe(src[i])); + function assertDefined(name, value) { + if (value === void 0) { + throw Error(`Expected ${name} but value was undefiend`); } + return value; } - var i; - exports2.parse = parse2; - function parse2(version, options) { - if (!options || typeof options !== "object") { - options = { - loose: !!options, - includePrerelease: false - }; - } - if (version instanceof SemVer) { - return version; - } - if (typeof version !== "string") { - return null; - } - if (version.length > MAX_LENGTH) { - return null; - } - var r = options.loose ? 
safeRe[t.LOOSE] : safeRe[t.FULL]; - if (!r.test(version)) { - return null; + function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { + const components = paths.slice(); + if (compressionMethod) { + components.push(compressionMethod); } - try { - return new SemVer(version, options); - } catch (er) { - return null; + if (process.platform === "win32" && !enableCrossOsArchive) { + components.push("windows-only"); } + components.push(versionSalt); + return crypto2.createHash("sha256").update(components.join("|")).digest("hex"); } - exports2.valid = valid2; - function valid2(version, options) { - var v = parse2(version, options); - return v ? v.version : null; + function getRuntimeToken() { + const token = process.env["ACTIONS_RUNTIME_TOKEN"]; + if (!token) { + throw new Error("Unable to get the ACTIONS_RUNTIME_TOKEN env variable"); + } + return token; } - exports2.clean = clean; - function clean(version, options) { - var s = parse2(version.trim().replace(/^[=v]+/, ""), options); - return s ? s.version : null; - } - exports2.SemVer = SemVer; - function SemVer(version, options) { - if (!options || typeof options !== "object") { - options = { - loose: !!options, - includePrerelease: false - }; - } - if (version instanceof SemVer) { - if (version.loose === options.loose) { - return version; - } else { - version = version.version; - } - } else if (typeof version !== "string") { - throw new TypeError("Invalid Version: " + version); - } - if (version.length > MAX_LENGTH) { - throw new TypeError("version is longer than " + MAX_LENGTH + " characters"); - } - if (!(this instanceof SemVer)) { - return new SemVer(version, options); - } - debug5("SemVer", version, options); - this.options = options; - this.loose = !!options.loose; - var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]); - if (!m) { - throw new TypeError("Invalid Version: " + version); - } - this.raw = version; - this.major = +m[1]; - this.minor = +m[2]; - this.patch = +m[3]; - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError("Invalid major version"); - } - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError("Invalid minor version"); - } - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError("Invalid patch version"); - } - if (!m[4]) { - this.prerelease = []; - } else { - this.prerelease = m[4].split(".").map(function(id) { - if (/^[0-9]+$/.test(id)) { - var num = +id; - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num; - } - } - return id; - }); - } - this.build = m[5] ? 
m[5].split(".") : []; - this.format(); + } +}); + +// node_modules/tslib/tslib.es6.mjs +var tslib_es6_exports = {}; +__export(tslib_es6_exports, { + __addDisposableResource: () => __addDisposableResource, + __assign: () => __assign, + __asyncDelegator: () => __asyncDelegator, + __asyncGenerator: () => __asyncGenerator, + __asyncValues: () => __asyncValues, + __await: () => __await, + __awaiter: () => __awaiter, + __classPrivateFieldGet: () => __classPrivateFieldGet, + __classPrivateFieldIn: () => __classPrivateFieldIn, + __classPrivateFieldSet: () => __classPrivateFieldSet, + __createBinding: () => __createBinding, + __decorate: () => __decorate, + __disposeResources: () => __disposeResources, + __esDecorate: () => __esDecorate, + __exportStar: () => __exportStar, + __extends: () => __extends, + __generator: () => __generator, + __importDefault: () => __importDefault, + __importStar: () => __importStar, + __makeTemplateObject: () => __makeTemplateObject, + __metadata: () => __metadata, + __param: () => __param, + __propKey: () => __propKey, + __read: () => __read, + __rest: () => __rest, + __rewriteRelativeImportExtension: () => __rewriteRelativeImportExtension, + __runInitializers: () => __runInitializers, + __setFunctionName: () => __setFunctionName, + __spread: () => __spread, + __spreadArray: () => __spreadArray, + __spreadArrays: () => __spreadArrays, + __values: () => __values2, + default: () => tslib_es6_default +}); +function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { + this.constructor = d; + } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} +function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; } - SemVer.prototype.format = function() { - this.version = this.major + "." + this.minor + "." + this.patch; - if (this.prerelease.length) { - this.version += "-" + this.prerelease.join("."); - } - return this.version; - }; - SemVer.prototype.toString = function() { - return this.version; + return t; +} +function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} +function __param(paramIndex, decorator) { + return function(target, key) { + decorator(target, key, paramIndex); + }; +} +function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { + function accept(f) { + if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); + return f; + } + var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; + var target = !descriptorIn && ctor ? contextIn["static"] ? 
ctor : ctor.prototype : null; + var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); + var _, done = false; + for (var i = decorators.length - 1; i >= 0; i--) { + var context2 = {}; + for (var p in contextIn) context2[p] = p === "access" ? {} : contextIn[p]; + for (var p in contextIn.access) context2.access[p] = contextIn.access[p]; + context2.addInitializer = function(f) { + if (done) throw new TypeError("Cannot add initializers after decoration has completed"); + extraInitializers.push(accept(f || null)); }; - SemVer.prototype.compare = function(other) { - debug5("SemVer.compare", this.version, this.options, other); - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options); + var result = (0, decorators[i])(kind === "accessor" ? { get: descriptor.get, set: descriptor.set } : descriptor[key], context2); + if (kind === "accessor") { + if (result === void 0) continue; + if (result === null || typeof result !== "object") throw new TypeError("Object expected"); + if (_ = accept(result.get)) descriptor.get = _; + if (_ = accept(result.set)) descriptor.set = _; + if (_ = accept(result.init)) initializers.unshift(_); + } else if (_ = accept(result)) { + if (kind === "field") initializers.unshift(_); + else descriptor[key] = _; + } + } + if (target) Object.defineProperty(target, contextIn.name, descriptor); + done = true; +} +function __runInitializers(thisArg, initializers, value) { + var useValue = arguments.length > 2; + for (var i = 0; i < initializers.length; i++) { + value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); + } + return useValue ? value : void 0; +} +function __propKey(x) { + return typeof x === "symbol" ? x : "".concat(x); +} +function __setFunctionName(f, name, prefix) { + if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; + return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); +} +function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} +function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve2) { + resolve2(value); + }); + } + return new (P || (P = Promise))(function(resolve2, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } - return this.compareMain(other) || this.comparePre(other); - }; - SemVer.prototype.compareMain = function(other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options); + } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); } - return compareIdentifiers(this.major, other.major) || compareIdentifiers(this.minor, other.minor) || compareIdentifiers(this.patch, other.patch); + } + function step(result) { + result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} +function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { + if (t[0] & 1) throw t[1]; + return t[1]; + }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? 
Iterator : Object).prototype); + return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { + return this; + }), g; + function verb(n) { + return function(v) { + return step([n, v]); }; - SemVer.prototype.comparePre = function(other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options); - } - if (this.prerelease.length && !other.prerelease.length) { - return -1; - } else if (!this.prerelease.length && other.prerelease.length) { - return 1; - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0; - } - var i2 = 0; - do { - var a = this.prerelease[i2]; - var b = other.prerelease[i2]; - debug5("prerelease compare", i2, a, b); - if (a === void 0 && b === void 0) { - return 0; - } else if (b === void 0) { - return 1; - } else if (a === void 0) { - return -1; - } else if (a === b) { + } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (g && (g = 0, op[0] && (_ = 0)), _) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: + case 1: + t = op; + break; + case 4: + _.label++; + return { value: op[1], done: false }; + case 5: + _.label++; + y = op[1]; + op = [0]; continue; - } else { - return compareIdentifiers(a, b); - } - } while (++i2); - }; - SemVer.prototype.compareBuild = function(other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options); - } - var i2 = 0; - do { - var a = this.build[i2]; - var b = other.build[i2]; - debug5("prerelease compare", i2, a, b); - if (a === void 0 && b === void 0) { - return 0; - } else if (b === void 0) { - return 1; - } else if (a === void 0) { - return -1; - } else if (a === b) { + case 7: + op = _.ops.pop(); + _.trys.pop(); continue; - } else { - return compareIdentifiers(a, b); - } - } while (++i2); - }; - SemVer.prototype.inc = function(release2, identifier) { - switch (release2) { - case "premajor": - this.prerelease.length = 0; - this.patch = 0; - this.minor = 0; - this.major++; - this.inc("pre", identifier); - break; - case "preminor": - this.prerelease.length = 0; - this.patch = 0; - this.minor++; - this.inc("pre", identifier); - break; - case "prepatch": - this.prerelease.length = 0; - this.inc("patch", identifier); - this.inc("pre", identifier); - break; - // If the input is a non-prerelease version, this acts the same as - // prepatch. - case "prerelease": - if (this.prerelease.length === 0) { - this.inc("patch", identifier); - } - this.inc("pre", identifier); - break; - case "major": - if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) { - this.major++; - } - this.minor = 0; - this.patch = 0; - this.prerelease = []; - break; - case "minor": - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++; - } - this.patch = 0; - this.prerelease = []; - break; - case "patch": - if (this.prerelease.length === 0) { - this.patch++; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { + _ = 0; + continue; } - this.prerelease = []; - break; - // This probably shouldn't be used publicly. - // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
- case "pre": - if (this.prerelease.length === 0) { - this.prerelease = [0]; - } else { - var i2 = this.prerelease.length; - while (--i2 >= 0) { - if (typeof this.prerelease[i2] === "number") { - this.prerelease[i2]++; - i2 = -2; - } - } - if (i2 === -1) { - this.prerelease.push(0); - } + if (op[0] === 3 && (!t || op[1] > t[0] && op[1] < t[3])) { + _.label = op[1]; + break; } - if (identifier) { - if (this.prerelease[0] === identifier) { - if (isNaN(this.prerelease[1])) { - this.prerelease = [identifier, 0]; - } - } else { - this.prerelease = [identifier, 0]; - } + if (op[0] === 6 && _.label < t[1]) { + _.label = t[1]; + t = op; + break; } - break; - default: - throw new Error("invalid increment argument: " + release2); - } - this.format(); - this.raw = this.version; - return this; - }; - exports2.inc = inc; - function inc(version, release2, loose, identifier) { - if (typeof loose === "string") { - identifier = loose; - loose = void 0; - } - try { - return new SemVer(version, loose).inc(release2, identifier).version; - } catch (er) { - return null; - } - } - exports2.diff = diff; - function diff(version1, version2) { - if (eq(version1, version2)) { - return null; - } else { - var v1 = parse2(version1); - var v2 = parse2(version2); - var prefix = ""; - if (v1.prerelease.length || v2.prerelease.length) { - prefix = "pre"; - var defaultResult = "prerelease"; - } - for (var key in v1) { - if (key === "major" || key === "minor" || key === "patch") { - if (v1[key] !== v2[key]) { - return prefix + key; - } + if (t && _.label < t[2]) { + _.label = t[2]; + _.ops.push(op); + break; } - } - return defaultResult; - } - } - exports2.compareIdentifiers = compareIdentifiers; - var numeric = /^[0-9]+$/; - function compareIdentifiers(a, b) { - var anum = numeric.test(a); - var bnum = numeric.test(b); - if (anum && bnum) { - a = +a; - b = +b; + if (t[2]) _.ops.pop(); + _.trys.pop(); + continue; } - return a === b ? 0 : anum && !bnum ? -1 : bnum && !anum ? 1 : a < b ? -1 : 1; - } - exports2.rcompareIdentifiers = rcompareIdentifiers; - function rcompareIdentifiers(a, b) { - return compareIdentifiers(b, a); + op = body.call(thisArg, _); + } catch (e) { + op = [6, e]; + y = 0; + } finally { + f = t = 0; } - exports2.major = major; - function major(a, loose) { - return new SemVer(a, loose).major; + if (op[0] & 5) throw op[1]; + return { value: op[0] ? op[1] : void 0, done: true }; + } +} +function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} +function __values2(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function() { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; } - exports2.minor = minor; - function minor(a, loose) { - return new SemVer(a, loose).minor; + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); +} +function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } catch (error3) { + e = { error: error3 }; + } finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } finally { + if (e) throw e.error; } - exports2.patch = patch; - function patch(a, loose) { - return new SemVer(a, loose).patch; + } + return ar; +} +function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} +function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} +function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; } - exports2.compare = compare; - function compare(a, b, loose) { - return new SemVer(a, loose).compare(new SemVer(b, loose)); - } - exports2.compareLoose = compareLoose; - function compareLoose(a, b) { - return compare(a, b, true); - } - exports2.compareBuild = compareBuild; - function compareBuild(a, b, loose) { - var versionA = new SemVer(a, loose); - var versionB = new SemVer(b, loose); - return versionA.compare(versionB) || versionA.compareBuild(versionB); - } - exports2.rcompare = rcompare; - function rcompare(a, b, loose) { - return compare(b, a, loose); + } + return to.concat(ar || Array.prototype.slice.call(from)); +} +function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} +function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function() { + return this; + }, i; + function awaitReturn(f) { + return function(v) { + return Promise.resolve(v).then(f, reject); + }; + } + function verb(n, f) { + if (g[n]) { + i[n] = function(v) { + return new Promise(function(a, b) { + q.push([n, v, a, b]) > 1 || resume(n, v); + }); + }; + if (f) i[n] = f(i[n]); } - exports2.sort = sort; - function sort(list, loose) { - return list.sort(function(a, b) { - return exports2.compareBuild(a, b, loose); - }); + } + function resume(n, v) { + try { + step(g[n](v)); + } catch (e) { + settle(q[0][3], e); } - exports2.rsort = rsort; - function rsort(list, loose) { - return list.sort(function(a, b) { - return exports2.compareBuild(b, a, loose); + } + function step(r) { + r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); + } + function fulfill(value) { + resume("next", value); + } + function reject(value) { + resume("throw", value); + } + function settle(f, v) { + if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); + } +} +function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function(e) { + throw e; + }), verb("return"), i[Symbol.iterator] = function() { + return this; + }, i; + function verb(n, f) { + i[n] = o[n] ? function(v) { + return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; + } : f; + } +} +function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values2 === "function" ? __values2(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { + return this; + }, i); + function verb(n) { + i[n] = o[n] && function(v) { + return new Promise(function(resolve2, reject) { + v = o[n](v), settle(resolve2, reject, v.done, v.value); }); + }; + } + function settle(resolve2, reject, d, v) { + Promise.resolve(v).then(function(v2) { + resolve2({ value: v2, done: d }); + }, reject); + } +} +function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { + Object.defineProperty(cooked, "raw", { value: raw }); + } else { + cooked.raw = raw; + } + return cooked; +} +function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + } + __setModuleDefault(result, mod); + return result; +} +function __importDefault(mod) { + return mod && mod.__esModule ? mod : { default: mod }; +} +function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} +function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value), value; +} +function __classPrivateFieldIn(state, receiver) { + if (receiver === null || typeof receiver !== "object" && typeof receiver !== "function") throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? 
receiver === state : state.has(receiver); +} +function __addDisposableResource(env, value, async) { + if (value !== null && value !== void 0) { + if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); + var dispose, inner; + if (async) { + if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); + dispose = value[Symbol.asyncDispose]; } - exports2.gt = gt; - function gt(a, b, loose) { - return compare(a, b, loose) > 0; - } - exports2.lt = lt; - function lt(a, b, loose) { - return compare(a, b, loose) < 0; - } - exports2.eq = eq; - function eq(a, b, loose) { - return compare(a, b, loose) === 0; - } - exports2.neq = neq; - function neq(a, b, loose) { - return compare(a, b, loose) !== 0; - } - exports2.gte = gte4; - function gte4(a, b, loose) { - return compare(a, b, loose) >= 0; - } - exports2.lte = lte; - function lte(a, b, loose) { - return compare(a, b, loose) <= 0; + if (dispose === void 0) { + if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); + dispose = value[Symbol.dispose]; + if (async) inner = dispose; } - exports2.cmp = cmp; - function cmp(a, op, b, loose) { - switch (op) { - case "===": - if (typeof a === "object") - a = a.version; - if (typeof b === "object") - b = b.version; - return a === b; - case "!==": - if (typeof a === "object") - a = a.version; - if (typeof b === "object") - b = b.version; - return a !== b; - case "": - case "=": - case "==": - return eq(a, b, loose); - case "!=": - return neq(a, b, loose); - case ">": - return gt(a, b, loose); - case ">=": - return gte4(a, b, loose); - case "<": - return lt(a, b, loose); - case "<=": - return lte(a, b, loose); - default: - throw new TypeError("Invalid operator: " + op); + if (typeof dispose !== "function") throw new TypeError("Object not disposable."); + if (inner) dispose = function() { + try { + inner.call(this); + } catch (e) { + return Promise.reject(e); } - } - exports2.Comparator = Comparator; - function Comparator(comp, options) { - if (!options || typeof options !== "object") { - options = { - loose: !!options, - includePrerelease: false - }; + }; + env.stack.push({ value, dispose, async }); + } else if (async) { + env.stack.push({ async: true }); + } + return value; +} +function __disposeResources(env) { + function fail(e) { + env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; + env.hasError = true; + } + var r, s = 0; + function next() { + while (r = env.stack.pop()) { + try { + if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next); + if (r.dispose) { + var result = r.dispose.call(r.value); + if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { + fail(e); + return next(); + }); + } else s |= 1; + } catch (e) { + fail(e); } - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp; - } else { - comp = comp.value; + } + if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve(); + if (env.hasError) throw env.error; + } + return next(); +} +function __rewriteRelativeImportExtension(path4, preserveJsx) { + if (typeof path4 === "string" && /^\.\.?\//.test(path4)) { + return path4.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function(m, tsx, d, ext, cm) { + return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : d + ext + "." 
+ cm.toLowerCase() + "js"; + }); + } + return path4; +} +var extendStatics, __assign, __createBinding, __setModuleDefault, ownKeys, _SuppressedError, tslib_es6_default; +var init_tslib_es6 = __esm({ + "node_modules/tslib/tslib.es6.mjs"() { + extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || { __proto__: [] } instanceof Array && function(d2, b2) { + d2.__proto__ = b2; + } || function(d2, b2) { + for (var p in b2) if (Object.prototype.hasOwnProperty.call(b2, p)) d2[p] = b2[p]; + }; + return extendStatics(d, b); + }; + __assign = function() { + __assign = Object.assign || function __assign2(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; } + return t; + }; + return __assign.apply(this, arguments); + }; + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; } - if (!(this instanceof Comparator)) { - return new Comparator(comp, options); - } - comp = comp.trim().split(/\s+/).join(" "); - debug5("comparator", comp, options); - this.options = options; - this.loose = !!options.loose; - this.parse(comp); - if (this.semver === ANY) { - this.value = ""; - } else { - this.value = this.operator + this.semver.version; + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + _SuppressedError = typeof SuppressedError === "function" ? 
SuppressedError : function(error3, suppressed, message) { + var e = new Error(message); + return e.name = "SuppressedError", e.error = error3, e.suppressed = suppressed, e; + }; + tslib_es6_default = { + __extends, + __assign, + __rest, + __decorate, + __param, + __esDecorate, + __runInitializers, + __propKey, + __setFunctionName, + __metadata, + __awaiter, + __generator, + __createBinding, + __exportStar, + __values: __values2, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, + __addDisposableResource, + __disposeResources, + __rewriteRelativeImportExtension + }; + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/abort-controller/AbortError.js +var require_AbortError = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/abort-controller/AbortError.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AbortError = void 0; + var AbortError = class extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; } - debug5("comp", this); + }; + exports2.AbortError = AbortError; + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/log.js +var require_log2 = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/log.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.log = log; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var node_os_1 = require("node:os"); + var node_util_1 = tslib_1.__importDefault(require("node:util")); + var node_process_1 = tslib_1.__importDefault(require("node:process")); + function log(message, ...args) { + node_process_1.default.stderr.write(`${node_util_1.default.format(message, ...args)}${node_os_1.EOL}`); } - var ANY = {}; - Comparator.prototype.parse = function(comp) { - var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]; - var m = comp.match(r); - if (!m) { - throw new TypeError("Invalid comparator: " + comp); - } - this.operator = m[1] !== void 0 ? 
m[1] : ""; - if (this.operator === "=") { - this.operator = ""; + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/debug.js +var require_debug2 = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/debug.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + var log_js_1 = require_log2(); + var debugEnvVariable = typeof process !== "undefined" && process.env && process.env.DEBUG || void 0; + var enabledString; + var enabledNamespaces = []; + var skippedNamespaces = []; + var debuggers = []; + if (debugEnvVariable) { + enable(debugEnvVariable); + } + var debugObj = Object.assign((namespace) => { + return createDebugger(namespace); + }, { + enable, + enabled, + disable, + log: log_js_1.log + }); + function enable(namespaces) { + enabledString = namespaces; + enabledNamespaces = []; + skippedNamespaces = []; + const namespaceList = namespaces.split(",").map((ns) => ns.trim()); + for (const ns of namespaceList) { + if (ns.startsWith("-")) { + skippedNamespaces.push(ns.substring(1)); + } else { + enabledNamespaces.push(ns); + } } - if (!m[2]) { - this.semver = ANY; - } else { - this.semver = new SemVer(m[2], this.options.loose); + for (const instance of debuggers) { + instance.enabled = enabled(instance.namespace); } - }; - Comparator.prototype.toString = function() { - return this.value; - }; - Comparator.prototype.test = function(version) { - debug5("Comparator.test", version, this.options.loose); - if (this.semver === ANY || version === ANY) { + } + function enabled(namespace) { + if (namespace.endsWith("*")) { return true; } - if (typeof version === "string") { - try { - version = new SemVer(version, this.options); - } catch (er) { + for (const skipped of skippedNamespaces) { + if (namespaceMatches(namespace, skipped)) { return false; } } - return cmp(version, this.operator, this.semver, this.options); - }; - Comparator.prototype.intersects = function(comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError("a Comparator is required"); - } - if (!options || typeof options !== "object") { - options = { - loose: !!options, - includePrerelease: false - }; - } - var rangeTmp; - if (this.operator === "") { - if (this.value === "") { - return true; - } - rangeTmp = new Range2(comp.value, options); - return satisfies2(this.value, rangeTmp, options); - } else if (comp.operator === "") { - if (comp.value === "") { + for (const enabledNamespace of enabledNamespaces) { + if (namespaceMatches(namespace, enabledNamespace)) { return true; } - rangeTmp = new Range2(this.value, options); - return satisfies2(comp.semver, rangeTmp, options); } - var sameDirectionIncreasing = (this.operator === ">=" || this.operator === ">") && (comp.operator === ">=" || comp.operator === ">"); - var sameDirectionDecreasing = (this.operator === "<=" || this.operator === "<") && (comp.operator === "<=" || comp.operator === "<"); - var sameSemVer = this.semver.version === comp.semver.version; - var differentDirectionsInclusive = (this.operator === ">=" || this.operator === "<=") && (comp.operator === ">=" || comp.operator === "<="); - var oppositeDirectionsLessThan = cmp(this.semver, "<", comp.semver, options) && ((this.operator === ">=" || this.operator === ">") && (comp.operator === "<=" || comp.operator === "<")); - var oppositeDirectionsGreaterThan = cmp(this.semver, ">", comp.semver, options) && ((this.operator === "<=" || this.operator === "<") && (comp.operator === ">=" || comp.operator === ">")); - return 
sameDirectionIncreasing || sameDirectionDecreasing || sameSemVer && differentDirectionsInclusive || oppositeDirectionsLessThan || oppositeDirectionsGreaterThan; - }; - exports2.Range = Range2; - function Range2(range, options) { - if (!options || typeof options !== "object") { - options = { - loose: !!options, - includePrerelease: false - }; + return false; + } + function namespaceMatches(namespace, patternToMatch) { + if (patternToMatch.indexOf("*") === -1) { + return namespace === patternToMatch; } - if (range instanceof Range2) { - if (range.loose === !!options.loose && range.includePrerelease === !!options.includePrerelease) { - return range; - } else { - return new Range2(range.raw, options); + let pattern = patternToMatch; + if (patternToMatch.indexOf("**") !== -1) { + const patternParts = []; + let lastCharacter = ""; + for (const character of patternToMatch) { + if (character === "*" && lastCharacter === "*") { + continue; + } else { + lastCharacter = character; + patternParts.push(character); + } } + pattern = patternParts.join(""); } - if (range instanceof Comparator) { - return new Range2(range.value, options); - } - if (!(this instanceof Range2)) { - return new Range2(range, options); - } - this.options = options; - this.loose = !!options.loose; - this.includePrerelease = !!options.includePrerelease; - this.raw = range.trim().split(/\s+/).join(" "); - this.set = this.raw.split("||").map(function(range2) { - return this.parseRange(range2.trim()); - }, this).filter(function(c) { - return c.length; - }); - if (!this.set.length) { - throw new TypeError("Invalid SemVer Range: " + this.raw); - } - this.format(); - } - Range2.prototype.format = function() { - this.range = this.set.map(function(comps) { - return comps.join(" ").trim(); - }).join("||").trim(); - return this.range; - }; - Range2.prototype.toString = function() { - return this.range; - }; - Range2.prototype.parseRange = function(range) { - var loose = this.options.loose; - var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]; - range = range.replace(hr, hyphenReplace); - debug5("hyphen replace", range); - range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace); - debug5("comparator trim", range, safeRe[t.COMPARATORTRIM]); - range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace); - range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace); - range = range.split(/\s+/).join(" "); - var compRe = loose ? 
safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]; - var set2 = range.split(" ").map(function(comp) { - return parseComparator(comp, this.options); - }, this).join(" ").split(/\s+/); - if (this.options.loose) { - set2 = set2.filter(function(comp) { - return !!comp.match(compRe); - }); - } - set2 = set2.map(function(comp) { - return new Comparator(comp, this.options); - }, this); - return set2; - }; - Range2.prototype.intersects = function(range, options) { - if (!(range instanceof Range2)) { - throw new TypeError("a Range is required"); - } - return this.set.some(function(thisComparators) { - return isSatisfiable(thisComparators, options) && range.set.some(function(rangeComparators) { - return isSatisfiable(rangeComparators, options) && thisComparators.every(function(thisComparator) { - return rangeComparators.every(function(rangeComparator) { - return thisComparator.intersects(rangeComparator, options); - }); - }); - }); - }); - }; - function isSatisfiable(comparators, options) { - var result = true; - var remainingComparators = comparators.slice(); - var testComparator = remainingComparators.pop(); - while (result && remainingComparators.length) { - result = remainingComparators.every(function(otherComparator) { - return testComparator.intersects(otherComparator, options); - }); - testComparator = remainingComparators.pop(); - } - return result; - } - exports2.toComparators = toComparators; - function toComparators(range, options) { - return new Range2(range, options).set.map(function(comp) { - return comp.map(function(c) { - return c.value; - }).join(" ").trim().split(" "); - }); - } - function parseComparator(comp, options) { - debug5("comp", comp, options); - comp = replaceCarets(comp, options); - debug5("caret", comp); - comp = replaceTildes(comp, options); - debug5("tildes", comp); - comp = replaceXRanges(comp, options); - debug5("xrange", comp); - comp = replaceStars(comp, options); - debug5("stars", comp); - return comp; - } - function isX(id) { - return !id || id.toLowerCase() === "x" || id === "*"; - } - function replaceTildes(comp, options) { - return comp.trim().split(/\s+/).map(function(comp2) { - return replaceTilde(comp2, options); - }).join(" "); - } - function replaceTilde(comp, options) { - var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]; - return comp.replace(r, function(_, M, m, p, pr) { - debug5("tilde", comp, _, M, m, p, pr); - var ret; - if (isX(M)) { - ret = ""; - } else if (isX(m)) { - ret = ">=" + M + ".0.0 <" + (+M + 1) + ".0.0"; - } else if (isX(p)) { - ret = ">=" + M + "." + m + ".0 <" + M + "." + (+m + 1) + ".0"; - } else if (pr) { - debug5("replaceTilde pr", pr); - ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + M + "." + (+m + 1) + ".0"; - } else { - ret = ">=" + M + "." + m + "." + p + " <" + M + "." + (+m + 1) + ".0"; - } - debug5("tilde return", ret); - return ret; - }); - } - function replaceCarets(comp, options) { - return comp.trim().split(/\s+/).map(function(comp2) { - return replaceCaret(comp2, options); - }).join(" "); - } - function replaceCaret(comp, options) { - debug5("caret", comp, options); - var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]; - return comp.replace(r, function(_, M, m, p, pr) { - debug5("caret", comp, _, M, m, p, pr); - var ret; - if (isX(M)) { - ret = ""; - } else if (isX(m)) { - ret = ">=" + M + ".0.0 <" + (+M + 1) + ".0.0"; - } else if (isX(p)) { - if (M === "0") { - ret = ">=" + M + "." + m + ".0 <" + M + "." + (+m + 1) + ".0"; - } else { - ret = ">=" + M + "." 
+ m + ".0 <" + (+M + 1) + ".0.0"; + let namespaceIndex = 0; + let patternIndex = 0; + const patternLength = pattern.length; + const namespaceLength = namespace.length; + let lastWildcard = -1; + let lastWildcardNamespace = -1; + while (namespaceIndex < namespaceLength && patternIndex < patternLength) { + if (pattern[patternIndex] === "*") { + lastWildcard = patternIndex; + patternIndex++; + if (patternIndex === patternLength) { + return true; } - } else if (pr) { - debug5("replaceCaret pr", pr); - if (M === "0") { - if (m === "0") { - ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + M + "." + m + "." + (+p + 1); - } else { - ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + M + "." + (+m + 1) + ".0"; + while (namespace[namespaceIndex] !== pattern[patternIndex]) { + namespaceIndex++; + if (namespaceIndex === namespaceLength) { + return false; } - } else { - ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + (+M + 1) + ".0.0"; } - } else { - debug5("no pr"); - if (M === "0") { - if (m === "0") { - ret = ">=" + M + "." + m + "." + p + " <" + M + "." + m + "." + (+p + 1); - } else { - ret = ">=" + M + "." + m + "." + p + " <" + M + "." + (+m + 1) + ".0"; + lastWildcardNamespace = namespaceIndex; + namespaceIndex++; + patternIndex++; + continue; + } else if (pattern[patternIndex] === namespace[namespaceIndex]) { + patternIndex++; + namespaceIndex++; + } else if (lastWildcard >= 0) { + patternIndex = lastWildcard + 1; + namespaceIndex = lastWildcardNamespace + 1; + if (namespaceIndex === namespaceLength) { + return false; + } + while (namespace[namespaceIndex] !== pattern[patternIndex]) { + namespaceIndex++; + if (namespaceIndex === namespaceLength) { + return false; } - } else { - ret = ">=" + M + "." + m + "." + p + " <" + (+M + 1) + ".0.0"; } + lastWildcardNamespace = namespaceIndex; + namespaceIndex++; + patternIndex++; + continue; + } else { + return false; } - debug5("caret return", ret); - return ret; - }); + } + const namespaceDone = namespaceIndex === namespace.length; + const patternDone = patternIndex === pattern.length; + const trailingWildCard = patternIndex === pattern.length - 1 && pattern[patternIndex] === "*"; + return namespaceDone && (patternDone || trailingWildCard); } - function replaceXRanges(comp, options) { - debug5("replaceXRanges", comp, options); - return comp.split(/\s+/).map(function(comp2) { - return replaceXRange(comp2, options); - }).join(" "); + function disable() { + const result = enabledString || ""; + enable(""); + return result; } - function replaceXRange(comp, options) { - comp = comp.trim(); - var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]; - return comp.replace(r, function(ret, gtlt, M, m, p, pr) { - debug5("xRange", comp, ret, gtlt, M, m, p, pr); - var xM = isX(M); - var xm = xM || isX(m); - var xp = xm || isX(p); - var anyX = xp; - if (gtlt === "=" && anyX) { - gtlt = ""; + function createDebugger(namespace) { + const newDebugger = Object.assign(debug5, { + enabled: enabled(namespace), + destroy, + log: debugObj.log, + namespace, + extend: extend3 + }); + function debug5(...args) { + if (!newDebugger.enabled) { + return; } - pr = options.includePrerelease ? 
"-0" : ""; - if (xM) { - if (gtlt === ">" || gtlt === "<") { - ret = "<0.0.0-0"; - } else { - ret = "*"; - } - } else if (gtlt && anyX) { - if (xm) { - m = 0; - } - p = 0; - if (gtlt === ">") { - gtlt = ">="; - if (xm) { - M = +M + 1; - m = 0; - p = 0; - } else { - m = +m + 1; - p = 0; - } - } else if (gtlt === "<=") { - gtlt = "<"; - if (xm) { - M = +M + 1; - } else { - m = +m + 1; - } - } - ret = gtlt + M + "." + m + "." + p + pr; - } else if (xm) { - ret = ">=" + M + ".0.0" + pr + " <" + (+M + 1) + ".0.0" + pr; - } else if (xp) { - ret = ">=" + M + "." + m + ".0" + pr + " <" + M + "." + (+m + 1) + ".0" + pr; + if (args.length > 0) { + args[0] = `${namespace} ${args[0]}`; } - debug5("xRange return", ret); - return ret; - }); - } - function replaceStars(comp, options) { - debug5("replaceStars", comp, options); - return comp.trim().replace(safeRe[t.STAR], ""); - } - function hyphenReplace($0, from, fM, fm, fp, fpr, fb, to, tM, tm, tp, tpr, tb) { - if (isX(fM)) { - from = ""; - } else if (isX(fm)) { - from = ">=" + fM + ".0.0"; - } else if (isX(fp)) { - from = ">=" + fM + "." + fm + ".0"; - } else { - from = ">=" + from; - } - if (isX(tM)) { - to = ""; - } else if (isX(tm)) { - to = "<" + (+tM + 1) + ".0.0"; - } else if (isX(tp)) { - to = "<" + tM + "." + (+tm + 1) + ".0"; - } else if (tpr) { - to = "<=" + tM + "." + tm + "." + tp + "-" + tpr; - } else { - to = "<=" + to; + newDebugger.log(...args); } - return (from + " " + to).trim(); + debuggers.push(newDebugger); + return newDebugger; } - Range2.prototype.test = function(version) { - if (!version) { - return false; - } - if (typeof version === "string") { - try { - version = new SemVer(version, this.options); - } catch (er) { - return false; - } - } - for (var i2 = 0; i2 < this.set.length; i2++) { - if (testSet(this.set[i2], version, this.options)) { - return true; - } + function destroy() { + const index = debuggers.indexOf(this); + if (index >= 0) { + debuggers.splice(index, 1); + return true; } return false; + } + function extend3(namespace) { + const newDebugger = createDebugger(`${this.namespace}:${namespace}`); + newDebugger.log = this.log; + return newDebugger; + } + exports2.default = debugObj; + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/logger.js +var require_logger = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/logger.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.TypeSpecRuntimeLogger = void 0; + exports2.createLoggerContext = createLoggerContext; + exports2.setLogLevel = setLogLevel; + exports2.getLogLevel = getLogLevel; + exports2.createClientLogger = createClientLogger; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var debug_js_1 = tslib_1.__importDefault(require_debug2()); + var TYPESPEC_RUNTIME_LOG_LEVELS = ["verbose", "info", "warning", "error"]; + var levelMap = { + verbose: 400, + info: 300, + warning: 200, + error: 100 }; - function testSet(set2, version, options) { - for (var i2 = 0; i2 < set2.length; i2++) { - if (!set2[i2].test(version)) { - return false; - } - } - if (version.prerelease.length && !options.includePrerelease) { - for (i2 = 0; i2 < set2.length; i2++) { - debug5(set2[i2].semver); - if (set2[i2].semver === ANY) { - continue; - } - if (set2[i2].semver.prerelease.length > 0) { - var allowed = set2[i2].semver; - if (allowed.major === version.major && allowed.minor === version.minor && allowed.patch === version.patch) { - return true; - } - } - } - return 
false; - } - return true; + function patchLogMethod(parent, child) { + child.log = (...args) => { + parent.log(...args); + }; } - exports2.satisfies = satisfies2; - function satisfies2(version, range, options) { - try { - range = new Range2(range, options); - } catch (er) { - return false; - } - return range.test(version); + function isTypeSpecRuntimeLogLevel(level) { + return TYPESPEC_RUNTIME_LOG_LEVELS.includes(level); } - exports2.maxSatisfying = maxSatisfying; - function maxSatisfying(versions, range, options) { - var max = null; - var maxSV = null; - try { - var rangeObj = new Range2(range, options); - } catch (er) { - return null; - } - versions.forEach(function(v) { - if (rangeObj.test(v)) { - if (!max || maxSV.compare(v) === -1) { - max = v; - maxSV = new SemVer(max, options); + function createLoggerContext(options) { + const registeredLoggers = /* @__PURE__ */ new Set(); + const logLevelFromEnv = typeof process !== "undefined" && process.env && process.env[options.logLevelEnvVarName] || void 0; + let logLevel; + const clientLogger = (0, debug_js_1.default)(options.namespace); + clientLogger.log = (...args) => { + debug_js_1.default.log(...args); + }; + function contextSetLogLevel(level) { + if (level && !isTypeSpecRuntimeLogLevel(level)) { + throw new Error(`Unknown log level '${level}'. Acceptable values: ${TYPESPEC_RUNTIME_LOG_LEVELS.join(",")}`); + } + logLevel = level; + const enabledNamespaces = []; + for (const logger of registeredLoggers) { + if (shouldEnable(logger)) { + enabledNamespaces.push(logger.namespace); } } - }); - return max; - } - exports2.minSatisfying = minSatisfying; - function minSatisfying(versions, range, options) { - var min = null; - var minSV = null; - try { - var rangeObj = new Range2(range, options); - } catch (er) { - return null; + debug_js_1.default.enable(enabledNamespaces.join(",")); } - versions.forEach(function(v) { - if (rangeObj.test(v)) { - if (!min || minSV.compare(v) === 1) { - min = v; - minSV = new SemVer(min, options); - } + if (logLevelFromEnv) { + if (isTypeSpecRuntimeLogLevel(logLevelFromEnv)) { + contextSetLogLevel(logLevelFromEnv); + } else { + console.error(`${options.logLevelEnvVarName} set to unknown log level '${logLevelFromEnv}'; logging is not enabled. 
Acceptable values: ${TYPESPEC_RUNTIME_LOG_LEVELS.join(", ")}.`); } - }); - return min; - } - exports2.minVersion = minVersion; - function minVersion(range, loose) { - range = new Range2(range, loose); - var minver = new SemVer("0.0.0"); - if (range.test(minver)) { - return minver; } - minver = new SemVer("0.0.0-0"); - if (range.test(minver)) { - return minver; + function shouldEnable(logger) { + return Boolean(logLevel && levelMap[logger.level] <= levelMap[logLevel]); } - minver = null; - for (var i2 = 0; i2 < range.set.length; ++i2) { - var comparators = range.set[i2]; - comparators.forEach(function(comparator) { - var compver = new SemVer(comparator.semver.version); - switch (comparator.operator) { - case ">": - if (compver.prerelease.length === 0) { - compver.patch++; - } else { - compver.prerelease.push(0); - } - compver.raw = compver.format(); - /* fallthrough */ - case "": - case ">=": - if (!minver || gt(minver, compver)) { - minver = compver; - } - break; - case "<": - case "<=": - break; - /* istanbul ignore next */ - default: - throw new Error("Unexpected operation: " + comparator.operator); - } + function createLogger2(parent, level) { + const logger = Object.assign(parent.extend(level), { + level }); + patchLogMethod(parent, logger); + if (shouldEnable(logger)) { + const enabledNamespaces = debug_js_1.default.disable(); + debug_js_1.default.enable(enabledNamespaces + "," + logger.namespace); + } + registeredLoggers.add(logger); + return logger; } - if (minver && range.test(minver)) { - return minver; + function contextGetLogLevel() { + return logLevel; } - return null; - } - exports2.validRange = validRange; - function validRange(range, options) { - try { - return new Range2(range, options).range || "*"; - } catch (er) { - return null; + function contextCreateClientLogger(namespace) { + const clientRootLogger = clientLogger.extend(namespace); + patchLogMethod(clientLogger, clientRootLogger); + return { + error: createLogger2(clientRootLogger, "error"), + warning: createLogger2(clientRootLogger, "warning"), + info: createLogger2(clientRootLogger, "info"), + verbose: createLogger2(clientRootLogger, "verbose") + }; } + return { + setLogLevel: contextSetLogLevel, + getLogLevel: contextGetLogLevel, + createClientLogger: contextCreateClientLogger, + logger: clientLogger + }; } - exports2.ltr = ltr; - function ltr(version, range, options) { - return outside(version, range, "<", options); + var context2 = createLoggerContext({ + logLevelEnvVarName: "TYPESPEC_RUNTIME_LOG_LEVEL", + namespace: "typeSpecRuntime" + }); + exports2.TypeSpecRuntimeLogger = context2.logger; + function setLogLevel(logLevel) { + context2.setLogLevel(logLevel); } - exports2.gtr = gtr; - function gtr(version, range, options) { - return outside(version, range, ">", options); + function getLogLevel() { + return context2.getLogLevel(); } - exports2.outside = outside; - function outside(version, range, hilo, options) { - version = new SemVer(version, options); - range = new Range2(range, options); - var gtfn, ltefn, ltfn, comp, ecomp; - switch (hilo) { - case ">": - gtfn = gt; - ltefn = lte; - ltfn = lt; - comp = ">"; - ecomp = ">="; - break; - case "<": - gtfn = lt; - ltefn = gte4; - ltfn = gt; - comp = "<"; - ecomp = "<="; - break; - default: - throw new TypeError('Must provide a hilo val of "<" or ">"'); - } - if (satisfies2(version, range, options)) { - return false; + function createClientLogger(namespace) { + return context2.createClientLogger(namespace); + } + } +}); + +// 
node_modules/@typespec/ts-http-runtime/dist/commonjs/httpHeaders.js +var require_httpHeaders = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/httpHeaders.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createHttpHeaders = createHttpHeaders; + function normalizeName(name) { + return name.toLowerCase(); + } + function* headerIterator(map2) { + for (const entry of map2.values()) { + yield [entry.name, entry.value]; } - for (var i2 = 0; i2 < range.set.length; ++i2) { - var comparators = range.set[i2]; - var high = null; - var low = null; - comparators.forEach(function(comparator) { - if (comparator.semver === ANY) { - comparator = new Comparator(">=0.0.0"); - } - high = high || comparator; - low = low || comparator; - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator; - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator; + } + var HttpHeadersImpl = class { + _headersMap; + constructor(rawHeaders) { + this._headersMap = /* @__PURE__ */ new Map(); + if (rawHeaders) { + for (const headerName of Object.keys(rawHeaders)) { + this.set(headerName, rawHeaders[headerName]); } - }); - if (high.operator === comp || high.operator === ecomp) { - return false; - } - if ((!low.operator || low.operator === comp) && ltefn(version, low.semver)) { - return false; - } else if (low.operator === ecomp && ltfn(version, low.semver)) { - return false; } } - return true; - } - exports2.prerelease = prerelease; - function prerelease(version, options) { - var parsed = parse2(version, options); - return parsed && parsed.prerelease.length ? parsed.prerelease : null; - } - exports2.intersects = intersects; - function intersects(r1, r2, options) { - r1 = new Range2(r1, options); - r2 = new Range2(r2, options); - return r1.intersects(r2); - } - exports2.coerce = coerce2; - function coerce2(version, options) { - if (version instanceof SemVer) { - return version; + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param name - The name of the header to set. This value is case-insensitive. + * @param value - The value of the header to set. + */ + set(name, value) { + this._headersMap.set(normalizeName(name), { name, value: String(value).trim() }); } - if (typeof version === "number") { - version = String(version); + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param name - The name of the header. This value is case-insensitive. + */ + get(name) { + return this._headersMap.get(normalizeName(name))?.value; } - if (typeof version !== "string") { - return null; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + * @param name - The name of the header to set. This value is case-insensitive. + */ + has(name) { + return this._headersMap.has(normalizeName(name)); } - options = options || {}; - var match = null; - if (!options.rtl) { - match = version.match(safeRe[t.COERCE]); - } else { - var next; - while ((next = safeRe[t.COERCERTL].exec(version)) && (!match || match.index + match[0].length !== version.length)) { - if (!match || next.index + next[0].length !== match.index + match[0].length) { - match = next; + /** + * Remove the header with the provided headerName. + * @param name - The name of the header to remove. 
+ */ + delete(name) { + this._headersMap.delete(normalizeName(name)); + } + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJSON(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const entry of this._headersMap.values()) { + result[entry.name] = entry.value; + } + } else { + for (const [normalizedName, entry] of this._headersMap) { + result[normalizedName] = entry.value; } - safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length; } - safeRe[t.COERCERTL].lastIndex = -1; + return result; } - if (match === null) { - return null; + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJSON({ preserveCase: true })); } - return parse2(match[2] + "." + (match[3] || "0") + "." + (match[4] || "0"), options); + /** + * Iterate over tuples of header [name, value] pairs. + */ + [Symbol.iterator]() { + return headerIterator(this._headersMap); + } + }; + function createHttpHeaders(rawHeaders) { + return new HttpHeadersImpl(rawHeaders); } } }); -// node_modules/@actions/cache/lib/internal/constants.js -var require_constants12 = __commonJS({ - "node_modules/@actions/cache/lib/internal/constants.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/schemes.js +var require_schemes = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/schemes.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.CacheFileSizeLimit = exports2.ManifestFilename = exports2.TarFilename = exports2.SystemTarPathOnWindows = exports2.GnuTarPathOnWindows = exports2.SocketTimeout = exports2.DefaultRetryDelay = exports2.DefaultRetryAttempts = exports2.ArchiveToolType = exports2.CompressionMethod = exports2.CacheFilename = void 0; - var CacheFilename; - (function(CacheFilename2) { - CacheFilename2["Gzip"] = "cache.tgz"; - CacheFilename2["Zstd"] = "cache.tzst"; - })(CacheFilename || (exports2.CacheFilename = CacheFilename = {})); - var CompressionMethod; - (function(CompressionMethod2) { - CompressionMethod2["Gzip"] = "gzip"; - CompressionMethod2["ZstdWithoutLong"] = "zstd-without-long"; - CompressionMethod2["Zstd"] = "zstd"; - })(CompressionMethod || (exports2.CompressionMethod = CompressionMethod = {})); - var ArchiveToolType; - (function(ArchiveToolType2) { - ArchiveToolType2["GNU"] = "gnu"; - ArchiveToolType2["BSD"] = "bsd"; - })(ArchiveToolType || (exports2.ArchiveToolType = ArchiveToolType = {})); - exports2.DefaultRetryAttempts = 2; - exports2.DefaultRetryDelay = 5e3; - exports2.SocketTimeout = 5e3; - exports2.GnuTarPathOnWindows = `${process.env["PROGRAMFILES"]}\\Git\\usr\\bin\\tar.exe`; - exports2.SystemTarPathOnWindows = `${process.env["SYSTEMDRIVE"]}\\Windows\\System32\\tar.exe`; - exports2.TarFilename = "cache.tar"; - exports2.ManifestFilename = "manifest.txt"; - exports2.CacheFileSizeLimit = 10 * Math.pow(1024, 3); } }); -// node_modules/@actions/cache/lib/internal/cacheUtils.js -var require_cacheUtils = __commonJS({ - "node_modules/@actions/cache/lib/internal/cacheUtils.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/oauth2Flows.js +var require_oauth2Flows = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/oauth2Flows.js"(exports2) { "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/uuidUtils.js +var require_uuidUtils = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/uuidUtils.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.randomUUID = randomUUID; + function randomUUID() { + return crypto.randomUUID(); + } + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/pipelineRequest.js +var require_pipelineRequest = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/pipelineRequest.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createPipelineRequest = createPipelineRequest; + var httpHeaders_js_1 = require_httpHeaders(); + var uuidUtils_js_1 = require_uuidUtils(); + var PipelineRequestImpl = class { + url; + method; + headers; + timeout; + withCredentials; + body; + multipartBody; + formData; + streamResponseStatusCodes; + enableBrowserStreams; + proxySettings; + disableKeepAlive; + abortSignal; + requestId; + allowInsecureConnection; + onUploadProgress; + onDownloadProgress; + requestOverrides; + authSchemes; + constructor(options) { + this.url = options.url; + this.body = options.body; + this.headers = options.headers ?? (0, httpHeaders_js_1.createHttpHeaders)(); + this.method = options.method ?? "GET"; + this.timeout = options.timeout ?? 0; + this.multipartBody = options.multipartBody; + this.formData = options.formData; + this.disableKeepAlive = options.disableKeepAlive ?? false; + this.proxySettings = options.proxySettings; + this.streamResponseStatusCodes = options.streamResponseStatusCodes; + this.withCredentials = options.withCredentials ?? false; + this.abortSignal = options.abortSignal; + this.onUploadProgress = options.onUploadProgress; + this.onDownloadProgress = options.onDownloadProgress; + this.requestId = options.requestId || (0, uuidUtils_js_1.randomUUID)(); + this.allowInsecureConnection = options.allowInsecureConnection ?? false; + this.enableBrowserStreams = options.enableBrowserStreams ?? false; + this.requestOverrides = options.requestOverrides; + this.authSchemes = options.authSchemes; } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + }; + function createPipelineRequest(options) { + return new PipelineRequestImpl(options); + } + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/pipeline.js +var require_pipeline = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/pipeline.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createEmptyPipeline = createEmptyPipeline; + var ValidPhaseNames = /* @__PURE__ */ new Set(["Deserialize", "Serialize", "Retry", "Sign"]); + var HttpPipeline = class _HttpPipeline { + _policies = []; + _orderedPolicies; + constructor(policies) { + this._policies = policies?.slice(0) ?? []; + this._orderedPolicies = void 0; + } + addPolicy(policy, options = {}) { + if (options.phase && options.afterPhase) { + throw new Error("Policies inside a phase cannot specify afterPhase."); } - __setModuleDefault2(result, mod); - return result; - }; - })(); - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve2) { - resolve2(value); + if (options.phase && !ValidPhaseNames.has(options.phase)) { + throw new Error(`Invalid phase name: ${options.phase}`); + } + if (options.afterPhase && !ValidPhaseNames.has(options.afterPhase)) { + throw new Error(`Invalid afterPhase name: ${options.afterPhase}`); + } + this._policies.push({ + policy, + options }); + this._orderedPolicies = void 0; } - return new (P || (P = Promise))(function(resolve2, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); + removePolicy(options) { + const removedPolicies = []; + this._policies = this._policies.filter((policyDescriptor) => { + if (options.name && policyDescriptor.policy.name === options.name || options.phase && policyDescriptor.options.phase === options.phase) { + removedPolicies.push(policyDescriptor.policy); + return false; + } else { + return true; } + }); + this._orderedPolicies = void 0; + return removedPolicies; + } + sendRequest(httpClient, request2) { + const policies = this.getOrderedPolicies(); + const pipeline = policies.reduceRight((next, policy) => { + return (req) => { + return policy.sendRequest(req, next); + }; + }, (req) => httpClient.sendRequest(req)); + return pipeline(request2); + } + getOrderedPolicies() { + if (!this._orderedPolicies) { + this._orderedPolicies = this.orderPolicies(); } - function step(result) { - result.done ? 
resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); - } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - var __asyncValues2 = exports2 && exports2.__asyncValues || function(o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { - return this; - }, i); - function verb(n) { - i[n] = o[n] && function(v) { - return new Promise(function(resolve2, reject) { - v = o[n](v), settle(resolve2, reject, v.done, v.value); - }); - }; + return this._orderedPolicies; } - function settle(resolve2, reject, d, v) { - Promise.resolve(v).then(function(v2) { - resolve2({ value: v2, done: d }); - }, reject); + clone() { + return new _HttpPipeline(this._policies); } - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createTempDirectory = createTempDirectory; - exports2.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; - exports2.resolvePaths = resolvePaths; - exports2.unlinkFile = unlinkFile; - exports2.getCompressionMethod = getCompressionMethod; - exports2.getCacheFileName = getCacheFileName; - exports2.getGnuTarPathOnWindows = getGnuTarPathOnWindows; - exports2.assertDefined = assertDefined; - exports2.getCacheVersion = getCacheVersion; - exports2.getRuntimeToken = getRuntimeToken; - var core12 = __importStar2(require_core()); - var exec3 = __importStar2(require_exec()); - var glob = __importStar2(require_glob()); - var io4 = __importStar2(require_io()); - var crypto2 = __importStar2(require("crypto")); - var fs = __importStar2(require("fs")); - var path3 = __importStar2(require("path")); - var semver6 = __importStar2(require_semver3()); - var util = __importStar2(require("util")); - var constants_1 = require_constants12(); - var versionSalt = "1.0"; - function createTempDirectory() { - return __awaiter2(this, void 0, void 0, function* () { - const IS_WINDOWS = process.platform === "win32"; - let tempDirectory = process.env["RUNNER_TEMP"] || ""; - if (!tempDirectory) { - let baseLocation; - if (IS_WINDOWS) { - baseLocation = process.env["USERPROFILE"] || "C:\\"; + static create() { + return new _HttpPipeline(); + } + orderPolicies() { + const result = []; + const policyMap = /* @__PURE__ */ new Map(); + function createPhase(name) { + return { + name, + policies: /* @__PURE__ */ new Set(), + hasRun: false, + hasAfterPolicies: false + }; + } + const serializePhase = createPhase("Serialize"); + const noPhase = createPhase("None"); + const deserializePhase = createPhase("Deserialize"); + const retryPhase = createPhase("Retry"); + const signPhase = createPhase("Sign"); + const orderedPhases = [serializePhase, noPhase, deserializePhase, retryPhase, signPhase]; + function getPhase(phase) { + if (phase === "Retry") { + return retryPhase; + } else if (phase === "Serialize") { + return serializePhase; + } else if (phase === "Deserialize") { + return deserializePhase; + } else if (phase === "Sign") { + return signPhase; } else { - if (process.platform === "darwin") { - baseLocation = "/Users"; - } else { - baseLocation = "/home"; - } + return noPhase; } - tempDirectory = path3.join(baseLocation, "actions", "temp"); } - const dest = path3.join(tempDirectory, crypto2.randomUUID()); - yield io4.mkdirP(dest); - return dest; - }); - } - function 
getArchiveFileSizeInBytes(filePath) { - return fs.statSync(filePath).size; - } - function resolvePaths(patterns) { - return __awaiter2(this, void 0, void 0, function* () { - var _a, e_1, _b, _c; - var _d; - const paths = []; - const workspace = (_d = process.env["GITHUB_WORKSPACE"]) !== null && _d !== void 0 ? _d : process.cwd(); - const globber = yield glob.create(patterns.join("\n"), { - implicitDescendants: false - }); - try { - for (var _e = true, _f = __asyncValues2(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) { - _c = _g.value; - _e = false; - const file = _c; - const relativeFile = path3.relative(workspace, file).replace(new RegExp(`\\${path3.sep}`, "g"), "/"); - core12.debug(`Matched: ${relativeFile}`); - if (relativeFile === "") { - paths.push("."); - } else { - paths.push(`${relativeFile}`); + for (const descriptor of this._policies) { + const policy = descriptor.policy; + const options = descriptor.options; + const policyName = policy.name; + if (policyMap.has(policyName)) { + throw new Error("Duplicate policy names not allowed in pipeline"); + } + const node = { + policy, + dependsOn: /* @__PURE__ */ new Set(), + dependants: /* @__PURE__ */ new Set() + }; + if (options.afterPhase) { + node.afterPhase = getPhase(options.afterPhase); + node.afterPhase.hasAfterPolicies = true; + } + policyMap.set(policyName, node); + const phase = getPhase(options.phase); + phase.policies.add(node); + } + for (const descriptor of this._policies) { + const { policy, options } = descriptor; + const policyName = policy.name; + const node = policyMap.get(policyName); + if (!node) { + throw new Error(`Missing node for policy ${policyName}`); + } + if (options.afterPolicies) { + for (const afterPolicyName of options.afterPolicies) { + const afterNode = policyMap.get(afterPolicyName); + if (afterNode) { + node.dependsOn.add(afterNode); + afterNode.dependants.add(node); + } } } - } catch (e_1_1) { - e_1 = { error: e_1_1 }; - } finally { - try { - if (!_e && !_a && (_b = _f.return)) yield _b.call(_f); - } finally { - if (e_1) throw e_1.error; + if (options.beforePolicies) { + for (const beforePolicyName of options.beforePolicies) { + const beforeNode = policyMap.get(beforePolicyName); + if (beforeNode) { + beforeNode.dependsOn.add(node); + node.dependants.add(beforeNode); + } + } } } - return paths; - }); - } - function unlinkFile(filePath) { - return __awaiter2(this, void 0, void 0, function* () { - return util.promisify(fs.unlink)(filePath); - }); - } - function getVersion(app_1) { - return __awaiter2(this, arguments, void 0, function* (app, additionalArgs = []) { - let versionOutput = ""; - additionalArgs.push("--version"); - core12.debug(`Checking ${app} ${additionalArgs.join(" ")}`); - try { - yield exec3.exec(`${app}`, additionalArgs, { - ignoreReturnCode: true, - silent: true, - listeners: { - stdout: (data) => versionOutput += data.toString(), - stderr: (data) => versionOutput += data.toString() + function walkPhase(phase) { + phase.hasRun = true; + for (const node of phase.policies) { + if (node.afterPhase && (!node.afterPhase.hasRun || node.afterPhase.policies.size)) { + continue; } - }); - } catch (err) { - core12.debug(err.message); + if (node.dependsOn.size === 0) { + result.push(node.policy); + for (const dependant of node.dependants) { + dependant.dependsOn.delete(node); + } + policyMap.delete(node.policy.name); + phase.policies.delete(node); + } + } } - versionOutput = versionOutput.trim(); - core12.debug(versionOutput); - return versionOutput; - }); - } 
- function getCompressionMethod() { - return __awaiter2(this, void 0, void 0, function* () { - const versionOutput = yield getVersion("zstd", ["--quiet"]); - const version = semver6.clean(versionOutput); - core12.debug(`zstd version: ${version}`); - if (versionOutput === "") { - return constants_1.CompressionMethod.Gzip; - } else { - return constants_1.CompressionMethod.ZstdWithoutLong; + function walkPhases() { + for (const phase of orderedPhases) { + walkPhase(phase); + if (phase.policies.size > 0 && phase !== noPhase) { + if (!noPhase.hasRun) { + walkPhase(noPhase); + } + return; + } + if (phase.hasAfterPolicies) { + walkPhase(noPhase); + } + } } - }); - } - function getCacheFileName(compressionMethod) { - return compressionMethod === constants_1.CompressionMethod.Gzip ? constants_1.CacheFilename.Gzip : constants_1.CacheFilename.Zstd; - } - function getGnuTarPathOnWindows() { - return __awaiter2(this, void 0, void 0, function* () { - if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { - return constants_1.GnuTarPathOnWindows; + let iteration = 0; + while (policyMap.size > 0) { + iteration++; + const initialResultLength = result.length; + walkPhases(); + if (result.length <= initialResultLength && iteration > 1) { + throw new Error("Cannot satisfy policy dependencies due to requirements cycle."); + } } - const versionOutput = yield getVersion("tar"); - return versionOutput.toLowerCase().includes("gnu tar") ? io4.which("tar") : ""; - }); - } - function assertDefined(name, value) { - if (value === void 0) { - throw Error(`Expected ${name} but value was undefiend`); - } - return value; - } - function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { - const components = paths.slice(); - if (compressionMethod) { - components.push(compressionMethod); - } - if (process.platform === "win32" && !enableCrossOsArchive) { - components.push("windows-only"); - } - components.push(versionSalt); - return crypto2.createHash("sha256").update(components.join("|")).digest("hex"); - } - function getRuntimeToken() { - const token = process.env["ACTIONS_RUNTIME_TOKEN"]; - if (!token) { - throw new Error("Unable to get the ACTIONS_RUNTIME_TOKEN env variable"); + return result; } - return token; + }; + function createEmptyPipeline() { + return HttpPipeline.create(); } } }); -// node_modules/tslib/tslib.es6.mjs -var tslib_es6_exports = {}; -__export(tslib_es6_exports, { - __addDisposableResource: () => __addDisposableResource, - __assign: () => __assign, - __asyncDelegator: () => __asyncDelegator, - __asyncGenerator: () => __asyncGenerator, - __asyncValues: () => __asyncValues, - __await: () => __await, - __awaiter: () => __awaiter, - __classPrivateFieldGet: () => __classPrivateFieldGet, - __classPrivateFieldIn: () => __classPrivateFieldIn, - __classPrivateFieldSet: () => __classPrivateFieldSet, - __createBinding: () => __createBinding, - __decorate: () => __decorate, - __disposeResources: () => __disposeResources, - __esDecorate: () => __esDecorate, - __exportStar: () => __exportStar, - __extends: () => __extends, - __generator: () => __generator, - __importDefault: () => __importDefault, - __importStar: () => __importStar, - __makeTemplateObject: () => __makeTemplateObject, - __metadata: () => __metadata, - __param: () => __param, - __propKey: () => __propKey, - __read: () => __read, - __rest: () => __rest, - __rewriteRelativeImportExtension: () => __rewriteRelativeImportExtension, - __runInitializers: () => __runInitializers, - __setFunctionName: () => __setFunctionName, - __spread: 
() => __spread, - __spreadArray: () => __spreadArray, - __spreadArrays: () => __spreadArrays, - __values: () => __values2, - default: () => tslib_es6_default -}); -function __extends(d, b) { - if (typeof b !== "function" && b !== null) - throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); - extendStatics(d, b); - function __() { - this.constructor = d; - } - d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); -} -function __rest(s, e) { - var t = {}; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) - t[p] = s[p]; - if (s != null && typeof Object.getOwnPropertySymbols === "function") - for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { - if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) - t[p[i]] = s[p[i]]; +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/object.js +var require_object = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/object.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.isObject = isObject2; + function isObject2(input) { + return typeof input === "object" && input !== null && !Array.isArray(input) && !(input instanceof RegExp) && !(input instanceof Date); } - return t; -} -function __decorate(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); - else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; -} -function __param(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; -} -function __esDecorate(ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { - function accept(f) { - if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); - return f; } - var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; - var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; - var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); - var _, done = false; - for (var i = decorators.length - 1; i >= 0; i--) { - var context2 = {}; - for (var p in contextIn) context2[p] = p === "access" ? {} : contextIn[p]; - for (var p in contextIn.access) context2.access[p] = contextIn.access[p]; - context2.addInitializer = function(f) { - if (done) throw new TypeError("Cannot add initializers after decoration has completed"); - extraInitializers.push(accept(f || null)); - }; - var result = (0, decorators[i])(kind === "accessor" ? 
{ get: descriptor.get, set: descriptor.set } : descriptor[key], context2); - if (kind === "accessor") { - if (result === void 0) continue; - if (result === null || typeof result !== "object") throw new TypeError("Object expected"); - if (_ = accept(result.get)) descriptor.get = _; - if (_ = accept(result.set)) descriptor.set = _; - if (_ = accept(result.init)) initializers.unshift(_); - } else if (_ = accept(result)) { - if (kind === "field") initializers.unshift(_); - else descriptor[key] = _; +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/error.js +var require_error = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/error.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.isError = isError; + var object_js_1 = require_object(); + function isError(e) { + if ((0, object_js_1.isObject)(e)) { + const hasName = typeof e.name === "string"; + const hasMessage = typeof e.message === "string"; + return hasName && hasMessage; + } + return false; } } - if (target) Object.defineProperty(target, contextIn.name, descriptor); - done = true; -} -function __runInitializers(thisArg, initializers, value) { - var useValue = arguments.length > 2; - for (var i = 0; i < initializers.length; i++) { - value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); - } - return useValue ? value : void 0; -} -function __propKey(x) { - return typeof x === "symbol" ? x : "".concat(x); -} -function __setFunctionName(f, name, prefix) { - if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; - return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); -} -function __metadata(metadataKey, metadataValue) { - if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); -} -function __awaiter(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? 
value : new P(function(resolve2) { - resolve2(value); - }); +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/inspect.js +var require_inspect = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/inspect.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.custom = void 0; + var node_util_1 = require("node:util"); + exports2.custom = node_util_1.inspect.custom; } - return new (P || (P = Promise))(function(resolve2, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/sanitizer.js +var require_sanitizer = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/sanitizer.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Sanitizer = void 0; + var object_js_1 = require_object(); + var RedactedString = "REDACTED"; + var defaultAllowedHeaderNames = [ + "x-ms-client-request-id", + "x-ms-return-client-request-id", + "x-ms-useragent", + "x-ms-correlation-request-id", + "x-ms-request-id", + "client-request-id", + "ms-cv", + "return-client-request-id", + "traceparent", + "Access-Control-Allow-Credentials", + "Access-Control-Allow-Headers", + "Access-Control-Allow-Methods", + "Access-Control-Allow-Origin", + "Access-Control-Expose-Headers", + "Access-Control-Max-Age", + "Access-Control-Request-Headers", + "Access-Control-Request-Method", + "Origin", + "Accept", + "Accept-Encoding", + "Cache-Control", + "Connection", + "Content-Length", + "Content-Type", + "Date", + "ETag", + "Expires", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "Last-Modified", + "Pragma", + "Request-Id", + "Retry-After", + "Server", + "Transfer-Encoding", + "User-Agent", + "WWW-Authenticate" + ]; + var defaultAllowedQueryParameters = ["api-version"]; + var Sanitizer = class { + allowedHeaderNames; + allowedQueryParameters; + constructor({ additionalAllowedHeaderNames: allowedHeaderNames = [], additionalAllowedQueryParameters: allowedQueryParameters = [] } = {}) { + allowedHeaderNames = defaultAllowedHeaderNames.concat(allowedHeaderNames); + allowedQueryParameters = defaultAllowedQueryParameters.concat(allowedQueryParameters); + this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); + this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); } - } - function step(result) { - result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); - } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -} -function __generator(thisArg, body) { - var _ = { label: 0, sent: function() { - if (t[0] & 1) throw t[1]; - return t[1]; - }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); - return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { - return this; - }), g; - function verb(n) { - return function(v) { - return step([n, v]); - }; - } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (g && (g = 0, op[0] && (_ = 0)), _) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: - case 1: - t = op; - break; - case 4: - _.label++; - return { value: op[1], done: false }; - case 5: - _.label++; - y = op[1]; - op = [0]; - continue; - case 7: - op = _.ops.pop(); - _.trys.pop(); - continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { - _ = 0; - continue; - } - if (op[0] === 3 && (!t || op[1] > t[0] && op[1] < t[3])) { - _.label = op[1]; - break; + /** + * Sanitizes an object for logging. + * @param obj - The object to sanitize + * @returns - The sanitized object as a string + */ + sanitize(obj) { + const seen = /* @__PURE__ */ new Set(); + return JSON.stringify(obj, (key, value) => { + if (value instanceof Error) { + return { + ...value, + name: value.name, + message: value.message + }; } - if (op[0] === 6 && _.label < t[1]) { - _.label = t[1]; - t = op; - break; + if (key === "headers") { + return this.sanitizeHeaders(value); + } else if (key === "url") { + return this.sanitizeUrl(value); + } else if (key === "query") { + return this.sanitizeQuery(value); + } else if (key === "body") { + return void 0; + } else if (key === "response") { + return void 0; + } else if (key === "operationSpec") { + return void 0; + } else if (Array.isArray(value) || (0, object_js_1.isObject)(value)) { + if (seen.has(value)) { + return "[Circular]"; + } + seen.add(value); } - if (t && _.label < t[2]) { - _.label = t[2]; - _.ops.push(op); - break; + return value; + }, 2); + } + /** + * Sanitizes a URL for logging. + * @param value - The URL to sanitize + * @returns - The sanitized URL as a string + */ + sanitizeUrl(value) { + if (typeof value !== "string" || value === null || value === "") { + return value; + } + const url = new URL(value); + if (!url.search) { + return value; + } + for (const [key] of url.searchParams) { + if (!this.allowedQueryParameters.has(key.toLowerCase())) { + url.searchParams.set(key, RedactedString); } - if (t[2]) _.ops.pop(); - _.trys.pop(); - continue; + } + return url.toString(); } - op = body.call(thisArg, _); - } catch (e) { - op = [6, e]; - y = 0; - } finally { - f = t = 0; - } - if (op[0] & 5) throw op[1]; - return { value: op[0] ? op[1] : void 0, done: true }; - } -} -function __exportStar(m, o) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); -} -function __values2(o) { - var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; - if (m) return m.call(o); - if (o && typeof o.length === "number") return { - next: function() { - if (o && i >= o.length) o = void 0; - return { value: o && o[i++], done: !o }; - } - }; - throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); -} -function __read(o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } catch (error3) { - e = { error: error3 }; - } finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } finally { - if (e) throw e.error; - } - } - return ar; -} -function __spread() { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; -} -function __spreadArrays() { - for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; - for (var r = Array(s), k = 0, i = 0; i < il; i++) - for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) - r[k] = a[j]; - return r; -} -function __spreadArray(to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); -} -function __await(v) { - return this instanceof __await ? (this.v = v, this) : new __await(v); -} -function __asyncGenerator(thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function() { - return this; - }, i; - function awaitReturn(f) { - return function(v) { - return Promise.resolve(v).then(f, reject); - }; - } - function verb(n, f) { - if (g[n]) { - i[n] = function(v) { - return new Promise(function(a, b) { - q.push([n, v, a, b]) > 1 || resume(n, v); - }); - }; - if (f) i[n] = f(i[n]); - } - } - function resume(n, v) { - try { - step(g[n](v)); - } catch (e) { - settle(q[0][3], e); - } - } - function step(r) { - r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); - } - function fulfill(value) { - resume("next", value); - } - function reject(value) { - resume("throw", value); - } - function settle(f, v) { - if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); - } -} -function __asyncDelegator(o) { - var i, p; - return i = {}, verb("next"), verb("throw", function(e) { - throw e; - }), verb("return"), i[Symbol.iterator] = function() { - return this; - }, i; - function verb(n, f) { - i[n] = o[n] ? function(v) { - return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? f(v) : v; - } : f; - } -} -function __asyncValues(o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values2 === "function" ? 
__values2(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { - return this; - }, i); - function verb(n) { - i[n] = o[n] && function(v) { - return new Promise(function(resolve2, reject) { - v = o[n](v), settle(resolve2, reject, v.done, v.value); - }); - }; - } - function settle(resolve2, reject, d, v) { - Promise.resolve(v).then(function(v2) { - resolve2({ value: v2, done: d }); - }, reject); - } -} -function __makeTemplateObject(cooked, raw) { - if (Object.defineProperty) { - Object.defineProperty(cooked, "raw", { value: raw }); - } else { - cooked.raw = raw; - } - return cooked; -} -function __importStar(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); - } - __setModuleDefault(result, mod); - return result; -} -function __importDefault(mod) { - return mod && mod.__esModule ? mod : { default: mod }; -} -function __classPrivateFieldGet(receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); -} -function __classPrivateFieldSet(receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value), value; -} -function __classPrivateFieldIn(state, receiver) { - if (receiver === null || typeof receiver !== "object" && typeof receiver !== "function") throw new TypeError("Cannot use 'in' operator on non-object"); - return typeof state === "function" ? receiver === state : state.has(receiver); -} -function __addDisposableResource(env, value, async) { - if (value !== null && value !== void 0) { - if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); - var dispose, inner; - if (async) { - if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); - dispose = value[Symbol.asyncDispose]; - } - if (dispose === void 0) { - if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); - dispose = value[Symbol.dispose]; - if (async) inner = dispose; - } - if (typeof dispose !== "function") throw new TypeError("Object not disposable."); - if (inner) dispose = function() { - try { - inner.call(this); - } catch (e) { - return Promise.reject(e); - } - }; - env.stack.push({ value, dispose, async }); - } else if (async) { - env.stack.push({ async: true }); - } - return value; -} -function __disposeResources(env) { - function fail(e) { - env.error = env.hasError ? 
new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; - env.hasError = true; - } - var r, s = 0; - function next() { - while (r = env.stack.pop()) { - try { - if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next); - if (r.dispose) { - var result = r.dispose.call(r.value); - if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { - fail(e); - return next(); - }); - } else s |= 1; - } catch (e) { - fail(e); + sanitizeHeaders(obj) { + const sanitized = {}; + for (const key of Object.keys(obj)) { + if (this.allowedHeaderNames.has(key.toLowerCase())) { + sanitized[key] = obj[key]; + } else { + sanitized[key] = RedactedString; + } + } + return sanitized; } - } - if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve(); - if (env.hasError) throw env.error; - } - return next(); -} -function __rewriteRelativeImportExtension(path3, preserveJsx) { - if (typeof path3 === "string" && /^\.\.?\//.test(path3)) { - return path3.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function(m, tsx, d, ext, cm) { - return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : d + ext + "." + cm.toLowerCase() + "js"; - }); - } - return path3; -} -var extendStatics, __assign, __createBinding, __setModuleDefault, ownKeys, _SuppressedError, tslib_es6_default; -var init_tslib_es6 = __esm({ - "node_modules/tslib/tslib.es6.mjs"() { - extendStatics = function(d, b) { - extendStatics = Object.setPrototypeOf || { __proto__: [] } instanceof Array && function(d2, b2) { - d2.__proto__ = b2; - } || function(d2, b2) { - for (var p in b2) if (Object.prototype.hasOwnProperty.call(b2, p)) d2[p] = b2[p]; - }; - return extendStatics(d, b); - }; - __assign = function() { - __assign = Object.assign || function __assign2(t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + sanitizeQuery(value) { + if (typeof value !== "object" || value === null) { + return value; } - return t; - }; - return __assign.apply(this, arguments); - }; - __createBinding = Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; + const sanitized = {}; + for (const k of Object.keys(value)) { + if (this.allowedQueryParameters.has(k.toLowerCase())) { + sanitized[k] = value[k]; + } else { + sanitized[k] = RedactedString; + } + } + return sanitized; } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - __setModuleDefault = Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }; - ownKeys = function(o) { - ownKeys = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys(o); - }; - _SuppressedError = typeof SuppressedError === "function" ? 
SuppressedError : function(error3, suppressed, message) { - var e = new Error(message); - return e.name = "SuppressedError", e.error = error3, e.suppressed = suppressed, e; - }; - tslib_es6_default = { - __extends, - __assign, - __rest, - __decorate, - __param, - __esDecorate, - __runInitializers, - __propKey, - __setFunctionName, - __metadata, - __awaiter, - __generator, - __createBinding, - __exportStar, - __values: __values2, - __read, - __spread, - __spreadArrays, - __spreadArray, - __await, - __asyncGenerator, - __asyncDelegator, - __asyncValues, - __makeTemplateObject, - __importStar, - __importDefault, - __classPrivateFieldGet, - __classPrivateFieldSet, - __classPrivateFieldIn, - __addDisposableResource, - __disposeResources, - __rewriteRelativeImportExtension }; + exports2.Sanitizer = Sanitizer; } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/abort-controller/AbortError.js -var require_AbortError = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/abort-controller/AbortError.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/restError.js +var require_restError = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/restError.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AbortError = void 0; - var AbortError = class extends Error { - constructor(message) { + exports2.RestError = void 0; + exports2.isRestError = isRestError; + var error_js_1 = require_error(); + var inspect_js_1 = require_inspect(); + var sanitizer_js_1 = require_sanitizer(); + var errorSanitizer = new sanitizer_js_1.Sanitizer(); + var RestError = class _RestError extends Error { + /** + * Something went wrong when making the request. + * This means the actual request failed for some reason, + * such as a DNS issue or the connection being lost. + */ + static REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; + /** + * This means that parsing the response from the server failed. + * It may have been malformed. + */ + static PARSE_ERROR = "PARSE_ERROR"; + /** + * The code of the error itself (use statics on RestError if possible.) + */ + code; + /** + * The HTTP status code of the request (if applicable.) + */ + statusCode; + /** + * The request that was made. + * This property is non-enumerable. + */ + request; + /** + * The response received (if any.) + * This property is non-enumerable. + */ + response; + /** + * Bonus property set by the throw site. + */ + details; + constructor(message, options = {}) { super(message); - this.name = "AbortError"; + this.name = "RestError"; + this.code = options.code; + this.statusCode = options.statusCode; + Object.defineProperty(this, "request", { value: options.request, enumerable: false }); + Object.defineProperty(this, "response", { value: options.response, enumerable: false }); + const agent = this.request?.agent ? 
{ + maxFreeSockets: this.request.agent.maxFreeSockets, + maxSockets: this.request.agent.maxSockets + } : void 0; + Object.defineProperty(this, inspect_js_1.custom, { + value: () => { + return `RestError: ${this.message} + ${errorSanitizer.sanitize({ + ...this, + request: { ...this.request, agent }, + response: this.response + })}`; + }, + enumerable: false + }); + Object.setPrototypeOf(this, _RestError.prototype); } }; - exports2.AbortError = AbortError; + exports2.RestError = RestError; + function isRestError(e) { + if (e instanceof RestError) { + return true; + } + return (0, error_js_1.isError)(e) && e.name === "RestError"; + } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/log.js -var require_log2 = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/log.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/bytesEncoding.js +var require_bytesEncoding = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/bytesEncoding.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.log = log; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var node_os_1 = require("node:os"); - var node_util_1 = tslib_1.__importDefault(require("node:util")); - var node_process_1 = tslib_1.__importDefault(require("node:process")); - function log(message, ...args) { - node_process_1.default.stderr.write(`${node_util_1.default.format(message, ...args)}${node_os_1.EOL}`); + exports2.uint8ArrayToString = uint8ArrayToString; + exports2.stringToUint8Array = stringToUint8Array; + function uint8ArrayToString(bytes, format) { + return Buffer.from(bytes).toString(format); + } + function stringToUint8Array(value, format) { + return Buffer.from(value, format); } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/debug.js -var require_debug2 = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/debug.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/log.js +var require_log3 = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/log.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - var log_js_1 = require_log2(); - var debugEnvVariable = typeof process !== "undefined" && process.env && process.env.DEBUG || void 0; - var enabledString; - var enabledNamespaces = []; - var skippedNamespaces = []; - var debuggers = []; - if (debugEnvVariable) { - enable(debugEnvVariable); + exports2.logger = void 0; + var logger_js_1 = require_logger(); + exports2.logger = (0, logger_js_1.createClientLogger)("ts-http-runtime"); + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/nodeHttpClient.js +var require_nodeHttpClient = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/nodeHttpClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getBodyLength = getBodyLength; + exports2.createNodeHttpClient = createNodeHttpClient; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var node_http_1 = tslib_1.__importDefault(require("node:http")); + var node_https_1 = tslib_1.__importDefault(require("node:https")); + var node_zlib_1 = tslib_1.__importDefault(require("node:zlib")); + var node_stream_1 = require("node:stream"); + var AbortError_js_1 = require_AbortError(); + var httpHeaders_js_1 = require_httpHeaders(); + var restError_js_1 = 
require_restError(); + var log_js_1 = require_log3(); + var sanitizer_js_1 = require_sanitizer(); + var DEFAULT_TLS_SETTINGS = {}; + function isReadableStream(body) { + return body && typeof body.pipe === "function"; } - var debugObj = Object.assign((namespace) => { - return createDebugger(namespace); - }, { - enable, - enabled, - disable, - log: log_js_1.log - }); - function enable(namespaces) { - enabledString = namespaces; - enabledNamespaces = []; - skippedNamespaces = []; - const namespaceList = namespaces.split(",").map((ns) => ns.trim()); - for (const ns of namespaceList) { - if (ns.startsWith("-")) { - skippedNamespaces.push(ns.substring(1)); - } else { - enabledNamespaces.push(ns); - } - } - for (const instance of debuggers) { - instance.enabled = enabled(instance.namespace); + function isStreamComplete(stream) { + if (stream.readable === false) { + return Promise.resolve(); } + return new Promise((resolve2) => { + const handler2 = () => { + resolve2(); + stream.removeListener("close", handler2); + stream.removeListener("end", handler2); + stream.removeListener("error", handler2); + }; + stream.on("close", handler2); + stream.on("end", handler2); + stream.on("error", handler2); + }); } - function enabled(namespace) { - if (namespace.endsWith("*")) { - return true; - } - for (const skipped of skippedNamespaces) { - if (namespaceMatches(namespace, skipped)) { - return false; - } - } - for (const enabledNamespace of enabledNamespaces) { - if (namespaceMatches(namespace, enabledNamespace)) { - return true; + function isArrayBuffer(body) { + return body && typeof body.byteLength === "number"; + } + var ReportTransform = class extends node_stream_1.Transform { + loadedBytes = 0; + progressCallback; + // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type + _transform(chunk, _encoding, callback) { + this.push(chunk); + this.loadedBytes += chunk.length; + try { + this.progressCallback({ loadedBytes: this.loadedBytes }); + callback(); + } catch (e) { + callback(e); } } - return false; - } - function namespaceMatches(namespace, patternToMatch) { - if (patternToMatch.indexOf("*") === -1) { - return namespace === patternToMatch; + constructor(progressCallback) { + super(); + this.progressCallback = progressCallback; } - let pattern = patternToMatch; - if (patternToMatch.indexOf("**") !== -1) { - const patternParts = []; - let lastCharacter = ""; - for (const character of patternToMatch) { - if (character === "*" && lastCharacter === "*") { - continue; - } else { - lastCharacter = character; - patternParts.push(character); + }; + var NodeHttpClient = class { + cachedHttpAgent; + cachedHttpsAgents = /* @__PURE__ */ new WeakMap(); + /** + * Makes a request over an underlying transport layer and returns the response. + * @param request - The request to be made. + */ + async sendRequest(request2) { + const abortController = new AbortController(); + let abortListener; + if (request2.abortSignal) { + if (request2.abortSignal.aborted) { + throw new AbortError_js_1.AbortError("The operation was aborted. 
Request has already been canceled."); } + abortListener = (event) => { + if (event.type === "abort") { + abortController.abort(); + } + }; + request2.abortSignal.addEventListener("abort", abortListener); } - pattern = patternParts.join(""); - } - let namespaceIndex = 0; - let patternIndex = 0; - const patternLength = pattern.length; - const namespaceLength = namespace.length; - let lastWildcard = -1; - let lastWildcardNamespace = -1; - while (namespaceIndex < namespaceLength && patternIndex < patternLength) { - if (pattern[patternIndex] === "*") { - lastWildcard = patternIndex; - patternIndex++; - if (patternIndex === patternLength) { - return true; + let timeoutId; + if (request2.timeout > 0) { + timeoutId = setTimeout(() => { + const sanitizer = new sanitizer_js_1.Sanitizer(); + log_js_1.logger.info(`request to '${sanitizer.sanitizeUrl(request2.url)}' timed out. canceling...`); + abortController.abort(); + }, request2.timeout); + } + const acceptEncoding = request2.headers.get("Accept-Encoding"); + const shouldDecompress = acceptEncoding?.includes("gzip") || acceptEncoding?.includes("deflate"); + let body = typeof request2.body === "function" ? request2.body() : request2.body; + if (body && !request2.headers.has("Content-Length")) { + const bodyLength = getBodyLength(body); + if (bodyLength !== null) { + request2.headers.set("Content-Length", bodyLength); } - while (namespace[namespaceIndex] !== pattern[patternIndex]) { - namespaceIndex++; - if (namespaceIndex === namespaceLength) { - return false; + } + let responseStream; + try { + if (body && request2.onUploadProgress) { + const onUploadProgress = request2.onUploadProgress; + const uploadReportStream = new ReportTransform(onUploadProgress); + uploadReportStream.on("error", (e) => { + log_js_1.logger.error("Error in upload progress", e); + }); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } else { + uploadReportStream.end(body); } + body = uploadReportStream; } - lastWildcardNamespace = namespaceIndex; - namespaceIndex++; - patternIndex++; - continue; - } else if (pattern[patternIndex] === namespace[namespaceIndex]) { - patternIndex++; - namespaceIndex++; - } else if (lastWildcard >= 0) { - patternIndex = lastWildcard + 1; - namespaceIndex = lastWildcardNamespace + 1; - if (namespaceIndex === namespaceLength) { - return false; + const res = await this.makeRequest(request2, abortController, body); + if (timeoutId !== void 0) { + clearTimeout(timeoutId); } - while (namespace[namespaceIndex] !== pattern[patternIndex]) { - namespaceIndex++; - if (namespaceIndex === namespaceLength) { - return false; + const headers = getResponseHeaders(res); + const status = res.statusCode ?? 0; + const response = { + status, + headers, + request: request2 + }; + if (request2.method === "HEAD") { + res.resume(); + return response; + } + responseStream = shouldDecompress ? 
getDecodedResponseStream(res, headers) : res; + const onDownloadProgress = request2.onDownloadProgress; + if (onDownloadProgress) { + const downloadReportStream = new ReportTransform(onDownloadProgress); + downloadReportStream.on("error", (e) => { + log_js_1.logger.error("Error in download progress", e); + }); + responseStream.pipe(downloadReportStream); + responseStream = downloadReportStream; + } + if ( + // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code + request2.streamResponseStatusCodes?.has(Number.POSITIVE_INFINITY) || request2.streamResponseStatusCodes?.has(response.status) + ) { + response.readableStreamBody = responseStream; + } else { + response.bodyAsText = await streamToText(responseStream); + } + return response; + } finally { + if (request2.abortSignal && abortListener) { + let uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + let downloadStreamDone = Promise.resolve(); + if (isReadableStream(responseStream)) { + downloadStreamDone = isStreamComplete(responseStream); } + Promise.all([uploadStreamDone, downloadStreamDone]).then(() => { + if (abortListener) { + request2.abortSignal?.removeEventListener("abort", abortListener); + } + }).catch((e) => { + log_js_1.logger.warning("Error when cleaning up abortListener on httpRequest", e); + }); } - lastWildcardNamespace = namespaceIndex; - namespaceIndex++; - patternIndex++; - continue; + } + } + makeRequest(request2, abortController, body) { + const url = new URL(request2.url); + const isInsecure = url.protocol !== "https:"; + if (isInsecure && !request2.allowInsecureConnection) { + throw new Error(`Cannot connect to ${request2.url} while allowInsecureConnection is false.`); + } + const agent = request2.agent ?? this.getOrCreateAgent(request2, isInsecure); + const options = { + agent, + hostname: url.hostname, + path: `${url.pathname}${url.search}`, + port: url.port, + method: request2.method, + headers: request2.headers.toJSON({ preserveCase: true }), + ...request2.requestOverrides + }; + return new Promise((resolve2, reject) => { + const req = isInsecure ? node_http_1.default.request(options, resolve2) : node_https_1.default.request(options, resolve2); + req.once("error", (err) => { + reject(new restError_js_1.RestError(err.message, { code: err.code ?? restError_js_1.RestError.REQUEST_SEND_ERROR, request: request2 })); + }); + abortController.signal.addEventListener("abort", () => { + const abortError = new AbortError_js_1.AbortError("The operation was aborted. Rejecting from abort signal callback while making request."); + req.destroy(abortError); + reject(abortError); + }); + if (body && isReadableStream(body)) { + body.pipe(req); + } else if (body) { + if (typeof body === "string" || Buffer.isBuffer(body)) { + req.end(body); + } else if (isArrayBuffer(body)) { + req.end(ArrayBuffer.isView(body) ? 
Buffer.from(body.buffer) : Buffer.from(body)); + } else { + log_js_1.logger.error("Unrecognized body type", body); + reject(new restError_js_1.RestError("Unrecognized body type")); + } + } else { + req.end(); + } + }); + } + getOrCreateAgent(request2, isInsecure) { + const disableKeepAlive = request2.disableKeepAlive; + if (isInsecure) { + if (disableKeepAlive) { + return node_http_1.default.globalAgent; + } + if (!this.cachedHttpAgent) { + this.cachedHttpAgent = new node_http_1.default.Agent({ keepAlive: true }); + } + return this.cachedHttpAgent; } else { - return false; + if (disableKeepAlive && !request2.tlsSettings) { + return node_https_1.default.globalAgent; + } + const tlsSettings = request2.tlsSettings ?? DEFAULT_TLS_SETTINGS; + let agent = this.cachedHttpsAgents.get(tlsSettings); + if (agent && agent.options.keepAlive === !disableKeepAlive) { + return agent; + } + log_js_1.logger.info("No cached TLS Agent exist, creating a new Agent"); + agent = new node_https_1.default.Agent({ + // keepAlive is true if disableKeepAlive is false. + keepAlive: !disableKeepAlive, + // Since we are spreading, if no tslSettings were provided, nothing is added to the agent options. + ...tlsSettings + }); + this.cachedHttpsAgents.set(tlsSettings, agent); + return agent; } } - const namespaceDone = namespaceIndex === namespace.length; - const patternDone = patternIndex === pattern.length; - const trailingWildCard = patternIndex === pattern.length - 1 && pattern[patternIndex] === "*"; - return namespaceDone && (patternDone || trailingWildCard); + }; + function getResponseHeaders(res) { + const headers = (0, httpHeaders_js_1.createHttpHeaders)(); + for (const header of Object.keys(res.headers)) { + const value = res.headers[header]; + if (Array.isArray(value)) { + if (value.length > 0) { + headers.set(header, value[0]); + } + } else if (value) { + headers.set(header, value); + } + } + return headers; } - function disable() { - const result = enabledString || ""; - enable(""); - return result; + function getDecodedResponseStream(stream, headers) { + const contentEncoding = headers.get("Content-Encoding"); + if (contentEncoding === "gzip") { + const unzip = node_zlib_1.default.createGunzip(); + stream.pipe(unzip); + return unzip; + } else if (contentEncoding === "deflate") { + const inflate = node_zlib_1.default.createInflate(); + stream.pipe(inflate); + return inflate; + } + return stream; } - function createDebugger(namespace) { - const newDebugger = Object.assign(debug5, { - enabled: enabled(namespace), - destroy, - log: debugObj.log, - namespace, - extend: extend3 + function streamToText(stream) { + return new Promise((resolve2, reject) => { + const buffer = []; + stream.on("data", (chunk) => { + if (Buffer.isBuffer(chunk)) { + buffer.push(chunk); + } else { + buffer.push(Buffer.from(chunk)); + } + }); + stream.on("end", () => { + resolve2(Buffer.concat(buffer).toString("utf8")); + }); + stream.on("error", (e) => { + if (e && e?.name === "AbortError") { + reject(e); + } else { + reject(new restError_js_1.RestError(`Error reading response as text: ${e.message}`, { + code: restError_js_1.RestError.PARSE_ERROR + })); + } + }); }); - function debug5(...args) { - if (!newDebugger.enabled) { - return; - } - if (args.length > 0) { - args[0] = `${namespace} ${args[0]}`; - } - newDebugger.log(...args); - } - debuggers.push(newDebugger); - return newDebugger; } - function destroy() { - const index = debuggers.indexOf(this); - if (index >= 0) { - debuggers.splice(index, 1); - return true; + function 
getBodyLength(body) { + if (!body) { + return 0; + } else if (Buffer.isBuffer(body)) { + return body.length; + } else if (isReadableStream(body)) { + return null; + } else if (isArrayBuffer(body)) { + return body.byteLength; + } else if (typeof body === "string") { + return Buffer.from(body).length; + } else { + return null; } - return false; } - function extend3(namespace) { - const newDebugger = createDebugger(`${this.namespace}:${namespace}`); - newDebugger.log = this.log; - return newDebugger; + function createNodeHttpClient() { + return new NodeHttpClient(); } - exports2.default = debugObj; } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/logger.js -var require_logger = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/logger.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/defaultHttpClient.js +var require_defaultHttpClient = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/defaultHttpClient.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.TypeSpecRuntimeLogger = void 0; - exports2.createLoggerContext = createLoggerContext; - exports2.setLogLevel = setLogLevel; - exports2.getLogLevel = getLogLevel; - exports2.createClientLogger = createClientLogger; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var debug_js_1 = tslib_1.__importDefault(require_debug2()); - var TYPESPEC_RUNTIME_LOG_LEVELS = ["verbose", "info", "warning", "error"]; - var levelMap = { - verbose: 400, - info: 300, - warning: 200, - error: 100 - }; - function patchLogMethod(parent, child) { - child.log = (...args) => { - parent.log(...args); - }; - } - function isTypeSpecRuntimeLogLevel(level) { - return TYPESPEC_RUNTIME_LOG_LEVELS.includes(level); + exports2.createDefaultHttpClient = createDefaultHttpClient; + var nodeHttpClient_js_1 = require_nodeHttpClient(); + function createDefaultHttpClient() { + return (0, nodeHttpClient_js_1.createNodeHttpClient)(); } - function createLoggerContext(options) { - const registeredLoggers = /* @__PURE__ */ new Set(); - const logLevelFromEnv = typeof process !== "undefined" && process.env && process.env[options.logLevelEnvVarName] || void 0; - let logLevel; - const clientLogger = (0, debug_js_1.default)(options.namespace); - clientLogger.log = (...args) => { - debug_js_1.default.log(...args); - }; - function contextSetLogLevel(level) { - if (level && !isTypeSpecRuntimeLogLevel(level)) { - throw new Error(`Unknown log level '${level}'. Acceptable values: ${TYPESPEC_RUNTIME_LOG_LEVELS.join(",")}`); - } - logLevel = level; - const enabledNamespaces = []; - for (const logger of registeredLoggers) { - if (shouldEnable(logger)) { - enabledNamespaces.push(logger.namespace); + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/logPolicy.js +var require_logPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/logPolicy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.logPolicyName = void 0; + exports2.logPolicy = logPolicy; + var log_js_1 = require_log3(); + var sanitizer_js_1 = require_sanitizer(); + exports2.logPolicyName = "logPolicy"; + function logPolicy(options = {}) { + const logger = options.logger ?? 
log_js_1.logger.info; + const sanitizer = new sanitizer_js_1.Sanitizer({ + additionalAllowedHeaderNames: options.additionalAllowedHeaderNames, + additionalAllowedQueryParameters: options.additionalAllowedQueryParameters + }); + return { + name: exports2.logPolicyName, + async sendRequest(request2, next) { + if (!logger.enabled) { + return next(request2); } + logger(`Request: ${sanitizer.sanitize(request2)}`); + const response = await next(request2); + logger(`Response status code: ${response.status}`); + logger(`Headers: ${sanitizer.sanitize(response.headers)}`); + return response; } - debug_js_1.default.enable(enabledNamespaces.join(",")); - } - if (logLevelFromEnv) { - if (isTypeSpecRuntimeLogLevel(logLevelFromEnv)) { - contextSetLogLevel(logLevelFromEnv); - } else { - console.error(`${options.logLevelEnvVarName} set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${TYPESPEC_RUNTIME_LOG_LEVELS.join(", ")}.`); - } - } - function shouldEnable(logger) { - return Boolean(logLevel && levelMap[logger.level] <= levelMap[logLevel]); - } - function createLogger2(parent, level) { - const logger = Object.assign(parent.extend(level), { - level - }); - patchLogMethod(parent, logger); - if (shouldEnable(logger)) { - const enabledNamespaces = debug_js_1.default.disable(); - debug_js_1.default.enable(enabledNamespaces + "," + logger.namespace); - } - registeredLoggers.add(logger); - return logger; - } - function contextGetLogLevel() { - return logLevel; - } - function contextCreateClientLogger(namespace) { - const clientRootLogger = clientLogger.extend(namespace); - patchLogMethod(clientLogger, clientRootLogger); - return { - error: createLogger2(clientRootLogger, "error"), - warning: createLogger2(clientRootLogger, "warning"), - info: createLogger2(clientRootLogger, "info"), - verbose: createLogger2(clientRootLogger, "verbose") - }; - } - return { - setLogLevel: contextSetLogLevel, - getLogLevel: contextGetLogLevel, - createClientLogger: contextCreateClientLogger, - logger: clientLogger }; } - var context2 = createLoggerContext({ - logLevelEnvVarName: "TYPESPEC_RUNTIME_LOG_LEVEL", - namespace: "typeSpecRuntime" - }); - exports2.TypeSpecRuntimeLogger = context2.logger; - function setLogLevel(logLevel) { - context2.setLogLevel(logLevel); - } - function getLogLevel() { - return context2.getLogLevel(); - } - function createClientLogger(namespace) { - return context2.createClientLogger(namespace); - } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/httpHeaders.js -var require_httpHeaders = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/httpHeaders.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/redirectPolicy.js +var require_redirectPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/redirectPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createHttpHeaders = createHttpHeaders; - function normalizeName(name) { - return name.toLowerCase(); - } - function* headerIterator(map2) { - for (const entry of map2.values()) { - yield [entry.name, entry.value]; - } - } - var HttpHeadersImpl = class { - _headersMap; - constructor(rawHeaders) { - this._headersMap = /* @__PURE__ */ new Map(); - if (rawHeaders) { - for (const headerName of Object.keys(rawHeaders)) { - this.set(headerName, rawHeaders[headerName]); - } + exports2.redirectPolicyName = void 0; + exports2.redirectPolicy = redirectPolicy; + 
exports2.redirectPolicyName = "redirectPolicy"; + var allowedRedirect = ["GET", "HEAD"]; + function redirectPolicy(options = {}) { + const { maxRetries = 20 } = options; + return { + name: exports2.redirectPolicyName, + async sendRequest(request2, next) { + const response = await next(request2); + return handleRedirect(next, response, maxRetries); } - } - /** - * Set a header in this collection with the provided name and value. The name is - * case-insensitive. - * @param name - The name of the header to set. This value is case-insensitive. - * @param value - The value of the header to set. - */ - set(name, value) { - this._headersMap.set(normalizeName(name), { name, value: String(value).trim() }); - } - /** - * Get the header value for the provided header name, or undefined if no header exists in this - * collection with the provided name. - * @param name - The name of the header. This value is case-insensitive. - */ - get(name) { - return this._headersMap.get(normalizeName(name))?.value; - } - /** - * Get whether or not this header collection contains a header entry for the provided header name. - * @param name - The name of the header to set. This value is case-insensitive. - */ - has(name) { - return this._headersMap.has(normalizeName(name)); - } - /** - * Remove the header with the provided headerName. - * @param name - The name of the header to remove. - */ - delete(name) { - this._headersMap.delete(normalizeName(name)); - } - /** - * Get the JSON object representation of this HTTP header collection. - */ - toJSON(options = {}) { - const result = {}; - if (options.preserveCase) { - for (const entry of this._headersMap.values()) { - result[entry.name] = entry.value; - } - } else { - for (const [normalizedName, entry] of this._headersMap) { - result[normalizedName] = entry.value; - } + }; + } + async function handleRedirect(next, response, maxRetries, currentRetries = 0) { + const { request: request2, status, headers } = response; + const locationHeader = headers.get("location"); + if (locationHeader && (status === 300 || status === 301 && allowedRedirect.includes(request2.method) || status === 302 && allowedRedirect.includes(request2.method) || status === 303 && request2.method === "POST" || status === 307) && currentRetries < maxRetries) { + const url = new URL(locationHeader, request2.url); + request2.url = url.toString(); + if (status === 303) { + request2.method = "GET"; + request2.headers.delete("Content-Length"); + delete request2.body; } - return result; - } - /** - * Get the string representation of this HTTP header collection. - */ - toString() { - return JSON.stringify(this.toJSON({ preserveCase: true })); - } - /** - * Iterate over tuples of header [name, value] pairs. 
- */ - [Symbol.iterator]() { - return headerIterator(this._headersMap); + request2.headers.delete("Authorization"); + const res = await next(request2); + return handleRedirect(next, res, maxRetries, currentRetries + 1); } - }; - function createHttpHeaders(rawHeaders) { - return new HttpHeadersImpl(rawHeaders); + return response; } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/schemes.js -var require_schemes = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/schemes.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgentPlatform.js +var require_userAgentPlatform = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgentPlatform.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getHeaderName = getHeaderName; + exports2.setPlatformSpecificData = setPlatformSpecificData; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var node_os_1 = tslib_1.__importDefault(require("node:os")); + var node_process_1 = tslib_1.__importDefault(require("node:process")); + function getHeaderName() { + return "User-Agent"; + } + async function setPlatformSpecificData(map2) { + if (node_process_1.default && node_process_1.default.versions) { + const osInfo = `${node_os_1.default.type()} ${node_os_1.default.release()}; ${node_os_1.default.arch()}`; + const versions = node_process_1.default.versions; + if (versions.bun) { + map2.set("Bun", `${versions.bun} (${osInfo})`); + } else if (versions.deno) { + map2.set("Deno", `${versions.deno} (${osInfo})`); + } else if (versions.node) { + map2.set("Node", `${versions.node} (${osInfo})`); + } + } + } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/oauth2Flows.js -var require_oauth2Flows = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/oauth2Flows.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/constants.js +var require_constants13 = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/constants.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.DEFAULT_RETRY_POLICY_COUNT = exports2.SDK_VERSION = void 0; + exports2.SDK_VERSION = "0.3.2"; + exports2.DEFAULT_RETRY_POLICY_COUNT = 3; } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/uuidUtils.js -var require_uuidUtils = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/uuidUtils.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgent.js +var require_userAgent = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgent.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.randomUUID = randomUUID; - function randomUUID() { - return crypto.randomUUID(); + exports2.getUserAgentHeaderName = getUserAgentHeaderName; + exports2.getUserAgentValue = getUserAgentValue; + var userAgentPlatform_js_1 = require_userAgentPlatform(); + var constants_js_1 = require_constants13(); + function getUserAgentString(telemetryInfo) { + const parts = []; + for (const [key, value] of telemetryInfo) { + const token = value ? 
`${key}/${value}` : key; + parts.push(token); + } + return parts.join(" "); + } + function getUserAgentHeaderName() { + return (0, userAgentPlatform_js_1.getHeaderName)(); + } + async function getUserAgentValue(prefix) { + const runtimeInfo = /* @__PURE__ */ new Map(); + runtimeInfo.set("ts-http-runtime", constants_js_1.SDK_VERSION); + await (0, userAgentPlatform_js_1.setPlatformSpecificData)(runtimeInfo); + const defaultAgent = getUserAgentString(runtimeInfo); + const userAgentValue = prefix ? `${prefix} ${defaultAgent}` : defaultAgent; + return userAgentValue; } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/pipelineRequest.js -var require_pipelineRequest = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/pipelineRequest.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/userAgentPolicy.js +var require_userAgentPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/userAgentPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createPipelineRequest = createPipelineRequest; - var httpHeaders_js_1 = require_httpHeaders(); - var uuidUtils_js_1 = require_uuidUtils(); - var PipelineRequestImpl = class { - url; - method; - headers; - timeout; - withCredentials; - body; - multipartBody; - formData; - streamResponseStatusCodes; - enableBrowserStreams; - proxySettings; - disableKeepAlive; - abortSignal; - requestId; - allowInsecureConnection; - onUploadProgress; - onDownloadProgress; - requestOverrides; - authSchemes; - constructor(options) { - this.url = options.url; - this.body = options.body; - this.headers = options.headers ?? (0, httpHeaders_js_1.createHttpHeaders)(); - this.method = options.method ?? "GET"; - this.timeout = options.timeout ?? 0; - this.multipartBody = options.multipartBody; - this.formData = options.formData; - this.disableKeepAlive = options.disableKeepAlive ?? false; - this.proxySettings = options.proxySettings; - this.streamResponseStatusCodes = options.streamResponseStatusCodes; - this.withCredentials = options.withCredentials ?? false; - this.abortSignal = options.abortSignal; - this.onUploadProgress = options.onUploadProgress; - this.onDownloadProgress = options.onDownloadProgress; - this.requestId = options.requestId || (0, uuidUtils_js_1.randomUUID)(); - this.allowInsecureConnection = options.allowInsecureConnection ?? false; - this.enableBrowserStreams = options.enableBrowserStreams ?? 
false; - this.requestOverrides = options.requestOverrides; - this.authSchemes = options.authSchemes; - } - }; - function createPipelineRequest(options) { - return new PipelineRequestImpl(options); + exports2.userAgentPolicyName = void 0; + exports2.userAgentPolicy = userAgentPolicy; + var userAgent_js_1 = require_userAgent(); + var UserAgentHeaderName = (0, userAgent_js_1.getUserAgentHeaderName)(); + exports2.userAgentPolicyName = "userAgentPolicy"; + function userAgentPolicy(options = {}) { + const userAgentValue = (0, userAgent_js_1.getUserAgentValue)(options.userAgentPrefix); + return { + name: exports2.userAgentPolicyName, + async sendRequest(request2, next) { + if (!request2.headers.has(UserAgentHeaderName)) { + request2.headers.set(UserAgentHeaderName, await userAgentValue); + } + return next(request2); + } + }; } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/pipeline.js -var require_pipeline = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/pipeline.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/decompressResponsePolicy.js +var require_decompressResponsePolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/decompressResponsePolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createEmptyPipeline = createEmptyPipeline; - var ValidPhaseNames = /* @__PURE__ */ new Set(["Deserialize", "Serialize", "Retry", "Sign"]); - var HttpPipeline = class _HttpPipeline { - _policies = []; - _orderedPolicies; - constructor(policies) { - this._policies = policies?.slice(0) ?? []; - this._orderedPolicies = void 0; - } - addPolicy(policy, options = {}) { - if (options.phase && options.afterPhase) { - throw new Error("Policies inside a phase cannot specify afterPhase."); - } - if (options.phase && !ValidPhaseNames.has(options.phase)) { - throw new Error(`Invalid phase name: ${options.phase}`); - } - if (options.afterPhase && !ValidPhaseNames.has(options.afterPhase)) { - throw new Error(`Invalid afterPhase name: ${options.afterPhase}`); - } - this._policies.push({ - policy, - options - }); - this._orderedPolicies = void 0; - } - removePolicy(options) { - const removedPolicies = []; - this._policies = this._policies.filter((policyDescriptor) => { - if (options.name && policyDescriptor.policy.name === options.name || options.phase && policyDescriptor.options.phase === options.phase) { - removedPolicies.push(policyDescriptor.policy); - return false; - } else { - return true; - } - }); - this._orderedPolicies = void 0; - return removedPolicies; - } - sendRequest(httpClient, request2) { - const policies = this.getOrderedPolicies(); - const pipeline = policies.reduceRight((next, policy) => { - return (req) => { - return policy.sendRequest(req, next); - }; - }, (req) => httpClient.sendRequest(req)); - return pipeline(request2); - } - getOrderedPolicies() { - if (!this._orderedPolicies) { - this._orderedPolicies = this.orderPolicies(); - } - return this._orderedPolicies; - } - clone() { - return new _HttpPipeline(this._policies); - } - static create() { - return new _HttpPipeline(); - } - orderPolicies() { - const result = []; - const policyMap = /* @__PURE__ */ new Map(); - function createPhase(name) { - return { - name, - policies: /* @__PURE__ */ new Set(), - hasRun: false, - hasAfterPolicies: false - }; - } - const serializePhase = createPhase("Serialize"); - const noPhase = createPhase("None"); - const deserializePhase = 
createPhase("Deserialize"); - const retryPhase = createPhase("Retry"); - const signPhase = createPhase("Sign"); - const orderedPhases = [serializePhase, noPhase, deserializePhase, retryPhase, signPhase]; - function getPhase(phase) { - if (phase === "Retry") { - return retryPhase; - } else if (phase === "Serialize") { - return serializePhase; - } else if (phase === "Deserialize") { - return deserializePhase; - } else if (phase === "Sign") { - return signPhase; - } else { - return noPhase; - } - } - for (const descriptor of this._policies) { - const policy = descriptor.policy; - const options = descriptor.options; - const policyName = policy.name; - if (policyMap.has(policyName)) { - throw new Error("Duplicate policy names not allowed in pipeline"); - } - const node = { - policy, - dependsOn: /* @__PURE__ */ new Set(), - dependants: /* @__PURE__ */ new Set() - }; - if (options.afterPhase) { - node.afterPhase = getPhase(options.afterPhase); - node.afterPhase.hasAfterPolicies = true; - } - policyMap.set(policyName, node); - const phase = getPhase(options.phase); - phase.policies.add(node); - } - for (const descriptor of this._policies) { - const { policy, options } = descriptor; - const policyName = policy.name; - const node = policyMap.get(policyName); - if (!node) { - throw new Error(`Missing node for policy ${policyName}`); - } - if (options.afterPolicies) { - for (const afterPolicyName of options.afterPolicies) { - const afterNode = policyMap.get(afterPolicyName); - if (afterNode) { - node.dependsOn.add(afterNode); - afterNode.dependants.add(node); - } - } - } - if (options.beforePolicies) { - for (const beforePolicyName of options.beforePolicies) { - const beforeNode = policyMap.get(beforePolicyName); - if (beforeNode) { - beforeNode.dependsOn.add(node); - node.dependants.add(beforeNode); - } - } - } - } - function walkPhase(phase) { - phase.hasRun = true; - for (const node of phase.policies) { - if (node.afterPhase && (!node.afterPhase.hasRun || node.afterPhase.policies.size)) { - continue; - } - if (node.dependsOn.size === 0) { - result.push(node.policy); - for (const dependant of node.dependants) { - dependant.dependsOn.delete(node); - } - policyMap.delete(node.policy.name); - phase.policies.delete(node); - } - } - } - function walkPhases() { - for (const phase of orderedPhases) { - walkPhase(phase); - if (phase.policies.size > 0 && phase !== noPhase) { - if (!noPhase.hasRun) { - walkPhase(noPhase); - } - return; - } - if (phase.hasAfterPolicies) { - walkPhase(noPhase); - } - } - } - let iteration = 0; - while (policyMap.size > 0) { - iteration++; - const initialResultLength = result.length; - walkPhases(); - if (result.length <= initialResultLength && iteration > 1) { - throw new Error("Cannot satisfy policy dependencies due to requirements cycle."); + exports2.decompressResponsePolicyName = void 0; + exports2.decompressResponsePolicy = decompressResponsePolicy; + exports2.decompressResponsePolicyName = "decompressResponsePolicy"; + function decompressResponsePolicy() { + return { + name: exports2.decompressResponsePolicyName, + async sendRequest(request2, next) { + if (request2.method !== "HEAD") { + request2.headers.set("Accept-Encoding", "gzip,deflate"); } + return next(request2); } - return result; - } - }; - function createEmptyPipeline() { - return HttpPipeline.create(); + }; } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/object.js -var require_object = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/object.js"(exports2) { +// 
node_modules/@typespec/ts-http-runtime/dist/commonjs/util/random.js +var require_random2 = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/random.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isObject = isObject2; - function isObject2(input) { - return typeof input === "object" && input !== null && !Array.isArray(input) && !(input instanceof RegExp) && !(input instanceof Date); + exports2.getRandomIntegerInclusive = getRandomIntegerInclusive; + function getRandomIntegerInclusive(min, max) { + min = Math.ceil(min); + max = Math.floor(max); + const offset = Math.floor(Math.random() * (max - min + 1)); + return offset + min; } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/error.js -var require_error = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/error.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/delay.js +var require_delay = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/delay.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isError = isError; - var object_js_1 = require_object(); - function isError(e) { - if ((0, object_js_1.isObject)(e)) { - const hasName = typeof e.name === "string"; - const hasMessage = typeof e.message === "string"; - return hasName && hasMessage; - } - return false; + exports2.calculateRetryDelay = calculateRetryDelay; + var random_js_1 = require_random2(); + function calculateRetryDelay(retryAttempt, config) { + const exponentialDelay = config.retryDelayInMs * Math.pow(2, retryAttempt); + const clampedDelay = Math.min(config.maxRetryDelayInMs, exponentialDelay); + const retryAfterInMs = clampedDelay / 2 + (0, random_js_1.getRandomIntegerInclusive)(0, clampedDelay / 2); + return { retryAfterInMs }; } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/inspect.js -var require_inspect = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/inspect.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/helpers.js +var require_helpers = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/helpers.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.custom = void 0; - var node_util_1 = require("node:util"); - exports2.custom = node_util_1.inspect.custom; + exports2.delay = delay2; + exports2.parseHeaderValueAsNumber = parseHeaderValueAsNumber; + var AbortError_js_1 = require_AbortError(); + var StandardAbortMessage = "The operation was aborted."; + function delay2(delayInMs, value, options) { + return new Promise((resolve2, reject) => { + let timer = void 0; + let onAborted = void 0; + const rejectOnAbort = () => { + return reject(new AbortError_js_1.AbortError(options?.abortErrorMsg ? 
options?.abortErrorMsg : StandardAbortMessage)); + }; + const removeListeners = () => { + if (options?.abortSignal && onAborted) { + options.abortSignal.removeEventListener("abort", onAborted); + } + }; + onAborted = () => { + if (timer) { + clearTimeout(timer); + } + removeListeners(); + return rejectOnAbort(); + }; + if (options?.abortSignal && options.abortSignal.aborted) { + return rejectOnAbort(); + } + timer = setTimeout(() => { + removeListeners(); + resolve2(value); + }, delayInMs); + if (options?.abortSignal) { + options.abortSignal.addEventListener("abort", onAborted); + } + }); + } + function parseHeaderValueAsNumber(response, headerName) { + const value = response.headers.get(headerName); + if (!value) + return; + const valueAsNum = Number(value); + if (Number.isNaN(valueAsNum)) + return; + return valueAsNum; + } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/sanitizer.js -var require_sanitizer = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/sanitizer.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/throttlingRetryStrategy.js +var require_throttlingRetryStrategy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/throttlingRetryStrategy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Sanitizer = void 0; - var object_js_1 = require_object(); - var RedactedString = "REDACTED"; - var defaultAllowedHeaderNames = [ - "x-ms-client-request-id", - "x-ms-return-client-request-id", - "x-ms-useragent", - "x-ms-correlation-request-id", - "x-ms-request-id", - "client-request-id", - "ms-cv", - "return-client-request-id", - "traceparent", - "Access-Control-Allow-Credentials", - "Access-Control-Allow-Headers", - "Access-Control-Allow-Methods", - "Access-Control-Allow-Origin", - "Access-Control-Expose-Headers", - "Access-Control-Max-Age", - "Access-Control-Request-Headers", - "Access-Control-Request-Method", - "Origin", - "Accept", - "Accept-Encoding", - "Cache-Control", - "Connection", - "Content-Length", - "Content-Type", - "Date", - "ETag", - "Expires", - "If-Match", - "If-Modified-Since", - "If-None-Match", - "If-Unmodified-Since", - "Last-Modified", - "Pragma", - "Request-Id", - "Retry-After", - "Server", - "Transfer-Encoding", - "User-Agent", - "WWW-Authenticate" - ]; - var defaultAllowedQueryParameters = ["api-version"]; - var Sanitizer = class { - allowedHeaderNames; - allowedQueryParameters; - constructor({ additionalAllowedHeaderNames: allowedHeaderNames = [], additionalAllowedQueryParameters: allowedQueryParameters = [] } = {}) { - allowedHeaderNames = defaultAllowedHeaderNames.concat(allowedHeaderNames); - allowedQueryParameters = defaultAllowedQueryParameters.concat(allowedQueryParameters); - this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); - this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); - } - /** - * Sanitizes an object for logging. 
- * @param obj - The object to sanitize - * @returns - The sanitized object as a string - */ - sanitize(obj) { - const seen = /* @__PURE__ */ new Set(); - return JSON.stringify(obj, (key, value) => { - if (value instanceof Error) { - return { - ...value, - name: value.name, - message: value.message - }; - } - if (key === "headers") { - return this.sanitizeHeaders(value); - } else if (key === "url") { - return this.sanitizeUrl(value); - } else if (key === "query") { - return this.sanitizeQuery(value); - } else if (key === "body") { - return void 0; - } else if (key === "response") { - return void 0; - } else if (key === "operationSpec") { - return void 0; - } else if (Array.isArray(value) || (0, object_js_1.isObject)(value)) { - if (seen.has(value)) { - return "[Circular]"; - } - seen.add(value); - } - return value; - }, 2); - } - /** - * Sanitizes a URL for logging. - * @param value - The URL to sanitize - * @returns - The sanitized URL as a string - */ - sanitizeUrl(value) { - if (typeof value !== "string" || value === null || value === "") { - return value; - } - const url = new URL(value); - if (!url.search) { - return value; - } - for (const [key] of url.searchParams) { - if (!this.allowedQueryParameters.has(key.toLowerCase())) { - url.searchParams.set(key, RedactedString); + exports2.isThrottlingRetryResponse = isThrottlingRetryResponse; + exports2.throttlingRetryStrategy = throttlingRetryStrategy; + var helpers_js_1 = require_helpers(); + var RetryAfterHeader = "Retry-After"; + var AllRetryAfterHeaders = ["retry-after-ms", "x-ms-retry-after-ms", RetryAfterHeader]; + function getRetryAfterInMs(response) { + if (!(response && [429, 503].includes(response.status))) + return void 0; + try { + for (const header of AllRetryAfterHeaders) { + const retryAfterValue = (0, helpers_js_1.parseHeaderValueAsNumber)(response, header); + if (retryAfterValue === 0 || retryAfterValue) { + const multiplyingFactor = header === RetryAfterHeader ? 1e3 : 1; + return retryAfterValue * multiplyingFactor; } } - return url.toString(); + const retryAfterHeader = response.headers.get(RetryAfterHeader); + if (!retryAfterHeader) + return; + const date = Date.parse(retryAfterHeader); + const diff = date - Date.now(); + return Number.isFinite(diff) ? 
Math.max(0, diff) : void 0; + } catch { + return void 0; } - sanitizeHeaders(obj) { - const sanitized = {}; - for (const key of Object.keys(obj)) { - if (this.allowedHeaderNames.has(key.toLowerCase())) { - sanitized[key] = obj[key]; - } else { - sanitized[key] = RedactedString; + } + function isThrottlingRetryResponse(response) { + return Number.isFinite(getRetryAfterInMs(response)); + } + function throttlingRetryStrategy() { + return { + name: "throttlingRetryStrategy", + retry({ response }) { + const retryAfterInMs = getRetryAfterInMs(response); + if (!Number.isFinite(retryAfterInMs)) { + return { skipStrategy: true }; } + return { + retryAfterInMs + }; } - return sanitized; - } - sanitizeQuery(value) { - if (typeof value !== "object" || value === null) { - return value; - } - const sanitized = {}; - for (const k of Object.keys(value)) { - if (this.allowedQueryParameters.has(k.toLowerCase())) { - sanitized[k] = value[k]; - } else { - sanitized[k] = RedactedString; + }; + } + } +}); + +// node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/exponentialRetryStrategy.js +var require_exponentialRetryStrategy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/exponentialRetryStrategy.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.exponentialRetryStrategy = exponentialRetryStrategy; + exports2.isExponentialRetryResponse = isExponentialRetryResponse; + exports2.isSystemError = isSystemError; + var delay_js_1 = require_delay(); + var throttlingRetryStrategy_js_1 = require_throttlingRetryStrategy(); + var DEFAULT_CLIENT_RETRY_INTERVAL = 1e3; + var DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1e3 * 64; + function exponentialRetryStrategy(options = {}) { + const retryInterval = options.retryDelayInMs ?? DEFAULT_CLIENT_RETRY_INTERVAL; + const maxRetryInterval = options.maxRetryDelayInMs ?? 
DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return { + name: "exponentialRetryStrategy", + retry({ retryCount, response, responseError }) { + const matchedSystemError = isSystemError(responseError); + const ignoreSystemErrors = matchedSystemError && options.ignoreSystemErrors; + const isExponential = isExponentialRetryResponse(response); + const ignoreExponentialResponse = isExponential && options.ignoreHttpStatusCodes; + const unknownResponse = response && ((0, throttlingRetryStrategy_js_1.isThrottlingRetryResponse)(response) || !isExponential); + if (unknownResponse || ignoreExponentialResponse || ignoreSystemErrors) { + return { skipStrategy: true }; + } + if (responseError && !matchedSystemError && !isExponential) { + return { errorToThrow: responseError }; } + return (0, delay_js_1.calculateRetryDelay)(retryCount, { + retryDelayInMs: retryInterval, + maxRetryDelayInMs: maxRetryInterval + }); } - return sanitized; + }; + } + function isExponentialRetryResponse(response) { + return Boolean(response && response.status !== void 0 && (response.status >= 500 || response.status === 408) && response.status !== 501 && response.status !== 505); + } + function isSystemError(err) { + if (!err) { + return false; } - }; - exports2.Sanitizer = Sanitizer; + return err.code === "ETIMEDOUT" || err.code === "ESOCKETTIMEDOUT" || err.code === "ECONNREFUSED" || err.code === "ECONNRESET" || err.code === "ENOENT" || err.code === "ENOTFOUND"; + } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/restError.js -var require_restError = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/restError.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/retryPolicy.js +var require_retryPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/retryPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.RestError = void 0; - exports2.isRestError = isRestError; - var error_js_1 = require_error(); - var inspect_js_1 = require_inspect(); - var sanitizer_js_1 = require_sanitizer(); - var errorSanitizer = new sanitizer_js_1.Sanitizer(); - var RestError = class _RestError extends Error { - /** - * Something went wrong when making the request. - * This means the actual request failed for some reason, - * such as a DNS issue or the connection being lost. - */ - static REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; - /** - * This means that parsing the response from the server failed. - * It may have been malformed. - */ - static PARSE_ERROR = "PARSE_ERROR"; - /** - * The code of the error itself (use statics on RestError if possible.) - */ - code; - /** - * The HTTP status code of the request (if applicable.) - */ - statusCode; - /** - * The request that was made. - * This property is non-enumerable. - */ - request; - /** - * The response received (if any.) - * This property is non-enumerable. - */ - response; - /** - * Bonus property set by the throw site. - */ - details; - constructor(message, options = {}) { - super(message); - this.name = "RestError"; - this.code = options.code; - this.statusCode = options.statusCode; - Object.defineProperty(this, "request", { value: options.request, enumerable: false }); - Object.defineProperty(this, "response", { value: options.response, enumerable: false }); - const agent = this.request?.agent ? 
{ - maxFreeSockets: this.request.agent.maxFreeSockets, - maxSockets: this.request.agent.maxSockets - } : void 0; - Object.defineProperty(this, inspect_js_1.custom, { - value: () => { - return `RestError: ${this.message} - ${errorSanitizer.sanitize({ - ...this, - request: { ...this.request, agent }, - response: this.response - })}`; - }, - enumerable: false - }); - Object.setPrototypeOf(this, _RestError.prototype); - } - }; - exports2.RestError = RestError; - function isRestError(e) { - if (e instanceof RestError) { - return true; - } - return (0, error_js_1.isError)(e) && e.name === "RestError"; + exports2.retryPolicy = retryPolicy; + var helpers_js_1 = require_helpers(); + var AbortError_js_1 = require_AbortError(); + var logger_js_1 = require_logger(); + var constants_js_1 = require_constants13(); + var retryPolicyLogger = (0, logger_js_1.createClientLogger)("ts-http-runtime retryPolicy"); + var retryPolicyName = "retryPolicy"; + function retryPolicy(strategies, options = { maxRetries: constants_js_1.DEFAULT_RETRY_POLICY_COUNT }) { + const logger = options.logger || retryPolicyLogger; + return { + name: retryPolicyName, + async sendRequest(request2, next) { + let response; + let responseError; + let retryCount = -1; + retryRequest: while (true) { + retryCount += 1; + response = void 0; + responseError = void 0; + try { + logger.info(`Retry ${retryCount}: Attempting to send request`, request2.requestId); + response = await next(request2); + logger.info(`Retry ${retryCount}: Received a response from request`, request2.requestId); + } catch (e) { + logger.error(`Retry ${retryCount}: Received an error from request`, request2.requestId); + responseError = e; + if (!e || responseError.name !== "RestError") { + throw e; + } + response = responseError.response; + } + if (request2.abortSignal?.aborted) { + logger.error(`Retry ${retryCount}: Request aborted.`); + const abortError = new AbortError_js_1.AbortError(); + throw abortError; + } + if (retryCount >= (options.maxRetries ?? constants_js_1.DEFAULT_RETRY_POLICY_COUNT)) { + logger.info(`Retry ${retryCount}: Maximum retries reached. 
Returning the last received response, or throwing the last received error.`); + if (responseError) { + throw responseError; + } else if (response) { + return response; + } else { + throw new Error("Maximum retries reached with no response or error to throw"); + } + } + logger.info(`Retry ${retryCount}: Processing ${strategies.length} retry strategies.`); + strategiesLoop: for (const strategy of strategies) { + const strategyLogger = strategy.logger || logger; + strategyLogger.info(`Retry ${retryCount}: Processing retry strategy ${strategy.name}.`); + const modifiers = strategy.retry({ + retryCount, + response, + responseError + }); + if (modifiers.skipStrategy) { + strategyLogger.info(`Retry ${retryCount}: Skipped.`); + continue strategiesLoop; + } + const { errorToThrow, retryAfterInMs, redirectTo } = modifiers; + if (errorToThrow) { + strategyLogger.error(`Retry ${retryCount}: Retry strategy ${strategy.name} throws error:`, errorToThrow); + throw errorToThrow; + } + if (retryAfterInMs || retryAfterInMs === 0) { + strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} retries after ${retryAfterInMs}`); + await (0, helpers_js_1.delay)(retryAfterInMs, void 0, { abortSignal: request2.abortSignal }); + continue retryRequest; + } + if (redirectTo) { + strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} redirects to ${redirectTo}`); + request2.url = redirectTo; + continue retryRequest; + } + } + if (responseError) { + logger.info(`None of the retry strategies could work with the received error. Throwing it.`); + throw responseError; + } + if (response) { + logger.info(`None of the retry strategies could work with the received response. Returning it.`); + return response; + } + } + } + }; } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/bytesEncoding.js -var require_bytesEncoding = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/bytesEncoding.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/defaultRetryPolicy.js +var require_defaultRetryPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/defaultRetryPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.uint8ArrayToString = uint8ArrayToString; - exports2.stringToUint8Array = stringToUint8Array; - function uint8ArrayToString(bytes, format) { - return Buffer.from(bytes).toString(format); - } - function stringToUint8Array(value, format) { - return Buffer.from(value, format); + exports2.defaultRetryPolicyName = void 0; + exports2.defaultRetryPolicy = defaultRetryPolicy; + var exponentialRetryStrategy_js_1 = require_exponentialRetryStrategy(); + var throttlingRetryStrategy_js_1 = require_throttlingRetryStrategy(); + var retryPolicy_js_1 = require_retryPolicy(); + var constants_js_1 = require_constants13(); + exports2.defaultRetryPolicyName = "defaultRetryPolicy"; + function defaultRetryPolicy(options = {}) { + return { + name: exports2.defaultRetryPolicyName, + sendRequest: (0, retryPolicy_js_1.retryPolicy)([(0, throttlingRetryStrategy_js_1.throttlingRetryStrategy)(), (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(options)], { + maxRetries: options.maxRetries ?? 
constants_js_1.DEFAULT_RETRY_POLICY_COUNT + }).sendRequest + }; } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/log.js -var require_log3 = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/log.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/checkEnvironment.js +var require_checkEnvironment = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/checkEnvironment.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.logger = void 0; - var logger_js_1 = require_logger(); - exports2.logger = (0, logger_js_1.createClientLogger)("ts-http-runtime"); + exports2.isReactNative = exports2.isNodeRuntime = exports2.isNodeLike = exports2.isBun = exports2.isDeno = exports2.isWebWorker = exports2.isBrowser = void 0; + exports2.isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined"; + exports2.isWebWorker = typeof self === "object" && typeof self?.importScripts === "function" && (self.constructor?.name === "DedicatedWorkerGlobalScope" || self.constructor?.name === "ServiceWorkerGlobalScope" || self.constructor?.name === "SharedWorkerGlobalScope"); + exports2.isDeno = typeof Deno !== "undefined" && typeof Deno.version !== "undefined" && typeof Deno.version.deno !== "undefined"; + exports2.isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; + exports2.isNodeLike = typeof globalThis.process !== "undefined" && Boolean(globalThis.process.version) && Boolean(globalThis.process.versions?.node); + exports2.isNodeRuntime = exports2.isNodeLike && !exports2.isBun && !exports2.isDeno; + exports2.isReactNative = typeof navigator !== "undefined" && navigator?.product === "ReactNative"; } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/nodeHttpClient.js -var require_nodeHttpClient = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/nodeHttpClient.js"(exports2) { +// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/formDataPolicy.js +var require_formDataPolicy = __commonJS({ + "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/formDataPolicy.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getBodyLength = getBodyLength; - exports2.createNodeHttpClient = createNodeHttpClient; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var node_http_1 = tslib_1.__importDefault(require("node:http")); - var node_https_1 = tslib_1.__importDefault(require("node:https")); - var node_zlib_1 = tslib_1.__importDefault(require("node:zlib")); - var node_stream_1 = require("node:stream"); - var AbortError_js_1 = require_AbortError(); + exports2.formDataPolicyName = void 0; + exports2.formDataPolicy = formDataPolicy; + var bytesEncoding_js_1 = require_bytesEncoding(); + var checkEnvironment_js_1 = require_checkEnvironment(); var httpHeaders_js_1 = require_httpHeaders(); - var restError_js_1 = require_restError(); - var log_js_1 = require_log3(); - var sanitizer_js_1 = require_sanitizer(); - var DEFAULT_TLS_SETTINGS = {}; - function isReadableStream(body) { - return body && typeof body.pipe === "function"; - } - function isStreamComplete(stream) { - if (stream.readable === false) { - return Promise.resolve(); + exports2.formDataPolicyName = "formDataPolicy"; + function formDataToFormDataMap(formData) { + const formDataMap = {}; + for (const [key, value] of formData.entries()) { + formDataMap[key] ??= []; + 
formDataMap[key].push(value); } - return new Promise((resolve2) => { - const handler2 = () => { - resolve2(); - stream.removeListener("close", handler2); - stream.removeListener("end", handler2); - stream.removeListener("error", handler2); - }; - stream.on("close", handler2); - stream.on("end", handler2); - stream.on("error", handler2); - }); + return formDataMap; } - function isArrayBuffer(body) { - return body && typeof body.byteLength === "number"; + function formDataPolicy() { + return { + name: exports2.formDataPolicyName, + async sendRequest(request2, next) { + if (checkEnvironment_js_1.isNodeLike && typeof FormData !== "undefined" && request2.body instanceof FormData) { + request2.formData = formDataToFormDataMap(request2.body); + request2.body = void 0; + } + if (request2.formData) { + const contentType = request2.headers.get("Content-Type"); + if (contentType && contentType.indexOf("application/x-www-form-urlencoded") !== -1) { + request2.body = wwwFormUrlEncode(request2.formData); + } else { + await prepareFormData(request2.formData, request2); + } + request2.formData = void 0; + } + return next(request2); + } + }; } - var ReportTransform = class extends node_stream_1.Transform { - loadedBytes = 0; - progressCallback; - // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type - _transform(chunk, _encoding, callback) { - this.push(chunk); - this.loadedBytes += chunk.length; - try { - this.progressCallback({ loadedBytes: this.loadedBytes }); - callback(); - } catch (e) { - callback(e); + function wwwFormUrlEncode(formData) { + const urlSearchParams = new URLSearchParams(); + for (const [key, value] of Object.entries(formData)) { + if (Array.isArray(value)) { + for (const subValue of value) { + urlSearchParams.append(key, subValue.toString()); + } + } else { + urlSearchParams.append(key, value.toString()); } } - constructor(progressCallback) { - super(); - this.progressCallback = progressCallback; + return urlSearchParams.toString(); + } + async function prepareFormData(formData, request2) { + const contentType = request2.headers.get("Content-Type"); + if (contentType && !contentType.startsWith("multipart/form-data")) { + return; } - }; - var NodeHttpClient = class { - cachedHttpAgent; - cachedHttpsAgents = /* @__PURE__ */ new WeakMap(); - /** - * Makes a request over an underlying transport layer and returns the response. - * @param request - The request to be made. - */ - async sendRequest(request2) { - const abortController = new AbortController(); - let abortListener; - if (request2.abortSignal) { - if (request2.abortSignal.aborted) { - throw new AbortError_js_1.AbortError("The operation was aborted. Request has already been canceled."); + request2.headers.set("Content-Type", contentType ?? "multipart/form-data"); + const parts = []; + for (const [fieldName, values] of Object.entries(formData)) { + for (const value of Array.isArray(values) ? values : [values]) { + if (typeof value === "string") { + parts.push({ + headers: (0, httpHeaders_js_1.createHttpHeaders)({ + "Content-Disposition": `form-data; name="${fieldName}"` + }), + body: (0, bytesEncoding_js_1.stringToUint8Array)(value, "utf-8") + }); + } else if (value === void 0 || value === null || typeof value !== "object") { + throw new Error(`Unexpected value for key ${fieldName}: ${value}. 
Value should be serialized to string first.`); + } else { + const fileName = value.name || "blob"; + const headers = (0, httpHeaders_js_1.createHttpHeaders)(); + headers.set("Content-Disposition", `form-data; name="${fieldName}"; filename="${fileName}"`); + headers.set("Content-Type", value.type || "application/octet-stream"); + parts.push({ + headers, + body: value + }); } - abortListener = (event) => { - if (event.type === "abort") { - abortController.abort(); - } - }; - request2.abortSignal.addEventListener("abort", abortListener); - } - let timeoutId; - if (request2.timeout > 0) { - timeoutId = setTimeout(() => { - const sanitizer = new sanitizer_js_1.Sanitizer(); - log_js_1.logger.info(`request to '${sanitizer.sanitizeUrl(request2.url)}' timed out. canceling...`); - abortController.abort(); - }, request2.timeout); } - const acceptEncoding = request2.headers.get("Accept-Encoding"); - const shouldDecompress = acceptEncoding?.includes("gzip") || acceptEncoding?.includes("deflate"); - let body = typeof request2.body === "function" ? request2.body() : request2.body; - if (body && !request2.headers.has("Content-Length")) { - const bodyLength = getBodyLength(body); - if (bodyLength !== null) { - request2.headers.set("Content-Length", bodyLength); - } + } + request2.multipartBody = { parts }; + } + } +}); + +// node_modules/ms/index.js +var require_ms = __commonJS({ + "node_modules/ms/index.js"(exports2, module2) { + var s = 1e3; + var m = s * 60; + var h = m * 60; + var d = h * 24; + var w = d * 7; + var y = d * 365.25; + module2.exports = function(val, options) { + options = options || {}; + var type2 = typeof val; + if (type2 === "string" && val.length > 0) { + return parse2(val); + } else if (type2 === "number" && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + "val is not a non-empty string or a valid number. 
val=" + JSON.stringify(val) + ); + }; + function parse2(str2) { + str2 = String(str2); + if (str2.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str2 + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type2 = (match[2] || "ms").toLowerCase(); + switch (type2) { + case "years": + case "year": + case "yrs": + case "yr": + case "y": + return n * y; + case "weeks": + case "week": + case "w": + return n * w; + case "days": + case "day": + case "d": + return n * d; + case "hours": + case "hour": + case "hrs": + case "hr": + case "h": + return n * h; + case "minutes": + case "minute": + case "mins": + case "min": + case "m": + return n * m; + case "seconds": + case "second": + case "secs": + case "sec": + case "s": + return n * s; + case "milliseconds": + case "millisecond": + case "msecs": + case "msec": + case "ms": + return n; + default: + return void 0; + } + } + function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + "d"; + } + if (msAbs >= h) { + return Math.round(ms / h) + "h"; + } + if (msAbs >= m) { + return Math.round(ms / m) + "m"; + } + if (msAbs >= s) { + return Math.round(ms / s) + "s"; + } + return ms + "ms"; + } + function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, "day"); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, "hour"); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, "minute"); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, "second"); + } + return ms + " ms"; + } + function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + " " + name + (isPlural ? "s" : ""); + } + } +}); + +// node_modules/debug/src/common.js +var require_common = __commonJS({ + "node_modules/debug/src/common.js"(exports2, module2) { + function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce2; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = require_ms(); + createDebug.destroy = destroy; + Object.keys(env).forEach((key) => { + createDebug[key] = env[key]; + }); + createDebug.names = []; + createDebug.skips = []; + createDebug.formatters = {}; + function selectColor(namespace) { + let hash = 0; + for (let i = 0; i < namespace.length; i++) { + hash = (hash << 5) - hash + namespace.charCodeAt(i); + hash |= 0; } - let responseStream; - try { - if (body && request2.onUploadProgress) { - const onUploadProgress = request2.onUploadProgress; - const uploadReportStream = new ReportTransform(onUploadProgress); - uploadReportStream.on("error", (e) => { - log_js_1.logger.error("Error in upload progress", e); - }); - if (isReadableStream(body)) { - body.pipe(uploadReportStream); - } else { - uploadReportStream.end(body); - } - body = uploadReportStream; - } - const res = await this.makeRequest(request2, abortController, body); - if (timeoutId !== void 0) { - clearTimeout(timeoutId); - } - const headers = getResponseHeaders(res); - const status = res.statusCode ?? 0; - const response = { - status, - headers, - request: request2 - }; - if (request2.method === "HEAD") { - res.resume(); - return response; - } - responseStream = shouldDecompress ? 
getDecodedResponseStream(res, headers) : res; - const onDownloadProgress = request2.onDownloadProgress; - if (onDownloadProgress) { - const downloadReportStream = new ReportTransform(onDownloadProgress); - downloadReportStream.on("error", (e) => { - log_js_1.logger.error("Error in download progress", e); - }); - responseStream.pipe(downloadReportStream); - responseStream = downloadReportStream; + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + function debug5(...args) { + if (!debug5.enabled) { + return; } - if ( - // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code - request2.streamResponseStatusCodes?.has(Number.POSITIVE_INFINITY) || request2.streamResponseStatusCodes?.has(response.status) - ) { - response.readableStreamBody = responseStream; - } else { - response.bodyAsText = await streamToText(responseStream); + const self2 = debug5; + const curr = Number(/* @__PURE__ */ new Date()); + const ms = curr - (prevTime || curr); + self2.diff = ms; + self2.prev = prevTime; + self2.curr = curr; + prevTime = curr; + args[0] = createDebug.coerce(args[0]); + if (typeof args[0] !== "string") { + args.unshift("%O"); } - return response; - } finally { - if (request2.abortSignal && abortListener) { - let uploadStreamDone = Promise.resolve(); - if (isReadableStream(body)) { - uploadStreamDone = isStreamComplete(body); + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + if (match === "%%") { + return "%"; } - let downloadStreamDone = Promise.resolve(); - if (isReadableStream(responseStream)) { - downloadStreamDone = isStreamComplete(responseStream); + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === "function") { + const val = args[index]; + match = formatter.call(self2, val); + args.splice(index, 1); + index--; } - Promise.all([uploadStreamDone, downloadStreamDone]).then(() => { - if (abortListener) { - request2.abortSignal?.removeEventListener("abort", abortListener); - } - }).catch((e) => { - log_js_1.logger.warning("Error when cleaning up abortListener on httpRequest", e); - }); + return match; + }); + createDebug.formatArgs.call(self2, args); + const logFn = self2.log || createDebug.log; + logFn.apply(self2, args); + } + debug5.namespace = namespace; + debug5.useColors = createDebug.useColors(); + debug5.color = createDebug.selectColor(namespace); + debug5.extend = extend3; + debug5.destroy = createDebug.destroy; + Object.defineProperty(debug5, "enabled", { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + return enabledCache; + }, + set: (v) => { + enableOverride = v; } + }); + if (typeof createDebug.init === "function") { + createDebug.init(debug5); } + return debug5; } - makeRequest(request2, abortController, body) { - const url = new URL(request2.url); - const isInsecure = url.protocol !== "https:"; - if (isInsecure && !request2.allowInsecureConnection) { - throw new Error(`Cannot connect to ${request2.url} while allowInsecureConnection is false.`); + function extend3(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === "undefined" ? 
":" : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + createDebug.names = []; + createDebug.skips = []; + const split = (typeof namespaces === "string" ? namespaces : "").trim().replace(/\s+/g, ",").split(",").filter(Boolean); + for (const ns of split) { + if (ns[0] === "-") { + createDebug.skips.push(ns.slice(1)); + } else { + createDebug.names.push(ns); + } } - const agent = request2.agent ?? this.getOrCreateAgent(request2, isInsecure); - const options = { - agent, - hostname: url.hostname, - path: `${url.pathname}${url.search}`, - port: url.port, - method: request2.method, - headers: request2.headers.toJSON({ preserveCase: true }), - ...request2.requestOverrides - }; - return new Promise((resolve2, reject) => { - const req = isInsecure ? node_http_1.default.request(options, resolve2) : node_https_1.default.request(options, resolve2); - req.once("error", (err) => { - reject(new restError_js_1.RestError(err.message, { code: err.code ?? restError_js_1.RestError.REQUEST_SEND_ERROR, request: request2 })); - }); - abortController.signal.addEventListener("abort", () => { - const abortError = new AbortError_js_1.AbortError("The operation was aborted. Rejecting from abort signal callback while making request."); - req.destroy(abortError); - reject(abortError); - }); - if (body && isReadableStream(body)) { - body.pipe(req); - } else if (body) { - if (typeof body === "string" || Buffer.isBuffer(body)) { - req.end(body); - } else if (isArrayBuffer(body)) { - req.end(ArrayBuffer.isView(body) ? Buffer.from(body.buffer) : Buffer.from(body)); + } + function matchesTemplate(search, template) { + let searchIndex = 0; + let templateIndex = 0; + let starIndex = -1; + let matchIndex = 0; + while (searchIndex < search.length) { + if (templateIndex < template.length && (template[templateIndex] === search[searchIndex] || template[templateIndex] === "*")) { + if (template[templateIndex] === "*") { + starIndex = templateIndex; + matchIndex = searchIndex; + templateIndex++; } else { - log_js_1.logger.error("Unrecognized body type", body); - reject(new restError_js_1.RestError("Unrecognized body type")); + searchIndex++; + templateIndex++; } + } else if (starIndex !== -1) { + templateIndex = starIndex + 1; + matchIndex++; + searchIndex = matchIndex; } else { - req.end(); + return false; } - }); + } + while (templateIndex < template.length && template[templateIndex] === "*") { + templateIndex++; + } + return templateIndex === template.length; } - getOrCreateAgent(request2, isInsecure) { - const disableKeepAlive = request2.disableKeepAlive; - if (isInsecure) { - if (disableKeepAlive) { - return node_http_1.default.globalAgent; - } - if (!this.cachedHttpAgent) { - this.cachedHttpAgent = new node_http_1.default.Agent({ keepAlive: true }); - } - return this.cachedHttpAgent; - } else { - if (disableKeepAlive && !request2.tlsSettings) { - return node_https_1.default.globalAgent; - } - const tlsSettings = request2.tlsSettings ?? 
DEFAULT_TLS_SETTINGS; - let agent = this.cachedHttpsAgents.get(tlsSettings); - if (agent && agent.options.keepAlive === !disableKeepAlive) { - return agent; + function disable() { + const namespaces = [ + ...createDebug.names, + ...createDebug.skips.map((namespace) => "-" + namespace) + ].join(","); + createDebug.enable(""); + return namespaces; + } + function enabled(name) { + for (const skip of createDebug.skips) { + if (matchesTemplate(name, skip)) { + return false; } - log_js_1.logger.info("No cached TLS Agent exist, creating a new Agent"); - agent = new node_https_1.default.Agent({ - // keepAlive is true if disableKeepAlive is false. - keepAlive: !disableKeepAlive, - // Since we are spreading, if no tslSettings were provided, nothing is added to the agent options. - ...tlsSettings - }); - this.cachedHttpsAgents.set(tlsSettings, agent); - return agent; } - } - }; - function getResponseHeaders(res) { - const headers = (0, httpHeaders_js_1.createHttpHeaders)(); - for (const header of Object.keys(res.headers)) { - const value = res.headers[header]; - if (Array.isArray(value)) { - if (value.length > 0) { - headers.set(header, value[0]); + for (const ns of createDebug.names) { + if (matchesTemplate(name, ns)) { + return true; } - } else if (value) { - headers.set(header, value); } + return false; } - return headers; - } - function getDecodedResponseStream(stream, headers) { - const contentEncoding = headers.get("Content-Encoding"); - if (contentEncoding === "gzip") { - const unzip = node_zlib_1.default.createGunzip(); - stream.pipe(unzip); - return unzip; - } else if (contentEncoding === "deflate") { - const inflate = node_zlib_1.default.createInflate(); - stream.pipe(inflate); - return inflate; + function coerce2(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; } - return stream; - } - function streamToText(stream) { - return new Promise((resolve2, reject) => { - const buffer = []; - stream.on("data", (chunk) => { - if (Buffer.isBuffer(chunk)) { - buffer.push(chunk); - } else { - buffer.push(Buffer.from(chunk)); - } - }); - stream.on("end", () => { - resolve2(Buffer.concat(buffer).toString("utf8")); - }); - stream.on("error", (e) => { - if (e && e?.name === "AbortError") { - reject(e); - } else { - reject(new restError_js_1.RestError(`Error reading response as text: ${e.message}`, { - code: restError_js_1.RestError.PARSE_ERROR - })); - } - }); - }); - } - function getBodyLength(body) { - if (!body) { - return 0; - } else if (Buffer.isBuffer(body)) { - return body.length; - } else if (isReadableStream(body)) { - return null; - } else if (isArrayBuffer(body)) { - return body.byteLength; - } else if (typeof body === "string") { - return Buffer.from(body).length; - } else { - return null; + function destroy() { + console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`."); } + createDebug.enable(createDebug.load()); + return createDebug; } - function createNodeHttpClient() { - return new NodeHttpClient(); - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/defaultHttpClient.js -var require_defaultHttpClient = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/defaultHttpClient.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createDefaultHttpClient = createDefaultHttpClient; - var nodeHttpClient_js_1 = require_nodeHttpClient(); - function createDefaultHttpClient() { - return (0, nodeHttpClient_js_1.createNodeHttpClient)(); - } + module2.exports = setup; } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/logPolicy.js -var require_logPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/logPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.logPolicyName = void 0; - exports2.logPolicy = logPolicy; - var log_js_1 = require_log3(); - var sanitizer_js_1 = require_sanitizer(); - exports2.logPolicyName = "logPolicy"; - function logPolicy(options = {}) { - const logger = options.logger ?? log_js_1.logger.info; - const sanitizer = new sanitizer_js_1.Sanitizer({ - additionalAllowedHeaderNames: options.additionalAllowedHeaderNames, - additionalAllowedQueryParameters: options.additionalAllowedQueryParameters - }); - return { - name: exports2.logPolicyName, - async sendRequest(request2, next) { - if (!logger.enabled) { - return next(request2); - } - logger(`Request: ${sanitizer.sanitize(request2)}`); - const response = await next(request2); - logger(`Response status code: ${response.status}`); - logger(`Headers: ${sanitizer.sanitize(response.headers)}`); - return response; +// node_modules/debug/src/browser.js +var require_browser = __commonJS({ + "node_modules/debug/src/browser.js"(exports2, module2) { + exports2.formatArgs = formatArgs; + exports2.save = save; + exports2.load = load2; + exports2.useColors = useColors; + exports2.storage = localstorage(); + exports2.destroy = /* @__PURE__ */ (() => { + let warned = false; + return () => { + if (!warned) { + warned = true; + console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`."); } }; + })(); + exports2.colors = [ + "#0000CC", + "#0000FF", + "#0033CC", + "#0033FF", + "#0066CC", + "#0066FF", + "#0099CC", + "#0099FF", + "#00CC00", + "#00CC33", + "#00CC66", + "#00CC99", + "#00CCCC", + "#00CCFF", + "#3300CC", + "#3300FF", + "#3333CC", + "#3333FF", + "#3366CC", + "#3366FF", + "#3399CC", + "#3399FF", + "#33CC00", + "#33CC33", + "#33CC66", + "#33CC99", + "#33CCCC", + "#33CCFF", + "#6600CC", + "#6600FF", + "#6633CC", + "#6633FF", + "#66CC00", + "#66CC33", + "#9900CC", + "#9900FF", + "#9933CC", + "#9933FF", + "#99CC00", + "#99CC33", + "#CC0000", + "#CC0033", + "#CC0066", + "#CC0099", + "#CC00CC", + "#CC00FF", + "#CC3300", + "#CC3333", + "#CC3366", + "#CC3399", + "#CC33CC", + "#CC33FF", + "#CC6600", + "#CC6633", + "#CC9900", + "#CC9933", + "#CCCC00", + "#CCCC33", + "#FF0000", + "#FF0033", + "#FF0066", + "#FF0099", + "#FF00CC", + "#FF00FF", + "#FF3300", + "#FF3333", + "#FF3366", + "#FF3399", + "#FF33CC", + "#FF33FF", + "#FF6600", + "#FF6633", + "#FF9900", + "#FF9933", + "#FFCC00", + "#FFCC33" + ]; + function useColors() { + if (typeof window !== "undefined" && window.process && (window.process.type === "renderer" || window.process.__nwjs)) { + return true; + } + if (typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + let m; + return typeof document !== "undefined" && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || // Is firebug? http://stackoverflow.com/a/398120/376773 + typeof window !== "undefined" && window.console && (window.console.firebug || window.console.exception && window.console.table) || // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + typeof navigator !== "undefined" && navigator.userAgent && (m = navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/)) && parseInt(m[1], 10) >= 31 || // Double check webkit in userAgent just in case we are in a worker + typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/); } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/redirectPolicy.js -var require_redirectPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/redirectPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.redirectPolicyName = void 0; - exports2.redirectPolicy = redirectPolicy; - exports2.redirectPolicyName = "redirectPolicy"; - var allowedRedirect = ["GET", "HEAD"]; - function redirectPolicy(options = {}) { - const { maxRetries = 20 } = options; - return { - name: exports2.redirectPolicyName, - async sendRequest(request2, next) { - const response = await next(request2); - return handleRedirect(next, response, maxRetries); + function formatArgs(args) { + args[0] = (this.useColors ? "%c" : "") + this.namespace + (this.useColors ? " %c" : " ") + args[0] + (this.useColors ? 
"%c " : " ") + "+" + module2.exports.humanize(this.diff); + if (!this.useColors) { + return; + } + const c = "color: " + this.color; + args.splice(1, 0, c, "color: inherit"); + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, (match) => { + if (match === "%%") { + return; } - }; + index++; + if (match === "%c") { + lastC = index; + } + }); + args.splice(lastC, 0, c); } - async function handleRedirect(next, response, maxRetries, currentRetries = 0) { - const { request: request2, status, headers } = response; - const locationHeader = headers.get("location"); - if (locationHeader && (status === 300 || status === 301 && allowedRedirect.includes(request2.method) || status === 302 && allowedRedirect.includes(request2.method) || status === 303 && request2.method === "POST" || status === 307) && currentRetries < maxRetries) { - const url = new URL(locationHeader, request2.url); - request2.url = url.toString(); - if (status === 303) { - request2.method = "GET"; - request2.headers.delete("Content-Length"); - delete request2.body; + exports2.log = console.debug || console.log || (() => { + }); + function save(namespaces) { + try { + if (namespaces) { + exports2.storage.setItem("debug", namespaces); + } else { + exports2.storage.removeItem("debug"); } - request2.headers.delete("Authorization"); - const res = await next(request2); - return handleRedirect(next, res, maxRetries, currentRetries + 1); + } catch (error3) { } - return response; } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgentPlatform.js -var require_userAgentPlatform = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgentPlatform.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getHeaderName = getHeaderName; - exports2.setPlatformSpecificData = setPlatformSpecificData; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var node_os_1 = tslib_1.__importDefault(require("node:os")); - var node_process_1 = tslib_1.__importDefault(require("node:process")); - function getHeaderName() { - return "User-Agent"; + function load2() { + let r; + try { + r = exports2.storage.getItem("debug") || exports2.storage.getItem("DEBUG"); + } catch (error3) { + } + if (!r && typeof process !== "undefined" && "env" in process) { + r = process.env.DEBUG; + } + return r; } - async function setPlatformSpecificData(map2) { - if (node_process_1.default && node_process_1.default.versions) { - const osInfo = `${node_os_1.default.type()} ${node_os_1.default.release()}; ${node_os_1.default.arch()}`; - const versions = node_process_1.default.versions; - if (versions.bun) { - map2.set("Bun", `${versions.bun} (${osInfo})`); - } else if (versions.deno) { - map2.set("Deno", `${versions.deno} (${osInfo})`); - } else if (versions.node) { - map2.set("Node", `${versions.node} (${osInfo})`); - } + function localstorage() { + try { + return localStorage; + } catch (error3) { } } + module2.exports = require_common()(exports2); + var { formatters } = module2.exports; + formatters.j = function(v) { + try { + return JSON.stringify(v); + } catch (error3) { + return "[UnexpectedJSONParseError]: " + error3.message; + } + }; } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/constants.js -var require_constants13 = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/constants.js"(exports2) { +// node_modules/has-flag/index.js +var require_has_flag = __commonJS({ + "node_modules/has-flag/index.js"(exports2, 
module2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.DEFAULT_RETRY_POLICY_COUNT = exports2.SDK_VERSION = void 0; - exports2.SDK_VERSION = "0.3.2"; - exports2.DEFAULT_RETRY_POLICY_COUNT = 3; + module2.exports = (flag, argv = process.argv) => { + const prefix = flag.startsWith("-") ? "" : flag.length === 1 ? "-" : "--"; + const position = argv.indexOf(prefix + flag); + const terminatorPosition = argv.indexOf("--"); + return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); + }; } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgent.js -var require_userAgent = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgent.js"(exports2) { +// node_modules/supports-color/index.js +var require_supports_color = __commonJS({ + "node_modules/supports-color/index.js"(exports2, module2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getUserAgentHeaderName = getUserAgentHeaderName; - exports2.getUserAgentValue = getUserAgentValue; - var userAgentPlatform_js_1 = require_userAgentPlatform(); - var constants_js_1 = require_constants13(); - function getUserAgentString(telemetryInfo) { - const parts = []; - for (const [key, value] of telemetryInfo) { - const token = value ? `${key}/${value}` : key; - parts.push(token); - } - return parts.join(" "); - } - function getUserAgentHeaderName() { - return (0, userAgentPlatform_js_1.getHeaderName)(); + var os2 = require("os"); + var tty = require("tty"); + var hasFlag = require_has_flag(); + var { env } = process; + var forceColor; + if (hasFlag("no-color") || hasFlag("no-colors") || hasFlag("color=false") || hasFlag("color=never")) { + forceColor = 0; + } else if (hasFlag("color") || hasFlag("colors") || hasFlag("color=true") || hasFlag("color=always")) { + forceColor = 1; } - async function getUserAgentValue(prefix) { - const runtimeInfo = /* @__PURE__ */ new Map(); - runtimeInfo.set("ts-http-runtime", constants_js_1.SDK_VERSION); - await (0, userAgentPlatform_js_1.setPlatformSpecificData)(runtimeInfo); - const defaultAgent = getUserAgentString(runtimeInfo); - const userAgentValue = prefix ? `${prefix} ${defaultAgent}` : defaultAgent; - return userAgentValue; + if ("FORCE_COLOR" in env) { + if (env.FORCE_COLOR === "true") { + forceColor = 1; + } else if (env.FORCE_COLOR === "false") { + forceColor = 0; + } else { + forceColor = env.FORCE_COLOR.length === 0 ? 
1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3); + } } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/userAgentPolicy.js -var require_userAgentPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/userAgentPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.userAgentPolicyName = void 0; - exports2.userAgentPolicy = userAgentPolicy; - var userAgent_js_1 = require_userAgent(); - var UserAgentHeaderName = (0, userAgent_js_1.getUserAgentHeaderName)(); - exports2.userAgentPolicyName = "userAgentPolicy"; - function userAgentPolicy(options = {}) { - const userAgentValue = (0, userAgent_js_1.getUserAgentValue)(options.userAgentPrefix); + function translateLevel(level) { + if (level === 0) { + return false; + } return { - name: exports2.userAgentPolicyName, - async sendRequest(request2, next) { - if (!request2.headers.has(UserAgentHeaderName)) { - request2.headers.set(UserAgentHeaderName, await userAgentValue); - } - return next(request2); - } + level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3 }; } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/decompressResponsePolicy.js -var require_decompressResponsePolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/decompressResponsePolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.decompressResponsePolicyName = void 0; - exports2.decompressResponsePolicy = decompressResponsePolicy; - exports2.decompressResponsePolicyName = "decompressResponsePolicy"; - function decompressResponsePolicy() { - return { - name: exports2.decompressResponsePolicyName, - async sendRequest(request2, next) { - if (request2.method !== "HEAD") { - request2.headers.set("Accept-Encoding", "gzip,deflate"); - } - return next(request2); + function supportsColor(haveStream, streamIsTTY) { + if (forceColor === 0) { + return 0; + } + if (hasFlag("color=16m") || hasFlag("color=full") || hasFlag("color=truecolor")) { + return 3; + } + if (hasFlag("color=256")) { + return 2; + } + if (haveStream && !streamIsTTY && forceColor === void 0) { + return 0; + } + const min = forceColor || 0; + if (env.TERM === "dumb") { + return min; + } + if (process.platform === "win32") { + const osRelease = os2.release().split("."); + if (Number(osRelease[0]) >= 10 && Number(osRelease[2]) >= 10586) { + return Number(osRelease[2]) >= 14931 ? 
3 : 2; } - }; - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/random.js -var require_random2 = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/random.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getRandomIntegerInclusive = getRandomIntegerInclusive; - function getRandomIntegerInclusive(min, max) { - min = Math.ceil(min); - max = Math.floor(max); - const offset = Math.floor(Math.random() * (max - min + 1)); - return offset + min; - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/delay.js -var require_delay = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/delay.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.calculateRetryDelay = calculateRetryDelay; - var random_js_1 = require_random2(); - function calculateRetryDelay(retryAttempt, config) { - const exponentialDelay = config.retryDelayInMs * Math.pow(2, retryAttempt); - const clampedDelay = Math.min(config.maxRetryDelayInMs, exponentialDelay); - const retryAfterInMs = clampedDelay / 2 + (0, random_js_1.getRandomIntegerInclusive)(0, clampedDelay / 2); - return { retryAfterInMs }; - } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/helpers.js -var require_helpers2 = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/helpers.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.delay = delay2; - exports2.parseHeaderValueAsNumber = parseHeaderValueAsNumber; - var AbortError_js_1 = require_AbortError(); - var StandardAbortMessage = "The operation was aborted."; - function delay2(delayInMs, value, options) { - return new Promise((resolve2, reject) => { - let timer = void 0; - let onAborted = void 0; - const rejectOnAbort = () => { - return reject(new AbortError_js_1.AbortError(options?.abortErrorMsg ? options?.abortErrorMsg : StandardAbortMessage)); - }; - const removeListeners = () => { - if (options?.abortSignal && onAborted) { - options.abortSignal.removeEventListener("abort", onAborted); - } - }; - onAborted = () => { - if (timer) { - clearTimeout(timer); - } - removeListeners(); - return rejectOnAbort(); - }; - if (options?.abortSignal && options.abortSignal.aborted) { - return rejectOnAbort(); + return 1; + } + if ("CI" in env) { + if (["TRAVIS", "CIRCLECI", "APPVEYOR", "GITLAB_CI", "GITHUB_ACTIONS", "BUILDKITE"].some((sign) => sign in env) || env.CI_NAME === "codeship") { + return 1; } - timer = setTimeout(() => { - removeListeners(); - resolve2(value); - }, delayInMs); - if (options?.abortSignal) { - options.abortSignal.addEventListener("abort", onAborted); + return min; + } + if ("TEAMCITY_VERSION" in env) { + return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; + } + if (env.COLORTERM === "truecolor") { + return 3; + } + if ("TERM_PROGRAM" in env) { + const version = parseInt((env.TERM_PROGRAM_VERSION || "").split(".")[0], 10); + switch (env.TERM_PROGRAM) { + case "iTerm.app": + return version >= 3 ? 
3 : 2; + case "Apple_Terminal": + return 2; } - }); + } + if (/-256(color)?$/i.test(env.TERM)) { + return 2; + } + if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { + return 1; + } + if ("COLORTERM" in env) { + return 1; + } + return min; } - function parseHeaderValueAsNumber(response, headerName) { - const value = response.headers.get(headerName); - if (!value) - return; - const valueAsNum = Number(value); - if (Number.isNaN(valueAsNum)) - return; - return valueAsNum; + function getSupportLevel(stream) { + const level = supportsColor(stream, stream && stream.isTTY); + return translateLevel(level); } + module2.exports = { + supportsColor: getSupportLevel, + stdout: translateLevel(supportsColor(true, tty.isatty(1))), + stderr: translateLevel(supportsColor(true, tty.isatty(2))) + }; } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/throttlingRetryStrategy.js -var require_throttlingRetryStrategy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/throttlingRetryStrategy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isThrottlingRetryResponse = isThrottlingRetryResponse; - exports2.throttlingRetryStrategy = throttlingRetryStrategy; - var helpers_js_1 = require_helpers2(); - var RetryAfterHeader = "Retry-After"; - var AllRetryAfterHeaders = ["retry-after-ms", "x-ms-retry-after-ms", RetryAfterHeader]; - function getRetryAfterInMs(response) { - if (!(response && [429, 503].includes(response.status))) - return void 0; - try { - for (const header of AllRetryAfterHeaders) { - const retryAfterValue = (0, helpers_js_1.parseHeaderValueAsNumber)(response, header); - if (retryAfterValue === 0 || retryAfterValue) { - const multiplyingFactor = header === RetryAfterHeader ? 1e3 : 1; - return retryAfterValue * multiplyingFactor; - } - } - const retryAfterHeader = response.headers.get(RetryAfterHeader); - if (!retryAfterHeader) - return; - const date = Date.parse(retryAfterHeader); - const diff = date - Date.now(); - return Number.isFinite(diff) ? Math.max(0, diff) : void 0; - } catch { - return void 0; +// node_modules/debug/src/node.js +var require_node = __commonJS({ + "node_modules/debug/src/node.js"(exports2, module2) { + var tty = require("tty"); + var util = require("util"); + exports2.init = init; + exports2.log = log; + exports2.formatArgs = formatArgs; + exports2.save = save; + exports2.load = load2; + exports2.useColors = useColors; + exports2.destroy = util.deprecate( + () => { + }, + "Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`." 
+ ); + exports2.colors = [6, 2, 3, 4, 5, 1]; + try { + const supportsColor = require_supports_color(); + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports2.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; } + } catch (error3) { } - function isThrottlingRetryResponse(response) { - return Number.isFinite(getRetryAfterInMs(response)); + exports2.inspectOpts = Object.keys(process.env).filter((key) => { + return /^debug_/i.test(key); + }).reduce((obj, key) => { + const prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === "null") { + val = null; + } else { + val = Number(val); + } + obj[prop] = val; + return obj; + }, {}); + function useColors() { + return "colors" in exports2.inspectOpts ? Boolean(exports2.inspectOpts.colors) : tty.isatty(process.stderr.fd); } - function throttlingRetryStrategy() { - return { - name: "throttlingRetryStrategy", - retry({ response }) { - const retryAfterInMs = getRetryAfterInMs(response); - if (!Number.isFinite(retryAfterInMs)) { - return { skipStrategy: true }; - } - return { - retryAfterInMs - }; - } - }; + function formatArgs(args) { + const { namespace: name, useColors: useColors2 } = this; + if (useColors2) { + const c = this.color; + const colorCode = "\x1B[3" + (c < 8 ? c : "8;5;" + c); + const prefix = ` ${colorCode};1m${name} \x1B[0m`; + args[0] = prefix + args[0].split("\n").join("\n" + prefix); + args.push(colorCode + "m+" + module2.exports.humanize(this.diff) + "\x1B[0m"); + } else { + args[0] = getDate() + name + " " + args[0]; + } } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/exponentialRetryStrategy.js -var require_exponentialRetryStrategy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/exponentialRetryStrategy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.exponentialRetryStrategy = exponentialRetryStrategy; - exports2.isExponentialRetryResponse = isExponentialRetryResponse; - exports2.isSystemError = isSystemError; - var delay_js_1 = require_delay(); - var throttlingRetryStrategy_js_1 = require_throttlingRetryStrategy(); - var DEFAULT_CLIENT_RETRY_INTERVAL = 1e3; - var DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1e3 * 64; - function exponentialRetryStrategy(options = {}) { - const retryInterval = options.retryDelayInMs ?? DEFAULT_CLIENT_RETRY_INTERVAL; - const maxRetryInterval = options.maxRetryDelayInMs ?? 
DEFAULT_CLIENT_MAX_RETRY_INTERVAL; - return { - name: "exponentialRetryStrategy", - retry({ retryCount, response, responseError }) { - const matchedSystemError = isSystemError(responseError); - const ignoreSystemErrors = matchedSystemError && options.ignoreSystemErrors; - const isExponential = isExponentialRetryResponse(response); - const ignoreExponentialResponse = isExponential && options.ignoreHttpStatusCodes; - const unknownResponse = response && ((0, throttlingRetryStrategy_js_1.isThrottlingRetryResponse)(response) || !isExponential); - if (unknownResponse || ignoreExponentialResponse || ignoreSystemErrors) { - return { skipStrategy: true }; - } - if (responseError && !matchedSystemError && !isExponential) { - return { errorToThrow: responseError }; - } - return (0, delay_js_1.calculateRetryDelay)(retryCount, { - retryDelayInMs: retryInterval, - maxRetryDelayInMs: maxRetryInterval - }); - } - }; + function getDate() { + if (exports2.inspectOpts.hideDate) { + return ""; + } + return (/* @__PURE__ */ new Date()).toISOString() + " "; } - function isExponentialRetryResponse(response) { - return Boolean(response && response.status !== void 0 && (response.status >= 500 || response.status === 408) && response.status !== 501 && response.status !== 505); + function log(...args) { + return process.stderr.write(util.formatWithOptions(exports2.inspectOpts, ...args) + "\n"); } - function isSystemError(err) { - if (!err) { - return false; + function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + delete process.env.DEBUG; } - return err.code === "ETIMEDOUT" || err.code === "ESOCKETTIMEDOUT" || err.code === "ECONNREFUSED" || err.code === "ECONNRESET" || err.code === "ENOENT" || err.code === "ENOTFOUND"; } - } -}); - -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/retryPolicy.js -var require_retryPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/retryPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.retryPolicy = retryPolicy; - var helpers_js_1 = require_helpers2(); - var AbortError_js_1 = require_AbortError(); - var logger_js_1 = require_logger(); - var constants_js_1 = require_constants13(); - var retryPolicyLogger = (0, logger_js_1.createClientLogger)("ts-http-runtime retryPolicy"); - var retryPolicyName = "retryPolicy"; - function retryPolicy(strategies, options = { maxRetries: constants_js_1.DEFAULT_RETRY_POLICY_COUNT }) { - const logger = options.logger || retryPolicyLogger; - return { - name: retryPolicyName, - async sendRequest(request2, next) { - let response; - let responseError; - let retryCount = -1; - retryRequest: while (true) { - retryCount += 1; - response = void 0; - responseError = void 0; - try { - logger.info(`Retry ${retryCount}: Attempting to send request`, request2.requestId); - response = await next(request2); - logger.info(`Retry ${retryCount}: Received a response from request`, request2.requestId); - } catch (e) { - logger.error(`Retry ${retryCount}: Received an error from request`, request2.requestId); - responseError = e; - if (!e || responseError.name !== "RestError") { - throw e; - } - response = responseError.response; - } - if (request2.abortSignal?.aborted) { - logger.error(`Retry ${retryCount}: Request aborted.`); - const abortError = new AbortError_js_1.AbortError(); - throw abortError; - } - if (retryCount >= (options.maxRetries ?? 
constants_js_1.DEFAULT_RETRY_POLICY_COUNT)) { - logger.info(`Retry ${retryCount}: Maximum retries reached. Returning the last received response, or throwing the last received error.`); - if (responseError) { - throw responseError; - } else if (response) { - return response; - } else { - throw new Error("Maximum retries reached with no response or error to throw"); - } - } - logger.info(`Retry ${retryCount}: Processing ${strategies.length} retry strategies.`); - strategiesLoop: for (const strategy of strategies) { - const strategyLogger = strategy.logger || logger; - strategyLogger.info(`Retry ${retryCount}: Processing retry strategy ${strategy.name}.`); - const modifiers = strategy.retry({ - retryCount, - response, - responseError - }); - if (modifiers.skipStrategy) { - strategyLogger.info(`Retry ${retryCount}: Skipped.`); - continue strategiesLoop; - } - const { errorToThrow, retryAfterInMs, redirectTo } = modifiers; - if (errorToThrow) { - strategyLogger.error(`Retry ${retryCount}: Retry strategy ${strategy.name} throws error:`, errorToThrow); - throw errorToThrow; - } - if (retryAfterInMs || retryAfterInMs === 0) { - strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} retries after ${retryAfterInMs}`); - await (0, helpers_js_1.delay)(retryAfterInMs, void 0, { abortSignal: request2.abortSignal }); - continue retryRequest; - } - if (redirectTo) { - strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} redirects to ${redirectTo}`); - request2.url = redirectTo; - continue retryRequest; - } - } - if (responseError) { - logger.info(`None of the retry strategies could work with the received error. Throwing it.`); - throw responseError; - } - if (response) { - logger.info(`None of the retry strategies could work with the received response. Returning it.`); - return response; - } - } - } - }; + function load2() { + return process.env.DEBUG; + } + function init(debug5) { + debug5.inspectOpts = {}; + const keys = Object.keys(exports2.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug5.inspectOpts[keys[i]] = exports2.inspectOpts[keys[i]]; + } } + module2.exports = require_common()(exports2); + var { formatters } = module2.exports; + formatters.o = function(v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts).split("\n").map((str2) => str2.trim()).join(" "); + }; + formatters.O = function(v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); + }; } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/defaultRetryPolicy.js -var require_defaultRetryPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/defaultRetryPolicy.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.defaultRetryPolicyName = void 0; - exports2.defaultRetryPolicy = defaultRetryPolicy; - var exponentialRetryStrategy_js_1 = require_exponentialRetryStrategy(); - var throttlingRetryStrategy_js_1 = require_throttlingRetryStrategy(); - var retryPolicy_js_1 = require_retryPolicy(); - var constants_js_1 = require_constants13(); - exports2.defaultRetryPolicyName = "defaultRetryPolicy"; - function defaultRetryPolicy(options = {}) { - return { - name: exports2.defaultRetryPolicyName, - sendRequest: (0, retryPolicy_js_1.retryPolicy)([(0, throttlingRetryStrategy_js_1.throttlingRetryStrategy)(), (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(options)], { - maxRetries: options.maxRetries ?? 
constants_js_1.DEFAULT_RETRY_POLICY_COUNT - }).sendRequest - }; +// node_modules/debug/src/index.js +var require_src = __commonJS({ + "node_modules/debug/src/index.js"(exports2, module2) { + if (typeof process === "undefined" || process.type === "renderer" || process.browser === true || process.__nwjs) { + module2.exports = require_browser(); + } else { + module2.exports = require_node(); } } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/util/checkEnvironment.js -var require_checkEnvironment = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/util/checkEnvironment.js"(exports2) { +// node_modules/agent-base/dist/helpers.js +var require_helpers2 = __commonJS({ + "node_modules/agent-base/dist/helpers.js"(exports2) { "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding2(result, mod, k); + } + __setModuleDefault2(result, mod); + return result; + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isReactNative = exports2.isNodeRuntime = exports2.isNodeLike = exports2.isBun = exports2.isDeno = exports2.isWebWorker = exports2.isBrowser = void 0; - exports2.isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined"; - exports2.isWebWorker = typeof self === "object" && typeof self?.importScripts === "function" && (self.constructor?.name === "DedicatedWorkerGlobalScope" || self.constructor?.name === "ServiceWorkerGlobalScope" || self.constructor?.name === "SharedWorkerGlobalScope"); - exports2.isDeno = typeof Deno !== "undefined" && typeof Deno.version !== "undefined" && typeof Deno.version.deno !== "undefined"; - exports2.isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; - exports2.isNodeLike = typeof globalThis.process !== "undefined" && Boolean(globalThis.process.version) && Boolean(globalThis.process.versions?.node); - exports2.isNodeRuntime = exports2.isNodeLike && !exports2.isBun && !exports2.isDeno; - exports2.isReactNative = typeof navigator !== "undefined" && navigator?.product === "ReactNative"; + exports2.req = exports2.json = exports2.toBuffer = void 0; + var http = __importStar2(require("http")); + var https = __importStar2(require("https")); + async function toBuffer(stream) { + let length = 0; + const chunks = []; + for await (const chunk of stream) { + length += chunk.length; + chunks.push(chunk); + } + return Buffer.concat(chunks, length); + } + exports2.toBuffer = toBuffer; + async function json2(stream) { + const buf = await toBuffer(stream); + const str2 = buf.toString("utf8"); + try { + return JSON.parse(str2); + } catch (_err) { + const err = _err; + err.message += ` 
(input: ${str2})`; + throw err; + } + } + exports2.json = json2; + function req(url, opts = {}) { + const href = typeof url === "string" ? url : url.href; + const req2 = (href.startsWith("https:") ? https : http).request(url, opts); + const promise = new Promise((resolve2, reject) => { + req2.once("response", resolve2).once("error", reject).end(); + }); + req2.then = promise.then.bind(promise); + return req2; + } + exports2.req = req; } }); -// node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/formDataPolicy.js -var require_formDataPolicy = __commonJS({ - "node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/formDataPolicy.js"(exports2) { +// node_modules/agent-base/dist/index.js +var require_dist = __commonJS({ + "node_modules/agent-base/dist/index.js"(exports2) { "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding2(result, mod, k); + } + __setModuleDefault2(result, mod); + return result; + }; + var __exportStar2 = exports2 && exports2.__exportStar || function(m, exports3) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports3, p)) __createBinding2(exports3, m, p); + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.formDataPolicyName = void 0; - exports2.formDataPolicy = formDataPolicy; - var bytesEncoding_js_1 = require_bytesEncoding(); - var checkEnvironment_js_1 = require_checkEnvironment(); - var httpHeaders_js_1 = require_httpHeaders(); - exports2.formDataPolicyName = "formDataPolicy"; - function formDataToFormDataMap(formData) { - const formDataMap = {}; - for (const [key, value] of formData.entries()) { - formDataMap[key] ??= []; - formDataMap[key].push(value); + exports2.Agent = void 0; + var net = __importStar2(require("net")); + var http = __importStar2(require("http")); + var https_1 = require("https"); + __exportStar2(require_helpers2(), exports2); + var INTERNAL = /* @__PURE__ */ Symbol("AgentBaseInternalState"); + var Agent = class extends http.Agent { + constructor(opts) { + super(opts); + this[INTERNAL] = {}; } - return formDataMap; - } - function formDataPolicy() { - return { - name: exports2.formDataPolicyName, - async sendRequest(request2, next) { - if (checkEnvironment_js_1.isNodeLike && typeof FormData !== "undefined" && request2.body instanceof FormData) { - request2.formData = formDataToFormDataMap(request2.body); - request2.body = void 0; - } - if (request2.formData) { - const contentType = request2.headers.get("Content-Type"); - if (contentType && contentType.indexOf("application/x-www-form-urlencoded") !== -1) { - request2.body = wwwFormUrlEncode(request2.formData); - } else 
{ - await prepareFormData(request2.formData, request2); - } - request2.formData = void 0; + /** + * Determine whether this is an `http` or `https` request. + */ + isSecureEndpoint(options) { + if (options) { + if (typeof options.secureEndpoint === "boolean") { + return options.secureEndpoint; } - return next(request2); - } - }; - } - function wwwFormUrlEncode(formData) { - const urlSearchParams = new URLSearchParams(); - for (const [key, value] of Object.entries(formData)) { - if (Array.isArray(value)) { - for (const subValue of value) { - urlSearchParams.append(key, subValue.toString()); + if (typeof options.protocol === "string") { + return options.protocol === "https:"; } - } else { - urlSearchParams.append(key, value.toString()); } + const { stack } = new Error(); + if (typeof stack !== "string") + return false; + return stack.split("\n").some((l) => l.indexOf("(https.js:") !== -1 || l.indexOf("node:https:") !== -1); } - return urlSearchParams.toString(); - } - async function prepareFormData(formData, request2) { - const contentType = request2.headers.get("Content-Type"); - if (contentType && !contentType.startsWith("multipart/form-data")) { - return; + // In order to support async signatures in `connect()` and Node's native + // connection pooling in `http.Agent`, the array of sockets for each origin + // has to be updated synchronously. This is so the length of the array is + // accurate when `addRequest()` is next called. We achieve this by creating a + // fake socket and adding it to `sockets[origin]` and incrementing + // `totalSocketCount`. + incrementSockets(name) { + if (this.maxSockets === Infinity && this.maxTotalSockets === Infinity) { + return null; + } + if (!this.sockets[name]) { + this.sockets[name] = []; + } + const fakeSocket = new net.Socket({ writable: false }); + this.sockets[name].push(fakeSocket); + this.totalSocketCount++; + return fakeSocket; } - request2.headers.set("Content-Type", contentType ?? "multipart/form-data"); - const parts = []; - for (const [fieldName, values] of Object.entries(formData)) { - for (const value of Array.isArray(values) ? values : [values]) { - if (typeof value === "string") { - parts.push({ - headers: (0, httpHeaders_js_1.createHttpHeaders)({ - "Content-Disposition": `form-data; name="${fieldName}"` - }), - body: (0, bytesEncoding_js_1.stringToUint8Array)(value, "utf-8") - }); - } else if (value === void 0 || value === null || typeof value !== "object") { - throw new Error(`Unexpected value for key ${fieldName}: ${value}. 
Value should be serialized to string first.`); - } else { - const fileName = value.name || "blob"; - const headers = (0, httpHeaders_js_1.createHttpHeaders)(); - headers.set("Content-Disposition", `form-data; name="${fieldName}"; filename="${fileName}"`); - headers.set("Content-Type", value.type || "application/octet-stream"); - parts.push({ - headers, - body: value - }); + decrementSockets(name, socket) { + if (!this.sockets[name] || socket === null) { + return; + } + const sockets = this.sockets[name]; + const index = sockets.indexOf(socket); + if (index !== -1) { + sockets.splice(index, 1); + this.totalSocketCount--; + if (sockets.length === 0) { + delete this.sockets[name]; } } } - request2.multipartBody = { parts }; - } - } -}); - -// node_modules/ms/index.js -var require_ms = __commonJS({ - "node_modules/ms/index.js"(exports2, module2) { - var s = 1e3; - var m = s * 60; - var h = m * 60; - var d = h * 24; - var w = d * 7; - var y = d * 365.25; - module2.exports = function(val, options) { - options = options || {}; - var type2 = typeof val; - if (type2 === "string" && val.length > 0) { - return parse2(val); - } else if (type2 === "number" && isFinite(val)) { - return options.long ? fmtLong(val) : fmtShort(val); - } - throw new Error( - "val is not a non-empty string or a valid number. val=" + JSON.stringify(val) - ); - }; - function parse2(str2) { - str2 = String(str2); - if (str2.length > 100) { - return; - } - var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( - str2 - ); - if (!match) { - return; - } - var n = parseFloat(match[1]); - var type2 = (match[2] || "ms").toLowerCase(); - switch (type2) { - case "years": - case "year": - case "yrs": - case "yr": - case "y": - return n * y; - case "weeks": - case "week": - case "w": - return n * w; - case "days": - case "day": - case "d": - return n * d; - case "hours": - case "hour": - case "hrs": - case "hr": - case "h": - return n * h; - case "minutes": - case "minute": - case "mins": - case "min": - case "m": - return n * m; - case "seconds": - case "second": - case "secs": - case "sec": - case "s": - return n * s; - case "milliseconds": - case "millisecond": - case "msecs": - case "msec": - case "ms": - return n; - default: - return void 0; - } - } - function fmtShort(ms) { - var msAbs = Math.abs(ms); - if (msAbs >= d) { - return Math.round(ms / d) + "d"; - } - if (msAbs >= h) { - return Math.round(ms / h) + "h"; + // In order to properly update the socket pool, we need to call `getName()` on + // the core `https.Agent` if it is a secureEndpoint. + getName(options) { + const secureEndpoint = typeof options.secureEndpoint === "boolean" ? 
options.secureEndpoint : this.isSecureEndpoint(options); + if (secureEndpoint) { + return https_1.Agent.prototype.getName.call(this, options); + } + return super.getName(options); } - if (msAbs >= m) { - return Math.round(ms / m) + "m"; + createSocket(req, options, cb) { + const connectOpts = { + ...options, + secureEndpoint: this.isSecureEndpoint(options) + }; + const name = this.getName(connectOpts); + const fakeSocket = this.incrementSockets(name); + Promise.resolve().then(() => this.connect(req, connectOpts)).then((socket) => { + this.decrementSockets(name, fakeSocket); + if (socket instanceof http.Agent) { + try { + return socket.addRequest(req, connectOpts); + } catch (err) { + return cb(err); + } + } + this[INTERNAL].currentSocket = socket; + super.createSocket(req, options, cb); + }, (err) => { + this.decrementSockets(name, fakeSocket); + cb(err); + }); } - if (msAbs >= s) { - return Math.round(ms / s) + "s"; + createConnection() { + const socket = this[INTERNAL].currentSocket; + this[INTERNAL].currentSocket = void 0; + if (!socket) { + throw new Error("No socket was returned in the `connect()` function"); + } + return socket; } - return ms + "ms"; - } - function fmtLong(ms) { - var msAbs = Math.abs(ms); - if (msAbs >= d) { - return plural(ms, msAbs, d, "day"); + get defaultPort() { + return this[INTERNAL].defaultPort ?? (this.protocol === "https:" ? 443 : 80); } - if (msAbs >= h) { - return plural(ms, msAbs, h, "hour"); + set defaultPort(v) { + if (this[INTERNAL]) { + this[INTERNAL].defaultPort = v; + } } - if (msAbs >= m) { - return plural(ms, msAbs, m, "minute"); + get protocol() { + return this[INTERNAL].protocol ?? (this.isSecureEndpoint() ? "https:" : "http:"); } - if (msAbs >= s) { - return plural(ms, msAbs, s, "second"); + set protocol(v) { + if (this[INTERNAL]) { + this[INTERNAL].protocol = v; + } } - return ms + " ms"; - } - function plural(ms, msAbs, n, name) { - var isPlural = msAbs >= n * 1.5; - return Math.round(ms / n) + " " + name + (isPlural ? "s" : ""); - } + }; + exports2.Agent = Agent; } }); -// node_modules/debug/src/common.js -var require_common = __commonJS({ - "node_modules/debug/src/common.js"(exports2, module2) { - function setup(env) { - createDebug.debug = createDebug; - createDebug.default = createDebug; - createDebug.coerce = coerce2; - createDebug.disable = disable; - createDebug.enable = enable; - createDebug.enabled = enabled; - createDebug.humanize = require_ms(); - createDebug.destroy = destroy; - Object.keys(env).forEach((key) => { - createDebug[key] = env[key]; - }); - createDebug.names = []; - createDebug.skips = []; - createDebug.formatters = {}; - function selectColor(namespace) { - let hash = 0; - for (let i = 0; i < namespace.length; i++) { - hash = (hash << 5) - hash + namespace.charCodeAt(i); - hash |= 0; +// node_modules/https-proxy-agent/dist/parse-proxy-response.js +var require_parse_proxy_response = __commonJS({ + "node_modules/https-proxy-agent/dist/parse-proxy-response.js"(exports2) { + "use strict"; + var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { + return mod && mod.__esModule ? 
mod : { "default": mod }; + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.parseProxyResponse = void 0; + var debug_1 = __importDefault2(require_src()); + var debug5 = (0, debug_1.default)("https-proxy-agent:parse-proxy-response"); + function parseProxyResponse(socket) { + return new Promise((resolve2, reject) => { + let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once("readable", read); } - return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; - } - createDebug.selectColor = selectColor; - function createDebug(namespace) { - let prevTime; - let enableOverride = null; - let namespacesCache; - let enabledCache; - function debug5(...args) { - if (!debug5.enabled) { + function cleanup() { + socket.removeListener("end", onend); + socket.removeListener("error", onerror); + socket.removeListener("readable", read); + } + function onend() { + cleanup(); + debug5("onend"); + reject(new Error("Proxy connection ended before receiving CONNECT response")); + } + function onerror(err) { + cleanup(); + debug5("onerror %o", err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf("\r\n\r\n"); + if (endOfHeaders === -1) { + debug5("have not received end of HTTP headers yet..."); + read(); return; } - const self2 = debug5; - const curr = Number(/* @__PURE__ */ new Date()); - const ms = curr - (prevTime || curr); - self2.diff = ms; - self2.prev = prevTime; - self2.curr = curr; - prevTime = curr; - args[0] = createDebug.coerce(args[0]); - if (typeof args[0] !== "string") { - args.unshift("%O"); + const headerParts = buffered.slice(0, endOfHeaders).toString("ascii").split("\r\n"); + const firstLine = headerParts.shift(); + if (!firstLine) { + socket.destroy(); + return reject(new Error("No header received from proxy CONNECT response")); } - let index = 0; - args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { - if (match === "%%") { - return "%"; - } - index++; - const formatter = createDebug.formatters[format]; - if (typeof formatter === "function") { - const val = args[index]; - match = formatter.call(self2, val); - args.splice(index, 1); - index--; - } - return match; - }); - createDebug.formatArgs.call(self2, args); - const logFn = self2.log || createDebug.log; - logFn.apply(self2, args); - } - debug5.namespace = namespace; - debug5.useColors = createDebug.useColors(); - debug5.color = createDebug.selectColor(namespace); - debug5.extend = extend3; - debug5.destroy = createDebug.destroy; - Object.defineProperty(debug5, "enabled", { - enumerable: true, - configurable: false, - get: () => { - if (enableOverride !== null) { - return enableOverride; - } - if (namespacesCache !== createDebug.namespaces) { - namespacesCache = createDebug.namespaces; - enabledCache = createDebug.enabled(namespace); + const firstLineParts = firstLine.split(" "); + const statusCode = +firstLineParts[1]; + const statusText = firstLineParts.slice(2).join(" "); + const headers = {}; + for (const header of headerParts) { + if (!header) + continue; + const firstColon = header.indexOf(":"); + if (firstColon === -1) { + socket.destroy(); + return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); } - return enabledCache; - }, - set: (v) => { - enableOverride = v; - } - }); - if (typeof createDebug.init === "function") { - 
createDebug.init(debug5); - } - return debug5; - } - function extend3(namespace, delimiter) { - const newDebug = createDebug(this.namespace + (typeof delimiter === "undefined" ? ":" : delimiter) + namespace); - newDebug.log = this.log; - return newDebug; - } - function enable(namespaces) { - createDebug.save(namespaces); - createDebug.namespaces = namespaces; - createDebug.names = []; - createDebug.skips = []; - const split = (typeof namespaces === "string" ? namespaces : "").trim().replace(/\s+/g, ",").split(",").filter(Boolean); - for (const ns of split) { - if (ns[0] === "-") { - createDebug.skips.push(ns.slice(1)); - } else { - createDebug.names.push(ns); - } - } - } - function matchesTemplate(search, template) { - let searchIndex = 0; - let templateIndex = 0; - let starIndex = -1; - let matchIndex = 0; - while (searchIndex < search.length) { - if (templateIndex < template.length && (template[templateIndex] === search[searchIndex] || template[templateIndex] === "*")) { - if (template[templateIndex] === "*") { - starIndex = templateIndex; - matchIndex = searchIndex; - templateIndex++; + const key = header.slice(0, firstColon).toLowerCase(); + const value = header.slice(firstColon + 1).trimStart(); + const current = headers[key]; + if (typeof current === "string") { + headers[key] = [current, value]; + } else if (Array.isArray(current)) { + current.push(value); } else { - searchIndex++; - templateIndex++; + headers[key] = value; } - } else if (starIndex !== -1) { - templateIndex = starIndex + 1; - matchIndex++; - searchIndex = matchIndex; - } else { - return false; - } - } - while (templateIndex < template.length && template[templateIndex] === "*") { - templateIndex++; - } - return templateIndex === template.length; - } - function disable() { - const namespaces = [ - ...createDebug.names, - ...createDebug.skips.map((namespace) => "-" + namespace) - ].join(","); - createDebug.enable(""); - return namespaces; - } - function enabled(name) { - for (const skip of createDebug.skips) { - if (matchesTemplate(name, skip)) { - return false; - } - } - for (const ns of createDebug.names) { - if (matchesTemplate(name, ns)) { - return true; } + debug5("got proxy server response: %o %o", firstLine, headers); + cleanup(); + resolve2({ + connect: { + statusCode, + statusText, + headers + }, + buffered + }); } - return false; - } - function coerce2(val) { - if (val instanceof Error) { - return val.stack || val.message; - } - return val; - } - function destroy() { - console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`."); - } - createDebug.enable(createDebug.load()); - return createDebug; + socket.on("error", onerror); + socket.on("end", onend); + read(); + }); } - module2.exports = setup; + exports2.parseProxyResponse = parseProxyResponse; } }); -// node_modules/debug/src/browser.js -var require_browser = __commonJS({ - "node_modules/debug/src/browser.js"(exports2, module2) { - exports2.formatArgs = formatArgs; - exports2.save = save; - exports2.load = load2; - exports2.useColors = useColors; - exports2.storage = localstorage(); - exports2.destroy = /* @__PURE__ */ (() => { - let warned = false; - return () => { - if (!warned) { - warned = true; - console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`."); - } - }; - })(); - exports2.colors = [ - "#0000CC", - "#0000FF", - "#0033CC", - "#0033FF", - "#0066CC", - "#0066FF", - "#0099CC", - "#0099FF", - "#00CC00", - "#00CC33", - "#00CC66", - "#00CC99", - "#00CCCC", - "#00CCFF", - "#3300CC", - "#3300FF", - "#3333CC", - "#3333FF", - "#3366CC", - "#3366FF", - "#3399CC", - "#3399FF", - "#33CC00", - "#33CC33", - "#33CC66", - "#33CC99", - "#33CCCC", - "#33CCFF", - "#6600CC", - "#6600FF", - "#6633CC", - "#6633FF", - "#66CC00", - "#66CC33", - "#9900CC", - "#9900FF", - "#9933CC", - "#9933FF", - "#99CC00", - "#99CC33", - "#CC0000", - "#CC0033", - "#CC0066", - "#CC0099", - "#CC00CC", - "#CC00FF", - "#CC3300", - "#CC3333", - "#CC3366", - "#CC3399", - "#CC33CC", - "#CC33FF", - "#CC6600", - "#CC6633", - "#CC9900", - "#CC9933", - "#CCCC00", - "#CCCC33", - "#FF0000", - "#FF0033", - "#FF0066", - "#FF0099", - "#FF00CC", - "#FF00FF", - "#FF3300", - "#FF3333", - "#FF3366", - "#FF3399", - "#FF33CC", - "#FF33FF", - "#FF6600", - "#FF6633", - "#FF9900", - "#FF9933", - "#FFCC00", - "#FFCC33" - ]; - function useColors() { - if (typeof window !== "undefined" && window.process && (window.process.type === "renderer" || window.process.__nwjs)) { - return true; - } - if (typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { - return false; - } - let m; - return typeof document !== "undefined" && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || // Is firebug? http://stackoverflow.com/a/398120/376773 - typeof window !== "undefined" && window.console && (window.console.firebug || window.console.exception && window.console.table) || // Is firefox >= v31? - // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages - typeof navigator !== "undefined" && navigator.userAgent && (m = navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/)) && parseInt(m[1], 10) >= 31 || // Double check webkit in userAgent just in case we are in a worker - typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/); - } - function formatArgs(args) { - args[0] = (this.useColors ? "%c" : "") + this.namespace + (this.useColors ? " %c" : " ") + args[0] + (this.useColors ? 
"%c " : " ") + "+" + module2.exports.humanize(this.diff); - if (!this.useColors) { - return; - } - const c = "color: " + this.color; - args.splice(1, 0, c, "color: inherit"); - let index = 0; - let lastC = 0; - args[0].replace(/%[a-zA-Z%]/g, (match) => { - if (match === "%%") { - return; - } - index++; - if (match === "%c") { - lastC = index; - } - }); - args.splice(lastC, 0, c); - } - exports2.log = console.debug || console.log || (() => { - }); - function save(namespaces) { - try { - if (namespaces) { - exports2.storage.setItem("debug", namespaces); - } else { - exports2.storage.removeItem("debug"); - } - } catch (error3) { - } - } - function load2() { - let r; - try { - r = exports2.storage.getItem("debug") || exports2.storage.getItem("DEBUG"); - } catch (error3) { - } - if (!r && typeof process !== "undefined" && "env" in process) { - r = process.env.DEBUG; - } - return r; - } - function localstorage() { - try { - return localStorage; - } catch (error3) { - } - } - module2.exports = require_common()(exports2); - var { formatters } = module2.exports; - formatters.j = function(v) { - try { - return JSON.stringify(v); - } catch (error3) { - return "[UnexpectedJSONParseError]: " + error3.message; - } - }; - } -}); - -// node_modules/has-flag/index.js -var require_has_flag = __commonJS({ - "node_modules/has-flag/index.js"(exports2, module2) { - "use strict"; - module2.exports = (flag, argv = process.argv) => { - const prefix = flag.startsWith("-") ? "" : flag.length === 1 ? "-" : "--"; - const position = argv.indexOf(prefix + flag); - const terminatorPosition = argv.indexOf("--"); - return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); - }; - } -}); - -// node_modules/supports-color/index.js -var require_supports_color = __commonJS({ - "node_modules/supports-color/index.js"(exports2, module2) { - "use strict"; - var os2 = require("os"); - var tty = require("tty"); - var hasFlag = require_has_flag(); - var { env } = process; - var forceColor; - if (hasFlag("no-color") || hasFlag("no-colors") || hasFlag("color=false") || hasFlag("color=never")) { - forceColor = 0; - } else if (hasFlag("color") || hasFlag("colors") || hasFlag("color=true") || hasFlag("color=always")) { - forceColor = 1; - } - if ("FORCE_COLOR" in env) { - if (env.FORCE_COLOR === "true") { - forceColor = 1; - } else if (env.FORCE_COLOR === "false") { - forceColor = 0; - } else { - forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3); - } - } - function translateLevel(level) { - if (level === 0) { - return false; - } - return { - level, - hasBasic: true, - has256: level >= 2, - has16m: level >= 3 - }; - } - function supportsColor(haveStream, streamIsTTY) { - if (forceColor === 0) { - return 0; - } - if (hasFlag("color=16m") || hasFlag("color=full") || hasFlag("color=truecolor")) { - return 3; - } - if (hasFlag("color=256")) { - return 2; - } - if (haveStream && !streamIsTTY && forceColor === void 0) { - return 0; - } - const min = forceColor || 0; - if (env.TERM === "dumb") { - return min; - } - if (process.platform === "win32") { - const osRelease = os2.release().split("."); - if (Number(osRelease[0]) >= 10 && Number(osRelease[2]) >= 10586) { - return Number(osRelease[2]) >= 14931 ? 
3 : 2; - } - return 1; - } - if ("CI" in env) { - if (["TRAVIS", "CIRCLECI", "APPVEYOR", "GITLAB_CI", "GITHUB_ACTIONS", "BUILDKITE"].some((sign) => sign in env) || env.CI_NAME === "codeship") { - return 1; - } - return min; - } - if ("TEAMCITY_VERSION" in env) { - return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; - } - if (env.COLORTERM === "truecolor") { - return 3; - } - if ("TERM_PROGRAM" in env) { - const version = parseInt((env.TERM_PROGRAM_VERSION || "").split(".")[0], 10); - switch (env.TERM_PROGRAM) { - case "iTerm.app": - return version >= 3 ? 3 : 2; - case "Apple_Terminal": - return 2; - } - } - if (/-256(color)?$/i.test(env.TERM)) { - return 2; - } - if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { - return 1; - } - if ("COLORTERM" in env) { - return 1; - } - return min; - } - function getSupportLevel(stream) { - const level = supportsColor(stream, stream && stream.isTTY); - return translateLevel(level); - } - module2.exports = { - supportsColor: getSupportLevel, - stdout: translateLevel(supportsColor(true, tty.isatty(1))), - stderr: translateLevel(supportsColor(true, tty.isatty(2))) - }; - } -}); - -// node_modules/debug/src/node.js -var require_node = __commonJS({ - "node_modules/debug/src/node.js"(exports2, module2) { - var tty = require("tty"); - var util = require("util"); - exports2.init = init; - exports2.log = log; - exports2.formatArgs = formatArgs; - exports2.save = save; - exports2.load = load2; - exports2.useColors = useColors; - exports2.destroy = util.deprecate( - () => { - }, - "Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`." - ); - exports2.colors = [6, 2, 3, 4, 5, 1]; - try { - const supportsColor = require_supports_color(); - if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { - exports2.colors = [ - 20, - 21, - 26, - 27, - 32, - 33, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 56, - 57, - 62, - 63, - 68, - 69, - 74, - 75, - 76, - 77, - 78, - 79, - 80, - 81, - 92, - 93, - 98, - 99, - 112, - 113, - 128, - 129, - 134, - 135, - 148, - 149, - 160, - 161, - 162, - 163, - 164, - 165, - 166, - 167, - 168, - 169, - 170, - 171, - 172, - 173, - 178, - 179, - 184, - 185, - 196, - 197, - 198, - 199, - 200, - 201, - 202, - 203, - 204, - 205, - 206, - 207, - 208, - 209, - 214, - 215, - 220, - 221 - ]; - } - } catch (error3) { - } - exports2.inspectOpts = Object.keys(process.env).filter((key) => { - return /^debug_/i.test(key); - }).reduce((obj, key) => { - const prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, (_, k) => { - return k.toUpperCase(); - }); - let val = process.env[key]; - if (/^(yes|on|true|enabled)$/i.test(val)) { - val = true; - } else if (/^(no|off|false|disabled)$/i.test(val)) { - val = false; - } else if (val === "null") { - val = null; - } else { - val = Number(val); - } - obj[prop] = val; - return obj; - }, {}); - function useColors() { - return "colors" in exports2.inspectOpts ? Boolean(exports2.inspectOpts.colors) : tty.isatty(process.stderr.fd); - } - function formatArgs(args) { - const { namespace: name, useColors: useColors2 } = this; - if (useColors2) { - const c = this.color; - const colorCode = "\x1B[3" + (c < 8 ? 
c : "8;5;" + c); - const prefix = ` ${colorCode};1m${name} \x1B[0m`; - args[0] = prefix + args[0].split("\n").join("\n" + prefix); - args.push(colorCode + "m+" + module2.exports.humanize(this.diff) + "\x1B[0m"); - } else { - args[0] = getDate() + name + " " + args[0]; - } - } - function getDate() { - if (exports2.inspectOpts.hideDate) { - return ""; - } - return (/* @__PURE__ */ new Date()).toISOString() + " "; - } - function log(...args) { - return process.stderr.write(util.formatWithOptions(exports2.inspectOpts, ...args) + "\n"); - } - function save(namespaces) { - if (namespaces) { - process.env.DEBUG = namespaces; - } else { - delete process.env.DEBUG; - } - } - function load2() { - return process.env.DEBUG; - } - function init(debug5) { - debug5.inspectOpts = {}; - const keys = Object.keys(exports2.inspectOpts); - for (let i = 0; i < keys.length; i++) { - debug5.inspectOpts[keys[i]] = exports2.inspectOpts[keys[i]]; - } - } - module2.exports = require_common()(exports2); - var { formatters } = module2.exports; - formatters.o = function(v) { - this.inspectOpts.colors = this.useColors; - return util.inspect(v, this.inspectOpts).split("\n").map((str2) => str2.trim()).join(" "); - }; - formatters.O = function(v) { - this.inspectOpts.colors = this.useColors; - return util.inspect(v, this.inspectOpts); - }; - } -}); - -// node_modules/debug/src/index.js -var require_src = __commonJS({ - "node_modules/debug/src/index.js"(exports2, module2) { - if (typeof process === "undefined" || process.type === "renderer" || process.browser === true || process.__nwjs) { - module2.exports = require_browser(); - } else { - module2.exports = require_node(); - } - } -}); - -// node_modules/agent-base/dist/helpers.js -var require_helpers3 = __commonJS({ - "node_modules/agent-base/dist/helpers.js"(exports2) { +// node_modules/https-proxy-agent/dist/index.js +var require_dist2 = __commonJS({ + "node_modules/https-proxy-agent/dist/index.js"(exports2) { "use strict"; var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { if (k2 === void 0) k2 = k; @@ -73902,347 +72105,25 @@ var require_helpers3 = __commonJS({ __setModuleDefault2(result, mod); return result; }; + var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { + return mod && mod.__esModule ? mod : { "default": mod }; + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.req = exports2.json = exports2.toBuffer = void 0; - var http = __importStar2(require("http")); - var https = __importStar2(require("https")); - async function toBuffer(stream) { - let length = 0; - const chunks = []; - for await (const chunk of stream) { - length += chunk.length; - chunks.push(chunk); - } - return Buffer.concat(chunks, length); - } - exports2.toBuffer = toBuffer; - async function json2(stream) { - const buf = await toBuffer(stream); - const str2 = buf.toString("utf8"); - try { - return JSON.parse(str2); - } catch (_err) { - const err = _err; - err.message += ` (input: ${str2})`; - throw err; - } - } - exports2.json = json2; - function req(url, opts = {}) { - const href = typeof url === "string" ? url : url.href; - const req2 = (href.startsWith("https:") ? 
https : http).request(url, opts); - const promise = new Promise((resolve2, reject) => { - req2.once("response", resolve2).once("error", reject).end(); - }); - req2.then = promise.then.bind(promise); - return req2; - } - exports2.req = req; - } -}); - -// node_modules/agent-base/dist/index.js -var require_dist = __commonJS({ - "node_modules/agent-base/dist/index.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding2(result, mod, k); - } - __setModuleDefault2(result, mod); - return result; - }; - var __exportStar2 = exports2 && exports2.__exportStar || function(m, exports3) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports3, p)) __createBinding2(exports3, m, p); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Agent = void 0; - var net = __importStar2(require("net")); - var http = __importStar2(require("http")); - var https_1 = require("https"); - __exportStar2(require_helpers3(), exports2); - var INTERNAL = /* @__PURE__ */ Symbol("AgentBaseInternalState"); - var Agent = class extends http.Agent { - constructor(opts) { - super(opts); - this[INTERNAL] = {}; - } - /** - * Determine whether this is an `http` or `https` request. - */ - isSecureEndpoint(options) { - if (options) { - if (typeof options.secureEndpoint === "boolean") { - return options.secureEndpoint; - } - if (typeof options.protocol === "string") { - return options.protocol === "https:"; - } - } - const { stack } = new Error(); - if (typeof stack !== "string") - return false; - return stack.split("\n").some((l) => l.indexOf("(https.js:") !== -1 || l.indexOf("node:https:") !== -1); - } - // In order to support async signatures in `connect()` and Node's native - // connection pooling in `http.Agent`, the array of sockets for each origin - // has to be updated synchronously. This is so the length of the array is - // accurate when `addRequest()` is next called. We achieve this by creating a - // fake socket and adding it to `sockets[origin]` and incrementing - // `totalSocketCount`. 
- incrementSockets(name) { - if (this.maxSockets === Infinity && this.maxTotalSockets === Infinity) { - return null; - } - if (!this.sockets[name]) { - this.sockets[name] = []; - } - const fakeSocket = new net.Socket({ writable: false }); - this.sockets[name].push(fakeSocket); - this.totalSocketCount++; - return fakeSocket; - } - decrementSockets(name, socket) { - if (!this.sockets[name] || socket === null) { - return; - } - const sockets = this.sockets[name]; - const index = sockets.indexOf(socket); - if (index !== -1) { - sockets.splice(index, 1); - this.totalSocketCount--; - if (sockets.length === 0) { - delete this.sockets[name]; - } - } - } - // In order to properly update the socket pool, we need to call `getName()` on - // the core `https.Agent` if it is a secureEndpoint. - getName(options) { - const secureEndpoint = typeof options.secureEndpoint === "boolean" ? options.secureEndpoint : this.isSecureEndpoint(options); - if (secureEndpoint) { - return https_1.Agent.prototype.getName.call(this, options); - } - return super.getName(options); - } - createSocket(req, options, cb) { - const connectOpts = { - ...options, - secureEndpoint: this.isSecureEndpoint(options) - }; - const name = this.getName(connectOpts); - const fakeSocket = this.incrementSockets(name); - Promise.resolve().then(() => this.connect(req, connectOpts)).then((socket) => { - this.decrementSockets(name, fakeSocket); - if (socket instanceof http.Agent) { - try { - return socket.addRequest(req, connectOpts); - } catch (err) { - return cb(err); - } - } - this[INTERNAL].currentSocket = socket; - super.createSocket(req, options, cb); - }, (err) => { - this.decrementSockets(name, fakeSocket); - cb(err); - }); - } - createConnection() { - const socket = this[INTERNAL].currentSocket; - this[INTERNAL].currentSocket = void 0; - if (!socket) { - throw new Error("No socket was returned in the `connect()` function"); - } - return socket; - } - get defaultPort() { - return this[INTERNAL].defaultPort ?? (this.protocol === "https:" ? 443 : 80); - } - set defaultPort(v) { - if (this[INTERNAL]) { - this[INTERNAL].defaultPort = v; - } - } - get protocol() { - return this[INTERNAL].protocol ?? (this.isSecureEndpoint() ? "https:" : "http:"); - } - set protocol(v) { - if (this[INTERNAL]) { - this[INTERNAL].protocol = v; - } - } - }; - exports2.Agent = Agent; - } -}); - -// node_modules/https-proxy-agent/dist/parse-proxy-response.js -var require_parse_proxy_response = __commonJS({ - "node_modules/https-proxy-agent/dist/parse-proxy-response.js"(exports2) { - "use strict"; - var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { - return mod && mod.__esModule ? 
mod : { "default": mod }; - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.parseProxyResponse = void 0; - var debug_1 = __importDefault2(require_src()); - var debug5 = (0, debug_1.default)("https-proxy-agent:parse-proxy-response"); - function parseProxyResponse(socket) { - return new Promise((resolve2, reject) => { - let buffersLength = 0; - const buffers = []; - function read() { - const b = socket.read(); - if (b) - ondata(b); - else - socket.once("readable", read); - } - function cleanup() { - socket.removeListener("end", onend); - socket.removeListener("error", onerror); - socket.removeListener("readable", read); - } - function onend() { - cleanup(); - debug5("onend"); - reject(new Error("Proxy connection ended before receiving CONNECT response")); - } - function onerror(err) { - cleanup(); - debug5("onerror %o", err); - reject(err); - } - function ondata(b) { - buffers.push(b); - buffersLength += b.length; - const buffered = Buffer.concat(buffers, buffersLength); - const endOfHeaders = buffered.indexOf("\r\n\r\n"); - if (endOfHeaders === -1) { - debug5("have not received end of HTTP headers yet..."); - read(); - return; - } - const headerParts = buffered.slice(0, endOfHeaders).toString("ascii").split("\r\n"); - const firstLine = headerParts.shift(); - if (!firstLine) { - socket.destroy(); - return reject(new Error("No header received from proxy CONNECT response")); - } - const firstLineParts = firstLine.split(" "); - const statusCode = +firstLineParts[1]; - const statusText = firstLineParts.slice(2).join(" "); - const headers = {}; - for (const header of headerParts) { - if (!header) - continue; - const firstColon = header.indexOf(":"); - if (firstColon === -1) { - socket.destroy(); - return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); - } - const key = header.slice(0, firstColon).toLowerCase(); - const value = header.slice(firstColon + 1).trimStart(); - const current = headers[key]; - if (typeof current === "string") { - headers[key] = [current, value]; - } else if (Array.isArray(current)) { - current.push(value); - } else { - headers[key] = value; - } - } - debug5("got proxy server response: %o %o", firstLine, headers); - cleanup(); - resolve2({ - connect: { - statusCode, - statusText, - headers - }, - buffered - }); - } - socket.on("error", onerror); - socket.on("end", onend); - read(); - }); - } - exports2.parseProxyResponse = parseProxyResponse; - } -}); - -// node_modules/https-proxy-agent/dist/index.js -var require_dist2 = __commonJS({ - "node_modules/https-proxy-agent/dist/index.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding2(result, mod, k); - } - __setModuleDefault2(result, mod); - return result; - }; - var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { - return mod && mod.__esModule ? mod : { "default": mod }; - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.HttpsProxyAgent = void 0; - var net = __importStar2(require("net")); - var tls = __importStar2(require("tls")); - var assert_1 = __importDefault2(require("assert")); - var debug_1 = __importDefault2(require_src()); - var agent_base_1 = require_dist(); - var url_1 = require("url"); - var parse_proxy_response_1 = require_parse_proxy_response(); - var debug5 = (0, debug_1.default)("https-proxy-agent"); - var setServernameFromNonIpHost = (options) => { - if (options.servername === void 0 && options.host && !net.isIP(options.host)) { - return { - ...options, - servername: options.host - }; + exports2.HttpsProxyAgent = void 0; + var net = __importStar2(require("net")); + var tls = __importStar2(require("tls")); + var assert_1 = __importDefault2(require("assert")); + var debug_1 = __importDefault2(require_src()); + var agent_base_1 = require_dist(); + var url_1 = require("url"); + var parse_proxy_response_1 = require_parse_proxy_response(); + var debug5 = (0, debug_1.default)("https-proxy-agent"); + var setServernameFromNonIpHost = (options) => { + if (options.servername === void 0 && options.host && !net.isIP(options.host)) { + return { + ...options, + servername: options.host + }; } return options; }; @@ -75555,8 +73436,8 @@ var require_getClient = __commonJS({ } const { allowInsecureConnection, httpClient } = clientOptions; const endpointUrl = clientOptions.endpoint ?? 
endpoint2; - const client = (path3, ...args) => { - const getUrl = (requestOptions) => (0, urlHelpers_js_1.buildRequestUrl)(endpointUrl, path3, args, { allowInsecureConnection, ...requestOptions }); + const client = (path4, ...args) => { + const getUrl = (requestOptions) => (0, urlHelpers_js_1.buildRequestUrl)(endpointUrl, path4, args, { allowInsecureConnection, ...requestOptions }); return { get: (requestOptions = {}) => { return buildOperation("GET", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection, httpClient); @@ -79427,15 +77308,15 @@ var require_urlHelpers2 = __commonJS({ let isAbsolutePath = false; let requestUrl = replaceAll(baseUri, urlReplacements); if (operationSpec.path) { - let path3 = replaceAll(operationSpec.path, urlReplacements); - if (operationSpec.path === "/{nextLink}" && path3.startsWith("/")) { - path3 = path3.substring(1); + let path4 = replaceAll(operationSpec.path, urlReplacements); + if (operationSpec.path === "/{nextLink}" && path4.startsWith("/")) { + path4 = path4.substring(1); } - if (isAbsoluteUrl(path3)) { - requestUrl = path3; + if (isAbsoluteUrl(path4)) { + requestUrl = path4; isAbsolutePath = true; } else { - requestUrl = appendPath(requestUrl, path3); + requestUrl = appendPath(requestUrl, path4); } } const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); @@ -79481,9 +77362,9 @@ var require_urlHelpers2 = __commonJS({ } const searchStart = pathToAppend.indexOf("?"); if (searchStart !== -1) { - const path3 = pathToAppend.substring(0, searchStart); + const path4 = pathToAppend.substring(0, searchStart); const search = pathToAppend.substring(searchStart + 1); - newPath = newPath + path3; + newPath = newPath + path4; if (search) { parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search; } @@ -81716,10 +79597,10 @@ var require_utils_common = __commonJS({ var constants_js_1 = require_constants15(); function escapeURLPath(url) { const urlParsed = new URL(url); - let path3 = urlParsed.pathname; - path3 = path3 || "/"; - path3 = escape2(path3); - urlParsed.pathname = path3; + let path4 = urlParsed.pathname; + path4 = path4 || "/"; + path4 = escape2(path4); + urlParsed.pathname = path4; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -81804,9 +79685,9 @@ var require_utils_common = __commonJS({ } function appendToURLPath(url, name) { const urlParsed = new URL(url); - let path3 = urlParsed.pathname; - path3 = path3 ? path3.endsWith("/") ? `${path3}${name}` : `${path3}/${name}` : name; - urlParsed.pathname = path3; + let path4 = urlParsed.pathname; + path4 = path4 ? path4.endsWith("/") ? 
`${path4}${name}` : `${path4}/${name}` : name; + urlParsed.pathname = path4; return urlParsed.toString(); } function setURLParameter(url, name, value) { @@ -83033,9 +80914,9 @@ var require_StorageSharedKeyCredentialPolicy = __commonJS({ * @param request - */ getCanonicalizedResourceString(request2) { - const path3 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path4 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path3}`; + canonicalizedResourceString += `/${this.factory.accountName}${path4}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -83774,10 +81655,10 @@ var require_utils_common2 = __commonJS({ var constants_js_1 = require_constants16(); function escapeURLPath(url) { const urlParsed = new URL(url); - let path3 = urlParsed.pathname; - path3 = path3 || "/"; - path3 = escape2(path3); - urlParsed.pathname = path3; + let path4 = urlParsed.pathname; + path4 = path4 || "/"; + path4 = escape2(path4); + urlParsed.pathname = path4; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -83862,9 +81743,9 @@ var require_utils_common2 = __commonJS({ } function appendToURLPath(url, name) { const urlParsed = new URL(url); - let path3 = urlParsed.pathname; - path3 = path3 ? path3.endsWith("/") ? `${path3}${name}` : `${path3}/${name}` : name; - urlParsed.pathname = path3; + let path4 = urlParsed.pathname; + path4 = path4 ? path4.endsWith("/") ? `${path4}${name}` : `${path4}/${name}` : name; + urlParsed.pathname = path4; return urlParsed.toString(); } function setURLParameter(url, name, value) { @@ -84785,9 +82666,9 @@ var require_StorageSharedKeyCredentialPolicy2 = __commonJS({ * @param request - */ getCanonicalizedResourceString(request2) { - const path3 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path4 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path3}`; + canonicalizedResourceString += `/${this.factory.accountName}${path4}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -85417,9 +83298,9 @@ var require_StorageSharedKeyCredentialPolicyV2 = __commonJS({ return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request2) { - const path3 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path4 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path3}`; + canonicalizedResourceString += `/${options.accountName}${path4}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if (queries) { @@ -85764,9 +83645,9 @@ var require_StorageSharedKeyCredentialPolicyV22 = __commonJS({ return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request2) { - const path3 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; + const path4 = (0, utils_common_js_1.getURLPath)(request2.url) || "/"; let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${options.accountName}${path3}`; + canonicalizedResourceString += `/${options.accountName}${path4}`; const queries = (0, utils_common_js_1.getURLQueries)(request2.url); const lowercaseQueries = {}; if 
(queries) { @@ -103192,1200 +101073,3111 @@ var require_operation2 = __commonJS({ return "running"; } } - const status = helper(); - return status === "running" && operationLocation === void 0 ? "succeeded" : status; - } - exports2.getStatusFromInitialResponse = getStatusFromInitialResponse; - async function initHttpOperation(inputs) { - const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs; - return (0, operation_js_1.initOperation)({ - init: async () => { - const response = await lro.sendInitialRequest(); - const config = inferLroMode({ - rawResponse: response.rawResponse, - requestPath: lro.requestPath, - requestMethod: lro.requestMethod, - resourceLocationConfig - }); - return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, (config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}); - }, - stateProxy, - processResult: processResult ? ({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse, - getOperationStatus: getStatusFromInitialResponse, - setErrorAsResult - }); - } - exports2.initHttpOperation = initHttpOperation; - function getOperationLocation({ rawResponse }, state) { - var _a; - const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; - switch (mode) { - case "OperationLocation": { - return getOperationLocationPollingUrl({ - operationLocation: getOperationLocationHeader(rawResponse), - azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse) - }); + const status = helper(); + return status === "running" && operationLocation === void 0 ? "succeeded" : status; + } + exports2.getStatusFromInitialResponse = getStatusFromInitialResponse; + async function initHttpOperation(inputs) { + const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs; + return (0, operation_js_1.initOperation)({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = inferLroMode({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig + }); + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, (config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}); + }, + stateProxy, + processResult: processResult ? ({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse, + getOperationStatus: getStatusFromInitialResponse, + setErrorAsResult + }); + } + exports2.initHttpOperation = initHttpOperation; + function getOperationLocation({ rawResponse }, state) { + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? 
void 0 : _a["mode"]; + switch (mode) { + case "OperationLocation": { + return getOperationLocationPollingUrl({ + operationLocation: getOperationLocationHeader(rawResponse), + azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse) + }); + } + case "ResourceLocation": { + return getLocationHeader(rawResponse); + } + case "Body": + default: { + return void 0; + } + } + } + exports2.getOperationLocation = getOperationLocation; + function getOperationStatus({ rawResponse }, state) { + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; + switch (mode) { + case "OperationLocation": { + return getStatus(rawResponse); + } + case "ResourceLocation": { + return toOperationStatus(rawResponse.statusCode); + } + case "Body": { + return getProvisioningState(rawResponse); + } + default: + throw new Error(`Internal error: Unexpected operation mode: ${mode}`); + } + } + exports2.getOperationStatus = getOperationStatus; + function accessBodyProperty({ flatResponse, rawResponse }, prop) { + var _a, _b; + return (_a = flatResponse === null || flatResponse === void 0 ? void 0 : flatResponse[prop]) !== null && _a !== void 0 ? _a : (_b = rawResponse.body) === null || _b === void 0 ? void 0 : _b[prop]; + } + function getResourceLocation(res, state) { + const loc = accessBodyProperty(res, "resourceLocation"); + if (loc && typeof loc === "string") { + state.config.resourceLocation = loc; + } + return state.config.resourceLocation; + } + exports2.getResourceLocation = getResourceLocation; + function isOperationError(e) { + return e.name === "RestError"; + } + exports2.isOperationError = isOperationError; + async function pollHttpOperation(inputs) { + const { lro, stateProxy, options, processResult, updateState, setDelay, state, setErrorAsResult } = inputs; + return (0, operation_js_1.pollOperation)({ + state, + stateProxy, + setDelay, + processResult: processResult ? ({ flatResponse }, inputState) => processResult(flatResponse, inputState) : ({ flatResponse }) => flatResponse, + getError: getErrorFromResponse, + updateState, + getPollingInterval: parseRetryAfter, + getOperationLocation, + getOperationStatus, + isOperationError, + getResourceLocation, + options, + /** + * The expansion here is intentional because `lro` could be an object that + * references an inner this, so we need to preserve a reference to it. + */ + poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions), + setErrorAsResult + }); + } + exports2.pollHttpOperation = pollHttpOperation; + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/poller/poller.js +var require_poller = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/poller/poller.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.buildCreatePoller = void 0; + var operation_js_1 = require_operation(); + var constants_js_1 = require_constants17(); + var core_util_1 = require_commonjs4(); + var createStateProxy = () => ({ + /** + * The state at this point is created to be of type OperationState. + * It will be updated later to be of type TState when the + * customer-provided callback, `updateState`, is called during polling. 
+ */ + initState: (config) => ({ status: "running", config }), + setCanceled: (state) => state.status = "canceled", + setError: (state, error3) => state.error = error3, + setResult: (state, result) => state.result = result, + setRunning: (state) => state.status = "running", + setSucceeded: (state) => state.status = "succeeded", + setFailed: (state) => state.status = "failed", + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => state.status === "canceled", + isFailed: (state) => state.status === "failed", + isRunning: (state) => state.status === "running", + isSucceeded: (state) => state.status === "succeeded" + }); + function buildCreatePoller(inputs) { + const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, isOperationError, getResourceLocation, getPollingInterval, getError, resolveOnUnsuccessful } = inputs; + return async ({ init, poll }, options) => { + const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, restoreFrom } = options || {}; + const stateProxy = createStateProxy(); + const withOperationLocation = withOperationLocationCallback ? /* @__PURE__ */ (() => { + let called = false; + return (operationLocation, isUpdated) => { + if (isUpdated) + withOperationLocationCallback(operationLocation); + else if (!called) + withOperationLocationCallback(operationLocation); + called = true; + }; + })() : void 0; + const state = restoreFrom ? (0, operation_js_1.deserializeState)(restoreFrom) : await (0, operation_js_1.initOperation)({ + init, + stateProxy, + processResult, + getOperationStatus: getStatusFromInitialResponse, + withOperationLocation, + setErrorAsResult: !resolveOnUnsuccessful + }); + let resultPromise; + const abortController = new AbortController(); + const handlers = /* @__PURE__ */ new Map(); + const handleProgressEvents = async () => handlers.forEach((h) => h(state)); + const cancelErrMsg = "Operation was canceled"; + let currentPollIntervalInMs = intervalInMs; + const poller = { + getOperationState: () => state, + getResult: () => state.result, + isDone: () => ["succeeded", "failed", "canceled"].includes(state.status), + isStopped: () => resultPromise === void 0, + stopPolling: () => { + abortController.abort(); + }, + toString: () => JSON.stringify({ + state + }), + onProgress: (callback) => { + const s = /* @__PURE__ */ Symbol(); + handlers.set(s, callback); + return () => handlers.delete(s); + }, + pollUntilDone: (pollOptions) => resultPromise !== null && resultPromise !== void 0 ? resultPromise : resultPromise = (async () => { + const { abortSignal: inputAbortSignal } = pollOptions || {}; + function abortListener() { + abortController.abort(); + } + const abortSignal = abortController.signal; + if (inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.aborted) { + abortController.abort(); + } else if (!abortSignal.aborted) { + inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.addEventListener("abort", abortListener, { once: true }); + } + try { + if (!poller.isDone()) { + await poller.poll({ abortSignal }); + while (!poller.isDone()) { + await (0, core_util_1.delay)(currentPollIntervalInMs, { abortSignal }); + await poller.poll({ abortSignal }); + } + } + } finally { + inputAbortSignal === null || inputAbortSignal === void 0 ? 
void 0 : inputAbortSignal.removeEventListener("abort", abortListener); + } + if (resolveOnUnsuccessful) { + return poller.getResult(); + } else { + switch (state.status) { + case "succeeded": + return poller.getResult(); + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + case "notStarted": + case "running": + throw new Error(`Polling completed without succeeding or failing`); + } + } + })().finally(() => { + resultPromise = void 0; + }), + async poll(pollOptions) { + if (resolveOnUnsuccessful) { + if (poller.isDone()) + return; + } else { + switch (state.status) { + case "succeeded": + return; + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + } + } + await (0, operation_js_1.pollOperation)({ + poll, + state, + stateProxy, + getOperationLocation, + isOperationError, + withOperationLocation, + getPollingInterval, + getOperationStatus: getStatusFromPollResponse, + getResourceLocation, + processResult, + getError, + updateState, + options: pollOptions, + setDelay: (pollIntervalInMs) => { + currentPollIntervalInMs = pollIntervalInMs; + }, + setErrorAsResult: !resolveOnUnsuccessful + }); + await handleProgressEvents(); + if (!resolveOnUnsuccessful) { + switch (state.status) { + case "canceled": + throw new Error(cancelErrMsg); + case "failed": + throw state.error; + } + } + } + }; + return poller; + }; + } + exports2.buildCreatePoller = buildCreatePoller; + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/http/poller.js +var require_poller2 = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/http/poller.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createHttpPoller = void 0; + var operation_js_1 = require_operation2(); + var poller_js_1 = require_poller(); + async function createHttpPoller(lro, options) { + const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false } = options || {}; + return (0, poller_js_1.buildCreatePoller)({ + getStatusFromInitialResponse: operation_js_1.getStatusFromInitialResponse, + getStatusFromPollResponse: operation_js_1.getOperationStatus, + isOperationError: operation_js_1.isOperationError, + getOperationLocation: operation_js_1.getOperationLocation, + getResourceLocation: operation_js_1.getResourceLocation, + getPollingInterval: operation_js_1.parseRetryAfter, + getError: operation_js_1.getErrorFromResponse, + resolveOnUnsuccessful + })({ + init: async () => { + const response = await lro.sendInitialRequest(); + const config = (0, operation_js_1.inferLroMode)({ + rawResponse: response.rawResponse, + requestPath: lro.requestPath, + requestMethod: lro.requestMethod, + resourceLocationConfig + }); + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, (config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}); + }, + poll: lro.sendPollRequest + }, { + intervalInMs, + withOperationLocation, + restoreFrom, + updateState, + processResult: processResult ? 
({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse + }); + } + exports2.createHttpPoller = createHttpPoller; + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js +var require_operation3 = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.GenericPollOperation = void 0; + var operation_js_1 = require_operation2(); + var logger_js_1 = require_logger2(); + var createStateProxy = () => ({ + initState: (config) => ({ config, isStarted: true }), + setCanceled: (state) => state.isCancelled = true, + setError: (state, error3) => state.error = error3, + setResult: (state, result) => state.result = result, + setRunning: (state) => state.isStarted = true, + setSucceeded: (state) => state.isCompleted = true, + setFailed: () => { + }, + getError: (state) => state.error, + getResult: (state) => state.result, + isCanceled: (state) => !!state.isCancelled, + isFailed: (state) => !!state.error, + isRunning: (state) => !!state.isStarted, + isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error) + }); + var GenericPollOperation = class { + constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { + this.state = state; + this.lro = lro; + this.setErrorAsResult = setErrorAsResult; + this.lroResourceLocationConfig = lroResourceLocationConfig; + this.processResult = processResult; + this.updateState = updateState; + this.isDone = isDone; + } + setPollerConfig(pollerConfig) { + this.pollerConfig = pollerConfig; + } + async update(options) { + var _a; + const stateProxy = createStateProxy(); + if (!this.state.isStarted) { + this.state = Object.assign(Object.assign({}, this.state), await (0, operation_js_1.initHttpOperation)({ + lro: this.lro, + stateProxy, + resourceLocationConfig: this.lroResourceLocationConfig, + processResult: this.processResult, + setErrorAsResult: this.setErrorAsResult + })); + } + const updateState = this.updateState; + const isDone = this.isDone; + if (!this.state.isCompleted && this.state.error === void 0) { + await (0, operation_js_1.pollHttpOperation)({ + lro: this.lro, + state: this.state, + stateProxy, + processResult: this.processResult, + updateState: updateState ? (state, { rawResponse }) => updateState(state, rawResponse) : void 0, + isDone: isDone ? ({ flatResponse }, state) => isDone(flatResponse, state) : void 0, + options, + setDelay: (intervalInMs) => { + this.pollerConfig.intervalInMs = intervalInMs; + }, + setErrorAsResult: this.setErrorAsResult + }); + } + (_a = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a === void 0 ? void 0 : _a.call(options, this.state); + return this; + } + async cancel() { + logger_js_1.logger.error("`cancelOperation` is deprecated because it wasn't implemented"); + return this; + } + /** + * Serializes the Poller operation. 
+ */ + toString() { + return JSON.stringify({ + state: this.state + }); + } + }; + exports2.GenericPollOperation = GenericPollOperation; + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js +var require_poller3 = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Poller = exports2.PollerCancelledError = exports2.PollerStoppedError = void 0; + var PollerStoppedError = class _PollerStoppedError extends Error { + constructor(message) { + super(message); + this.name = "PollerStoppedError"; + Object.setPrototypeOf(this, _PollerStoppedError.prototype); + } + }; + exports2.PollerStoppedError = PollerStoppedError; + var PollerCancelledError = class _PollerCancelledError extends Error { + constructor(message) { + super(message); + this.name = "PollerCancelledError"; + Object.setPrototypeOf(this, _PollerCancelledError.prototype); + } + }; + exports2.PollerCancelledError = PollerCancelledError; + var Poller = class { + /** + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. + * + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; + * + * const operation = { + * state, + * update, + * cancel, + * toString + * } + * + * // Sending the operation to the parent's constructor. + * super(operation); + * + * // You can assign more local properties here. + * } + * } + * ``` + * + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. + * + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. + */ + constructor(operation) { + this.resolveOnUnsuccessful = false; + this.stopped = true; + this.pollProgressCallbacks = []; + this.operation = operation; + this.promise = new Promise((resolve2, reject) => { + this.resolve = resolve2; + this.reject = reject; + }); + this.promise.catch(() => { + }); + } + /** + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. 
+ */ + async startPolling(pollOptions = {}) { + if (this.stopped) { + this.stopped = false; + } + while (!this.isStopped() && !this.isDone()) { + await this.poll(pollOptions); + await this.delay(); + } + } + /** + * pollOnce performs a single poll by calling the update method of the underlying + * poll operation, making any relevant change effective. + * + * It optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + async pollOnce(options = {}) { + if (!this.isDone()) { + this.operation = await this.operation.update({ + abortSignal: options.abortSignal, + fireProgress: this.fireProgress.bind(this) + }); + } + this.processUpdatedState(); + } + /** + * fireProgress calls the functions passed in via the poller's onProgress method. + * + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. + * + * @param state - The current operation state. + */ + fireProgress(state) { + for (const callback of this.pollProgressCallbacks) { + callback(state); + } + } + /** + * Invokes the underlying operation's cancel method. + */ + async cancelOnce(options = {}) { + this.operation = await this.operation.cancel(options); + } + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + * + * It optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + poll(options = {}) { + if (!this.pollOncePromise) { + this.pollOncePromise = this.pollOnce(options); + const clearPollOncePromise = () => { + this.pollOncePromise = void 0; + }; + this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); + } + return this.pollOncePromise; + } + processUpdatedState() { + if (this.operation.state.error) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + this.reject(this.operation.state.error); + throw this.operation.state.error; + } + } + if (this.operation.state.isCancelled) { + this.stopped = true; + if (!this.resolveOnUnsuccessful) { + const error3 = new PollerCancelledError("Operation was canceled"); + this.reject(error3); + throw error3; + } + } + if (this.isDone() && this.resolve) { + this.resolve(this.getResult()); + } + } + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + async pollUntilDone(pollOptions = {}) { + if (this.stopped) { + this.startPolling(pollOptions).catch(this.reject); + } + this.processUpdatedState(); + return this.promise; + } + /** + * Invokes the provided callback after each poll completes, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback) { + this.pollProgressCallbacks.push(callback); + return () => { + this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); + }; + } + /** + * Returns true if the poller has finished polling. + */ + isDone() { + const state = this.operation.state; + return Boolean(state.isCompleted || state.isCancelled || state.error); + } + /** + * Stops the poller from continuing to poll. 
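+ * Editor's note (illustrative, not upstream text): after `stopPolling()`, a pending `pollUntilDone()` promise rejects with `PollerStoppedError`, so callers typically guard with `err.name === "PollerStoppedError"`.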
+ */ + stopPolling() { + if (!this.stopped) { + this.stopped = true; + if (this.reject) { + this.reject(new PollerStoppedError("This poller is already stopped")); + } + } + } + /** + * Returns true if the poller is stopped. + */ + isStopped() { + return this.stopped; + } + /** + * Attempts to cancel the underlying operation. + * + * It optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * If it's called again before it finishes, it will throw an error. + * + * @param options - Optional properties passed to the operation's update method. + */ + cancelOperation(options = {}) { + if (!this.cancelPromise) { + this.cancelPromise = this.cancelOnce(options); + } else if (options.abortSignal) { + throw new Error("A cancel request is currently pending"); + } + return this.cancelPromise; + } + /** + * Returns the state of the operation. + * + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * poller implementations can customize what's shared with the public by writing their own + * version of the `getOperationState` method and by defining two types: one representing the internal state of the poller, + * and a public type representing a safe-to-share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. + * + * Example: + * + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } + * + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } + * + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... + * + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, + * + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository; go to the file + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. + */ + getOperationState() { + return this.operation.state; + } + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult() { + const state = this.operation.state; + return state.result; + } + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. 
+ */ + toString() { + return this.operation.toString(); + } + }; + exports2.Poller = Poller; + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js +var require_lroEngine = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.LroEngine = void 0; + var operation_js_1 = require_operation3(); + var constants_js_1 = require_constants17(); + var poller_js_1 = require_poller3(); + var operation_js_2 = require_operation(); + var LroEngine = class extends poller_js_1.Poller { + constructor(lro, options) { + const { intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState } = options || {}; + const state = resumeFrom ? (0, operation_js_2.deserializeState)(resumeFrom) : {}; + const operation = new operation_js_1.GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); + super(operation); + this.resolveOnUnsuccessful = resolveOnUnsuccessful; + this.config = { intervalInMs }; + operation.setPollerConfig(this.config); + } + /** + * The method used by the poller to wait before attempting to update its operation. + */ + delay() { + return new Promise((resolve2) => setTimeout(() => resolve2(), this.config.intervalInMs)); + } + }; + exports2.LroEngine = LroEngine; + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js +var require_lroEngine2 = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.LroEngine = void 0; + var lroEngine_js_1 = require_lroEngine(); + Object.defineProperty(exports2, "LroEngine", { enumerable: true, get: function() { + return lroEngine_js_1.LroEngine; + } }); + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js +var require_pollOperation = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// node_modules/@azure/core-lro/dist/commonjs/index.js +var require_commonjs14 = __commonJS({ + "node_modules/@azure/core-lro/dist/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createHttpPoller = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var poller_js_1 = require_poller2(); + Object.defineProperty(exports2, "createHttpPoller", { enumerable: true, get: function() { + return poller_js_1.createHttpPoller; + } }); + tslib_1.__exportStar(require_lroEngine2(), exports2); + tslib_1.__exportStar(require_poller3(), exports2); + tslib_1.__exportStar(require_pollOperation(), exports2); + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/pollers/BlobStartCopyFromUrlPoller.js +var require_BlobStartCopyFromUrlPoller = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/pollers/BlobStartCopyFromUrlPoller.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobBeginCopyFromUrlPoller = void 0; + var core_util_1 = require_commonjs4(); + var core_lro_1 = require_commonjs14(); + var BlobBeginCopyFromUrlPoller = class extends core_lro_1.Poller { + intervalInMs; + 
constructor(options) { + const { blobClient, copySource, intervalInMs = 15e3, onProgress, resumeFrom, startCopyFromURLOptions } = options; + let state; + if (resumeFrom) { + state = JSON.parse(resumeFrom).state; + } + const operation = makeBlobBeginCopyFromURLPollOperation({ + ...state, + blobClient, + copySource, + startCopyFromURLOptions + }); + super(operation); + if (typeof onProgress === "function") { + this.onProgress(onProgress); + } + this.intervalInMs = intervalInMs; + } + delay() { + return (0, core_util_1.delay)(this.intervalInMs); + } + }; + exports2.BlobBeginCopyFromUrlPoller = BlobBeginCopyFromUrlPoller; + var cancel = async function cancel2(options = {}) { + const state = this.state; + const { copyId } = state; + if (state.isCompleted) { + return makeBlobBeginCopyFromURLPollOperation(state); + } + if (!copyId) { + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); + } + await state.blobClient.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal + }); + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); + }; + var update = async function update2(options = {}) { + const state = this.state; + const { blobClient, copySource, startCopyFromURLOptions } = state; + if (!state.isStarted) { + state.isStarted = true; + const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); + state.copyId = result.copyId; + if (result.copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + } else if (!state.isCompleted) { + try { + const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); + const { copyStatus, copyProgress } = result; + const prevCopyProgress = state.copyProgress; + if (copyProgress) { + state.copyProgress = copyProgress; + } + if (copyStatus === "pending" && copyProgress !== prevCopyProgress && typeof options.fireProgress === "function") { + options.fireProgress(state); + } else if (copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } else if (copyStatus === "failed") { + state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); + state.isCompleted = true; + } + } catch (err) { + state.error = err; + state.isCompleted = true; + } + } + return makeBlobBeginCopyFromURLPollOperation(state); + }; + var toString2 = function toString3() { + return JSON.stringify({ state: this.state }, (key, value) => { + if (key === "blobClient") { + return void 0; + } + return value; + }); + }; + function makeBlobBeginCopyFromURLPollOperation(state) { + return { + state: { ...state }, + cancel, + toString: toString2, + update + }; + } + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/Range.js +var require_Range = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/Range.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.rangeToString = rangeToString; + function rangeToString(iRange) { + if (iRange.offset < 0) { + throw new RangeError(`Range.offset cannot be smaller than 0.`); + } + if (iRange.count && iRange.count <= 0) { + throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); + } + return iRange.count ? 
`bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` : `bytes=${iRange.offset}-`; + } + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/utils/Batch.js +var require_Batch = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/utils/Batch.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Batch = void 0; + var events_1 = require("events"); + var BatchStates; + (function(BatchStates2) { + BatchStates2[BatchStates2["Good"] = 0] = "Good"; + BatchStates2[BatchStates2["Error"] = 1] = "Error"; + })(BatchStates || (BatchStates = {})); + var Batch = class { + /** + * Concurrency. Must be larger than 0. + */ + concurrency; + /** + * Number of active operations under execution. + */ + actives = 0; + /** + * Number of operations that have completed execution. + */ + completed = 0; + /** + * Offset of next operation to be executed. + */ + offset = 0; + /** + * Operation array to be executed. + */ + operations = []; + /** + * State of the Batch. When an error happens, the state turns to Error and the + * Batch stops executing the remaining operations. + */ + state = BatchStates.Good; + /** + * A private emitter used to pass events inside this class. + */ + emitter; + /** + * Creates an instance of Batch. + * @param concurrency - + */ + constructor(concurrency = 5) { + if (concurrency < 1) { + throw new RangeError("concurrency must be larger than 0"); + } + this.concurrency = concurrency; + this.emitter = new events_1.EventEmitter(); + } + /** + * Adds an operation to the queue. + * + * @param operation - + */ + addOperation(operation) { + this.operations.push(async () => { + try { + this.actives++; + await operation(); + this.actives--; + this.completed++; + this.parallelExecute(); + } catch (error3) { + this.emitter.emit("error", error3); + } + }); + } + /** + * Starts executing the operations in the queue. + * + */ + async do() { + if (this.operations.length === 0) { + return Promise.resolve(); + } + this.parallelExecute(); + return new Promise((resolve2, reject) => { + this.emitter.on("finish", resolve2); + this.emitter.on("error", (error3) => { + this.state = BatchStates.Error; + reject(error3); + }); + }); + } + /** + * Gets the next operation to be executed. Returns null when reaching the end. + * + */ + nextOperation() { + if (this.offset < this.operations.length) { + return this.operations[this.offset++]; + } + return null; + } + /** + * Starts executing operations. One of the most important differences between + * this method and do() is that do() wraps it as an async method. 
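+ * Editor's sketch (hypothetical `tasks` array of async functions, not from the upstream source): const batch = new Batch(3); for (const task of tasks) batch.addOperation(task); await batch.do(); // resolves once every operation has finished, rejects on the first error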
+ * + */ + parallelExecute() { + if (this.state === BatchStates.Error) { + return; + } + if (this.completed >= this.operations.length) { + this.emitter.emit("finish"); + return; + } + while (this.actives < this.concurrency) { + const operation = this.nextOperation(); + if (operation) { + operation(); + } else { + return; + } + } + } + }; + exports2.Batch = Batch; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/utils/utils.js +var require_utils7 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/utils/utils.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.fsCreateReadStream = exports2.fsStat = void 0; + exports2.streamToBuffer = streamToBuffer; + exports2.streamToBuffer2 = streamToBuffer2; + exports2.streamToBuffer3 = streamToBuffer3; + exports2.readStreamToLocalFile = readStreamToLocalFile; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); + var node_fs_1 = tslib_1.__importDefault(require("node:fs")); + var node_util_1 = tslib_1.__importDefault(require("node:util")); + var constants_js_1 = require_constants15(); + async function streamToBuffer(stream, buffer, offset, end, encoding) { + let pos = 0; + const count = end - offset; + return new Promise((resolve2, reject) => { + const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), constants_js_1.REQUEST_TIMEOUT); + stream.on("readable", () => { + if (pos >= count) { + clearTimeout(timeout); + resolve2(); + return; + } + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; + buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); + pos += chunkLength; + }); + stream.on("end", () => { + clearTimeout(timeout); + if (pos < count) { + reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); + } + resolve2(); + }); + stream.on("error", (msg) => { + clearTimeout(timeout); + reject(msg); + }); + }); + } + async function streamToBuffer2(stream, buffer, encoding) { + let pos = 0; + const bufferSize = buffer.length; + return new Promise((resolve2, reject) => { + stream.on("readable", () => { + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + if (pos + chunk.length > bufferSize) { + reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); + return; + } + buffer.fill(chunk, pos, pos + chunk.length); + pos += chunk.length; + }); + stream.on("end", () => { + resolve2(pos); + }); + stream.on("error", reject); + }); + } + async function streamToBuffer3(readableStream, encoding) { + return new Promise((resolve2, reject) => { + const chunks = []; + readableStream.on("data", (data) => { + chunks.push(typeof data === "string" ? 
Buffer.from(data, encoding) : data); + }); + readableStream.on("end", () => { + resolve2(Buffer.concat(chunks)); + }); + readableStream.on("error", reject); + }); + } + async function readStreamToLocalFile(rs, file) { + return new Promise((resolve2, reject) => { + const ws = node_fs_1.default.createWriteStream(file); + rs.on("error", (err) => { + reject(err); + }); + ws.on("error", (err) => { + reject(err); + }); + ws.on("close", resolve2); + rs.pipe(ws); + }); + } + exports2.fsStat = node_util_1.default.promisify(node_fs_1.default.stat); + exports2.fsCreateReadStream = node_fs_1.default.createReadStream; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/Clients.js +var require_Clients = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/Clients.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.PageBlobClient = exports2.BlockBlobClient = exports2.AppendBlobClient = exports2.BlobClient = void 0; + var core_rest_pipeline_1 = require_commonjs6(); + var core_auth_1 = require_commonjs7(); + var core_util_1 = require_commonjs4(); + var core_util_2 = require_commonjs4(); + var BlobDownloadResponse_js_1 = require_BlobDownloadResponse(); + var BlobQueryResponse_js_1 = require_BlobQueryResponse(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var models_js_1 = require_models2(); + var PageBlobRangeResponse_js_1 = require_PageBlobRangeResponse(); + var Pipeline_js_1 = require_Pipeline(); + var BlobStartCopyFromUrlPoller_js_1 = require_BlobStartCopyFromUrlPoller(); + var Range_js_1 = require_Range(); + var StorageClient_js_1 = require_StorageClient(); + var Batch_js_1 = require_Batch(); + var storage_common_1 = require_commonjs13(); + var constants_js_1 = require_constants15(); + var tracing_js_1 = require_tracing(); + var utils_common_js_1 = require_utils_common(); + var utils_js_1 = require_utils7(); + var BlobSASSignatureValues_js_1 = require_BlobSASSignatureValues(); + var BlobLeaseClient_js_1 = require_BlobLeaseClient(); + var BlobClient = class _BlobClient extends StorageClient_js_1.StorageClient { + /** + * blobContext provided by protocol layer. + */ + blobContext; + _name; + _containerName; + _versionId; + _snapshot; + /** + * The name of the blob. + */ + get name() { + return this._name; + } + /** + * The name of the storage container the blob is associated with. 
+ */ + get containerName() { + return this._containerName; + } + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + options = options || {}; + let pipeline; + let url; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); + } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { + url = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); + } + pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); + } else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } else if (extractedCreds.kind === "SASConnString") { + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + } else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + ({ blobName: this._name, containerName: this._containerName } = this.getBlobAndContainerNamesFromUrl()); + this.blobContext = this.storageClientContext.blob; + this._snapshot = (0, utils_common_js_1.getURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT); + this._versionId = (0, utils_common_js_1.getURLParameter)(this.url, constants_js_1.URLConstants.Parameters.VERSIONID); + } + /** + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Providing "" will remove the snapshot and return a client to the base blob. 
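+ * Editor's sketch (hypothetical timestamp value, not upstream text): const snapClient = blobClient.withSnapshot("2024-01-01T00:00:00.0000000Z"); const baseClient = snapClient.withSnapshot(""); // "" strips the snapshot parameter again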
+ * + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + */ + withSnapshot(snapshot) { + return new _BlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); + } + /** + * Creates a new BlobClient object pointing to a version of this blob. + * Providing "" will remove the versionId and return a client to the base blob. + * + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. + */ + withVersion(versionId) { + return new _BlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.VERSIONID, versionId.length === 0 ? void 0 : versionId), this.pipeline); + } + /** + * Creates an AppendBlobClient object. + * + */ + getAppendBlobClient() { + return new AppendBlobClient(this.url, this.pipeline); + } + /** + * Creates a BlockBlobClient object. + * + */ + getBlockBlobClient() { + return new BlockBlobClient(this.url, this.pipeline); + } + /** + * Creates a PageBlobClient object. + * + */ + getPageBlobClient() { + return new PageBlobClient(this.url, this.pipeline); + } + /** + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. + * + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody + * + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. + * + * + * Example usage (Node.js): + * + * ```ts snippet:ReadmeSampleDownloadBlob_Node + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobClient = containerClient.getBlobClient(blobName); + * + * // Get blob content from position 0 to the end + * // In Node.js, get downloaded data by accessing downloadBlockBlobResponse.readableStreamBody + * const downloadBlockBlobResponse = await blobClient.download(); + * if (downloadBlockBlobResponse.readableStreamBody) { + * const downloaded = await streamToString(downloadBlockBlobResponse.readableStreamBody); + * console.log(`Downloaded blob content: ${downloaded}`); + * } + * + * async function streamToString(stream: NodeJS.ReadableStream): Promise<string> { + * const result = await new Promise<Buffer>((resolve, reject) => { + * const chunks: Buffer[] = []; + * stream.on("data", (data) => { + * chunks.push(Buffer.isBuffer(data) ? 
data : Buffer.from(data)); + * }); + * stream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * stream.on("error", reject); + * }); + * return result.toString(); + * } + * ``` + * + * Example usage (browser): + * + * ```ts snippet:ReadmeSampleDownloadBlob_Browser + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobClient = containerClient.getBlobClient(blobName); + * + * // Get blob content from position 0 to the end + * // In browsers, get downloaded data by accessing downloadBlockBlobResponse.blobBody + * const downloadBlockBlobResponse = await blobClient.download(); + * const blobBody = await downloadBlockBlobResponse.blobBody; + * if (blobBody) { + * const downloaded = await blobBody.text(); + * console.log(`Downloaded blob content: ${downloaded}`); + * } + * ``` + */ + async download(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlobClient-download", options, async (updatedOptions) => { + const res = (0, utils_common_js_1.assertResponse)(await this.blobContext.download({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + requestOptions: { + onDownloadProgress: core_util_1.isNodeLike ? void 0 : options.onProgress + // for Node.js, progress is reported by RetriableReadableStream + }, + range: offset === 0 && !count ? 
void 0 : (0, Range_js_1.rangeToString)({ offset, count }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions + })); + const wrappedRes = { + ...res, + _response: res._response, + // _response is made non-enumerable + objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, + objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(res.objectReplicationRules) + }; + if (!core_util_1.isNodeLike) { + return wrappedRes; + } + if (options.maxRetryRequests === void 0 || options.maxRetryRequests < 0) { + options.maxRetryRequests = constants_js_1.DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; + } + if (res.contentLength === void 0) { + throw new RangeError(`File download response doesn't contain valid content length header`); + } + if (!res.etag) { + throw new RangeError(`File download response doesn't contain valid etag header`); + } + return new BlobDownloadResponse_js_1.BlobDownloadResponse(wrappedRes, async (start) => { + const updatedDownloadOptions = { + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ifMatch: options.conditions.ifMatch || res.etag, + ifModifiedSince: options.conditions.ifModifiedSince, + ifNoneMatch: options.conditions.ifNoneMatch, + ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, + ifTags: options.conditions?.tagConditions + }, + range: (0, Range_js_1.rangeToString)({ + count: offset + res.contentLength - start, + offset: start + }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey + }; + return (await this.blobContext.download({ + abortSignal: options.abortSignal, + ...updatedDownloadOptions + })).readableStreamBody; + }, offset, res.contentLength, { + maxRetryRequests: options.maxRetryRequests, + onProgress: options.onProgress + }); + }); + } + /** + * Returns true if the Azure blob resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Conversely, new blobs might be added by other clients or applications after this + * function completes. + * + * @param options - options to Exists operation. + */ + async exists(options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-exists", options, async (updatedOptions) => { + try { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + await this.getProperties({ + abortSignal: options.abortSignal, + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions + }); + return true; + } catch (e) { + if (e.statusCode === 404) { + return false; + } else if (e.statusCode === 409 && (e.details.errorCode === constants_js_1.BlobUsesCustomerSpecifiedEncryptionMsg || e.details.errorCode === constants_js_1.BlobDoesNotUseCustomerSpecifiedEncryption)) { + return true; + } + throw e; + } + }); + } + /** + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. 
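+ * Editor's sketch (illustrative only, not upstream text): const props = await blobClient.getProperties(); // e.g. props.contentLength, props.etag, props.metadata; note the metadata keys arrive lowercased, per the warning below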
+ * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Optional options to Get Properties operation. + */ + async getProperties(options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlobClient-getProperties", options, async (updatedOptions) => { + const res = (0, utils_common_js_1.assertResponse)(await this.blobContext.getProperties({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions + })); + return { + ...res, + _response: res._response, + // _response is made non-enumerable + objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, + objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(res.objectReplicationRules) + }; + }); + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://learn.microsoft.com/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async delete(options = {}) { + options.conditions = options.conditions || {}; + return tracing_js_1.tracingClient.withSpan("BlobClient-delete", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.delete({ + abortSignal: options.abortSignal, + deleteSnapshots: options.deleteSnapshots, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://learn.microsoft.com/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async deleteIfExists(options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-deleteIfExists", options, async (updatedOptions) => { + try { + const res = (0, utils_common_js_1.assertResponse)(await this.delete(updatedOptions)); + return { + succeeded: true, + ...res, + _response: res._response + // _response is made non-enumerable + }; + } catch (e) { + if (e.details?.errorCode === "BlobNotFound") { + return { + succeeded: false, + ...e.response?.parsedHeaders, + _response: e.response + }; + } + throw e; + } + }); + } + /** + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. 
Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://learn.microsoft.com/rest/api/storageservices/undelete-blob + * + * @param options - Optional options to Blob Undelete operation. + */ + async undelete(options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-undelete", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.undelete({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Sets system properties on the blob. + * + * If no value provided, or no value provided for the specified blob HTTP headers, + * these blob HTTP headers without a value will be cleared. + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-properties + * + * @param blobHTTPHeaders - If no value provided, or no value provided for + * the specified blob HTTP headers, these blob HTTP + * headers without a value will be cleared. + * A common header to set is `blobContentType` + * enabling the browser to provide functionality + * based on file type. + * @param options - Optional options to Blob Set HTTP Headers operation. + */ + async setHTTPHeaders(blobHTTPHeaders, options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlobClient-setHTTPHeaders", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setHttpHeaders({ + abortSignal: options.abortSignal, + blobHttpHeaders: blobHTTPHeaders, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger. + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Sets user-defined metadata for the specified blob as one or more name-value pairs. + * + * If no option provided, or no metadata defined in the parameter, the blob + * metadata will be removed. + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Optional options to Set Metadata operation. + */ + async setMetadata(metadata, options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlobClient-setMetadata", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setMetadata({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Sets tags on the underlying blob. + * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. 
+ * Valid tag key and value characters include lower and upper case letters, digits (0-9), + * space (' '), plus ('+'), minus ('-'), period ('.'), forward slash ('/'), colon (':'), equals ('='), and underscore ('_'). + * + * @param tags - + * @param options - + */ + async setTags(tags, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-setTags", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setTags({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions, + tags: (0, utils_common_js_1.toBlobTags)(tags) + })); + }); + } + /** + * Gets the tags associated with the underlying blob. + * + * @param options - + */ + async getTags(options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-getTags", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.blobContext.getTags({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + })); + const wrappedResponse = { + ...response, + _response: response._response, + // _response is made non-enumerable + tags: (0, utils_common_js_1.toTags)({ blobTagSet: response.blobTagSet }) || {} + }; + return wrappedResponse; + }); + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the blob. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the blob. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient_js_1.BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a read-only snapshot of a blob. + * @see https://learn.microsoft.com/rest/api/storageservices/snapshot-blob + * + * @param options - Optional options to the Blob Create Snapshot operation. + */ + async createSnapshot(options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlobClient-createSnapshot", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.createSnapshot({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Asynchronously copies a blob to a destination within the storage account. + * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. + * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. + * Note that the onProgress callback will not be invoked if the operation completes in the first + * request, and attempting to cancel a completed copy will result in an error being thrown. + * + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. 
+ * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://learn.microsoft.com/rest/api/storageservices/copy-blob + * + * ```ts snippet:ClientsBeginCopyFromURL + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobClient = containerClient.getBlobClient(blobName); + * + * // Example using automatic polling + * const automaticCopyPoller = await blobClient.beginCopyFromURL("url"); + * const automaticResult = await automaticCopyPoller.pollUntilDone(); + * + * // Example using manual polling + * const manualCopyPoller = await blobClient.beginCopyFromURL("url"); + * while (!manualCopyPoller.isDone()) { + * await manualCopyPoller.poll(); + * } + * const manualResult = manualCopyPoller.getResult(); + * + * // Example using progress updates + * const progressUpdatesCopyPoller = await blobClient.beginCopyFromURL("url", { + * onProgress(state) { + * console.log(`Progress: ${state.copyProgress}`); + * }, + * }); + * const progressUpdatesResult = await progressUpdatesCopyPoller.pollUntilDone(); + * + * // Example using a changing polling interval (default 15 seconds) + * const pollingIntervalCopyPoller = await blobClient.beginCopyFromURL("url", { + * intervalInMs: 1000, // poll blob every 1 second for copy progress + * }); + * const pollingIntervalResult = await pollingIntervalCopyPoller.pollUntilDone(); + * + * // Example using copy cancellation: + * const cancelCopyPoller = await blobClient.beginCopyFromURL("url"); + * // cancel operation after starting it. + * try { + * await cancelCopyPoller.cancelOperation(); + * // calls to get the result now throw PollerCancelledError + * cancelCopyPoller.getResult(); + * } catch (err: any) { + * if (err.name === "PollerCancelledError") { + * console.log("The copy was cancelled."); + * } + * } + * ``` + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. + */ + async beginCopyFromURL(copySource, options = {}) { + const client = { + abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), + getProperties: (...args) => this.getProperties(...args), + startCopyFromURL: (...args) => this.startCopyFromURL(...args) + }; + const poller = new BlobStartCopyFromUrlPoller_js_1.BlobBeginCopyFromUrlPoller({ + blobClient: client, + copySource, + intervalInMs: options.intervalInMs, + onProgress: options.onProgress, + resumeFrom: options.resumeFrom, + startCopyFromURLOptions: options + }); + await poller.poll(); + return poller; + } + /** + * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero + * length and full metadata. Version 2012-02-12 and newer. + * @see https://learn.microsoft.com/rest/api/storageservices/abort-copy-blob + * + * @param copyId - Id of the Copy From URL operation. + * @param options - Optional options to the Blob Abort Copy From URL operation. 
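+ * + * Editor's sketch (hypothetical `srcUrl`, not upstream text): const poller = await blobClient.beginCopyFromURL(srcUrl); await poller.cancelOperation(); // the poller's cancel path calls abortCopyFromURL with the copyId captured from startCopyFromURL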
+ */ + async abortCopyFromURL(copyId, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-abortCopyFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not + * return a response until the copy is complete. + * @see https://learn.microsoft.com/rest/api/storageservices/copy-blob-from-url + * + * @param copySource - The source URL to copy from. A Shared Access Signature (SAS) may be needed for authentication. + * @param options - + */ + async syncCopyFromURL(copySource, options = {}) { + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + return tracing_js_1.tracingClient.withSpan("BlobClient-syncCopyFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.copyFromURL(copySource, { + abortSignal: options.abortSignal, + metadata: options.metadata, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions?.ifMatch, + sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince + }, + sourceContentMD5: options.sourceContentMD5, + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, + legalHold: options.legalHold, + encryptionScope: options.encryptionScope, + copySourceTags: options.copySourceTags, + fileRequestIntent: options.sourceShareTokenIntent, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-tier + * + * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. + * @param options - Optional options to the Blob Set Tier operation. 
+ */ + async setAccessTier(tier, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-setAccessTier", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setTier((0, models_js_1.toAccessTier)(tier), { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + rehydratePriority: options.rehydratePriority, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + async downloadToBuffer(param1, param2, param3, param4 = {}) { + let buffer; + let offset = 0; + let count = 0; + let options = param4; + if (param1 instanceof Buffer) { + buffer = param1; + offset = param2 || 0; + count = typeof param3 === "number" ? param3 : 0; + } else { + offset = typeof param1 === "number" ? param1 : 0; + count = typeof param2 === "number" ? param2 : 0; + options = param3 || {}; } - case "ResourceLocation": { - return getLocationHeader(rawResponse); + let blockSize = options.blockSize ?? 0; + if (blockSize < 0) { + throw new RangeError("blockSize option must be >= 0"); } - case "Body": - default: { - return void 0; + if (blockSize === 0) { + blockSize = constants_js_1.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; } - } - } - exports2.getOperationLocation = getOperationLocation; - function getOperationStatus({ rawResponse }, state) { - var _a; - const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; - switch (mode) { - case "OperationLocation": { - return getStatus(rawResponse); + if (offset < 0) { + throw new RangeError("offset option must be >= 0"); } - case "ResourceLocation": { - return toOperationStatus(rawResponse.statusCode); + if (count && count <= 0) { + throw new RangeError("count option must be greater than 0"); } - case "Body": { - return getProvisioningState(rawResponse); + if (!options.conditions) { + options.conditions = {}; } - default: - throw new Error(`Internal error: Unexpected operation mode: ${mode}`); - } - } - exports2.getOperationStatus = getOperationStatus; - function accessBodyProperty({ flatResponse, rawResponse }, prop) { - var _a, _b; - return (_a = flatResponse === null || flatResponse === void 0 ? void 0 : flatResponse[prop]) !== null && _a !== void 0 ? _a : (_b = rawResponse.body) === null || _b === void 0 ? void 0 : _b[prop]; - } - function getResourceLocation(res, state) { - const loc = accessBodyProperty(res, "resourceLocation"); - if (loc && typeof loc === "string") { - state.config.resourceLocation = loc; - } - return state.config.resourceLocation; - } - exports2.getResourceLocation = getResourceLocation; - function isOperationError(e) { - return e.name === "RestError"; - } - exports2.isOperationError = isOperationError; - async function pollHttpOperation(inputs) { - const { lro, stateProxy, options, processResult, updateState, setDelay, state, setErrorAsResult } = inputs; - return (0, operation_js_1.pollOperation)({ - state, - stateProxy, - setDelay, - processResult: processResult ? ({ flatResponse }, inputState) => processResult(flatResponse, inputState) : ({ flatResponse }) => flatResponse, - getError: getErrorFromResponse, - updateState, - getPollingInterval: parseRetryAfter, - getOperationLocation, - getOperationStatus, - isOperationError, - getResourceLocation, - options, - /** - * The expansion here is intentional because `lro` could be an object that - * references an inner this, so we need to preserve a reference to it. 
- */ - poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions), - setErrorAsResult - }); - } - exports2.pollHttpOperation = pollHttpOperation; - } -}); - -// node_modules/@azure/core-lro/dist/commonjs/poller/poller.js -var require_poller = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/poller/poller.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.buildCreatePoller = void 0; - var operation_js_1 = require_operation(); - var constants_js_1 = require_constants17(); - var core_util_1 = require_commonjs4(); - var createStateProxy = () => ({ - /** - * The state at this point is created to be of type OperationState. - * It will be updated later to be of type TState when the - * customer-provided callback, `updateState`, is called during polling. - */ - initState: (config) => ({ status: "running", config }), - setCanceled: (state) => state.status = "canceled", - setError: (state, error3) => state.error = error3, - setResult: (state, result) => state.result = result, - setRunning: (state) => state.status = "running", - setSucceeded: (state) => state.status = "succeeded", - setFailed: (state) => state.status = "failed", - getError: (state) => state.error, - getResult: (state) => state.result, - isCanceled: (state) => state.status === "canceled", - isFailed: (state) => state.status === "failed", - isRunning: (state) => state.status === "running", - isSucceeded: (state) => state.status === "succeeded" - }); - function buildCreatePoller(inputs) { - const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, isOperationError, getResourceLocation, getPollingInterval, getError, resolveOnUnsuccessful } = inputs; - return async ({ init, poll }, options) => { - const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, restoreFrom } = options || {}; - const stateProxy = createStateProxy(); - const withOperationLocation = withOperationLocationCallback ? /* @__PURE__ */ (() => { - let called = false; - return (operationLocation, isUpdated) => { - if (isUpdated) - withOperationLocationCallback(operationLocation); - else if (!called) - withOperationLocationCallback(operationLocation); - called = true; - }; - })() : void 0; - const state = restoreFrom ? (0, operation_js_1.deserializeState)(restoreFrom) : await (0, operation_js_1.initOperation)({ - init, - stateProxy, - processResult, - getOperationStatus: getStatusFromInitialResponse, - withOperationLocation, - setErrorAsResult: !resolveOnUnsuccessful - }); - let resultPromise; - const abortController = new AbortController(); - const handlers = /* @__PURE__ */ new Map(); - const handleProgressEvents = async () => handlers.forEach((h) => h(state)); - const cancelErrMsg = "Operation was canceled"; - let currentPollIntervalInMs = intervalInMs; - const poller = { - getOperationState: () => state, - getResult: () => state.result, - isDone: () => ["succeeded", "failed", "canceled"].includes(state.status), - isStopped: () => resultPromise === void 0, - stopPolling: () => { - abortController.abort(); - }, - toString: () => JSON.stringify({ - state - }), - onProgress: (callback) => { - const s = /* @__PURE__ */ Symbol(); - handlers.set(s, callback); - return () => handlers.delete(s); - }, - pollUntilDone: (pollOptions) => resultPromise !== null && resultPromise !== void 0 ? 
resultPromise : resultPromise = (async () => { - const { abortSignal: inputAbortSignal } = pollOptions || {}; - function abortListener() { - abortController.abort(); - } - const abortSignal = abortController.signal; - if (inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.aborted) { - abortController.abort(); - } else if (!abortSignal.aborted) { - inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.addEventListener("abort", abortListener, { once: true }); + return tracing_js_1.tracingClient.withSpan("BlobClient-downloadToBuffer", options, async (updatedOptions) => { + if (!count) { + const response = await this.getProperties({ + ...options, + tracingOptions: updatedOptions.tracingOptions + }); + count = response.contentLength - offset; + if (count < 0) { + throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); } + } + if (!buffer) { try { - if (!poller.isDone()) { - await poller.poll({ abortSignal }); - while (!poller.isDone()) { - await (0, core_util_1.delay)(currentPollIntervalInMs, { abortSignal }); - await poller.poll({ abortSignal }); - } - } - } finally { - inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.removeEventListener("abort", abortListener); + buffer = Buffer.alloc(count); + } catch (error3) { + throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile". ${error3.message}`); } - if (resolveOnUnsuccessful) { - return poller.getResult(); - } else { - switch (state.status) { - case "succeeded": - return poller.getResult(); - case "canceled": - throw new Error(cancelErrMsg); - case "failed": - throw state.error; - case "notStarted": - case "running": - throw new Error(`Polling completed without succeeding or failing`); + } + if (buffer.length < count) { + throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); + } + let transferProgress = 0; + const batch = new Batch_js_1.Batch(options.concurrency); + for (let off = offset; off < offset + count; off = off + blockSize) { + batch.addOperation(async () => { + let chunkEnd = offset + count; + if (off + blockSize < chunkEnd) { + chunkEnd = off + blockSize; } - } - })().finally(() => { - resultPromise = void 0; - }), - async poll(pollOptions) { - if (resolveOnUnsuccessful) { - if (poller.isDone()) - return; - } else { - switch (state.status) { - case "succeeded": - return; - case "canceled": - throw new Error(cancelErrMsg); - case "failed": - throw state.error; + const response = await this.download(off, chunkEnd - off, { + abortSignal: options.abortSignal, + conditions: options.conditions, + maxRetryRequests: options.maxRetryRequestsPerBlock, + customerProvidedKey: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions + }); + const stream = response.readableStreamBody; + await (0, utils_js_1.streamToBuffer)(stream, buffer, off - offset, chunkEnd - offset); + transferProgress += chunkEnd - off; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); } - } - await (0, operation_js_1.pollOperation)({ - poll, - state, - stateProxy, - getOperationLocation, - isOperationError, - withOperationLocation, - getPollingInterval, - getOperationStatus: getStatusFromPollResponse, - getResourceLocation, - processResult, - getError, - updateState, - 
options: pollOptions, - setDelay: (pollIntervalInMs) => { - currentPollIntervalInMs = pollIntervalInMs; - }, - setErrorAsResult: !resolveOnUnsuccessful }); - await handleProgressEvents(); - if (!resolveOnUnsuccessful) { - switch (state.status) { - case "canceled": - throw new Error(cancelErrMsg); - case "failed": - throw state.error; - } - } } - }; - return poller; - }; - } - exports2.buildCreatePoller = buildCreatePoller; - } -}); - -// node_modules/@azure/core-lro/dist/commonjs/http/poller.js -var require_poller2 = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/http/poller.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createHttpPoller = void 0; - var operation_js_1 = require_operation2(); - var poller_js_1 = require_poller(); - async function createHttpPoller(lro, options) { - const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false } = options || {}; - return (0, poller_js_1.buildCreatePoller)({ - getStatusFromInitialResponse: operation_js_1.getStatusFromInitialResponse, - getStatusFromPollResponse: operation_js_1.getOperationStatus, - isOperationError: operation_js_1.isOperationError, - getOperationLocation: operation_js_1.getOperationLocation, - getResourceLocation: operation_js_1.getResourceLocation, - getPollingInterval: operation_js_1.parseRetryAfter, - getError: operation_js_1.getErrorFromResponse, - resolveOnUnsuccessful - })({ - init: async () => { - const response = await lro.sendInitialRequest(); - const config = (0, operation_js_1.inferLroMode)({ - rawResponse: response.rawResponse, - requestPath: lro.requestPath, - requestMethod: lro.requestMethod, - resourceLocationConfig - }); - return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, (config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}); - }, - poll: lro.sendPollRequest - }, { - intervalInMs, - withOperationLocation, - restoreFrom, - updateState, - processResult: processResult ? 
({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse - }); - } - exports2.createHttpPoller = createHttpPoller; - } -}); - -// node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js -var require_operation3 = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.GenericPollOperation = void 0; - var operation_js_1 = require_operation2(); - var logger_js_1 = require_logger2(); - var createStateProxy = () => ({ - initState: (config) => ({ config, isStarted: true }), - setCanceled: (state) => state.isCancelled = true, - setError: (state, error3) => state.error = error3, - setResult: (state, result) => state.result = result, - setRunning: (state) => state.isStarted = true, - setSucceeded: (state) => state.isCompleted = true, - setFailed: () => { - }, - getError: (state) => state.error, - getResult: (state) => state.result, - isCanceled: (state) => !!state.isCancelled, - isFailed: (state) => !!state.error, - isRunning: (state) => !!state.isStarted, - isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error) - }); - var GenericPollOperation = class { - constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { - this.state = state; - this.lro = lro; - this.setErrorAsResult = setErrorAsResult; - this.lroResourceLocationConfig = lroResourceLocationConfig; - this.processResult = processResult; - this.updateState = updateState; - this.isDone = isDone; - } - setPollerConfig(pollerConfig) { - this.pollerConfig = pollerConfig; + await batch.do(); + return buffer; + }); } - async update(options) { - var _a; - const stateProxy = createStateProxy(); - if (!this.state.isStarted) { - this.state = Object.assign(Object.assign({}, this.state), await (0, operation_js_1.initHttpOperation)({ - lro: this.lro, - stateProxy, - resourceLocationConfig: this.lroResourceLocationConfig, - processResult: this.processResult, - setErrorAsResult: this.setErrorAsResult - })); - } - const updateState = this.updateState; - const isDone = this.isDone; - if (!this.state.isCompleted && this.state.error === void 0) { - await (0, operation_js_1.pollHttpOperation)({ - lro: this.lro, - state: this.state, - stateProxy, - processResult: this.processResult, - updateState: updateState ? (state, { rawResponse }) => updateState(state, rawResponse) : void 0, - isDone: isDone ? ({ flatResponse }, state) => isDone(flatResponse, state) : void 0, - options, - setDelay: (intervalInMs) => { - this.pollerConfig.intervalInMs = intervalInMs; - }, - setErrorAsResult: this.setErrorAsResult + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Downloads an Azure Blob to a local file. + * Fails if the given file path already exists. + * Offset and count are optional; pass 0 and undefined respectively to download the entire blob. + * + * @param filePath - + * @param offset - From which position of the block blob to download. + * @param count - How much data to be downloaded. Will download to the end when passing undefined. + * @param options - Options to the Blob download operation. + * @returns The response data for the blob download operation, + * but with readableStreamBody set to undefined since its + * content is already read and written into a local file + * at the specified path.
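The `downloadToBuffer` overloads above accept either a caller-supplied `Buffer` or a numeric offset as the first argument; the chunked loop then issues up to `concurrency` ranged `download` calls of `blockSize` bytes each. A hedged sketch of both overloads plus `downloadToFile` (Node.js only; names are placeholders):

```ts
import { BlobServiceClient } from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

const account = "";
const blobClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  new DefaultAzureCredential(),
)
  .getContainerClient("")
  .getBlobClient("");

// Overload 1: offset/count first; the client allocates the Buffer itself.
const whole = await blobClient.downloadToBuffer();
console.log(`downloaded ${whole.byteLength} bytes`);

// Overload 2: caller-owned Buffer, four concurrent 1 MiB range requests.
const target = Buffer.alloc(4 * 1024 * 1024);
await blobClient.downloadToBuffer(target, 0, target.length, {
  blockSize: 1024 * 1024,
  concurrency: 4,
});

// Node.js only: write the blob straight to a local file.
await blobClient.downloadToFile("./blob-copy.bin");
```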
+ */ + async downloadToFile(filePath, offset = 0, count, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-downloadToFile", options, async (updatedOptions) => { + const response = await this.download(offset, count, { + ...options, + tracingOptions: updatedOptions.tracingOptions }); - } - (_a = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a === void 0 ? void 0 : _a.call(options, this.state); - return this; + if (response.readableStreamBody) { + await (0, utils_js_1.readStreamToLocalFile)(response.readableStreamBody, filePath); + } + response.blobDownloadStream = void 0; + return response; + }); } - async cancel() { - logger_js_1.logger.error("`cancelOperation` is deprecated because it wasn't implemented"); - return this; + getBlobAndContainerNamesFromUrl() { + let containerName; + let blobName; + try { + const parsedUrl = new URL(this.url); + if (parsedUrl.host.split(".")[1] === "blob") { + const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } else if ((0, utils_common_js_1.isIpEndpointStyle)(parsedUrl)) { + const pathComponents = parsedUrl.pathname.match("/([^/]*)/([^/]*)(/(.*))?"); + containerName = pathComponents[2]; + blobName = pathComponents[4]; + } else { + const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + containerName = decodeURIComponent(containerName); + blobName = decodeURIComponent(blobName); + blobName = blobName.replace(/\\/g, "/"); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return { blobName, containerName }; + } catch (error3) { + throw new Error("Unable to extract blobName and containerName with provided information."); + } } /** - * Serializes the Poller operation. + * Asynchronously copies a blob to a destination within the storage account. + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://learn.microsoft.com/rest/api/storageservices/copy-blob + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. 
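`startCopyFromURL`, documented above and implemented just below, issues the one-shot Copy Blob call; the public `beginCopyFromURL` wraps it in the `@azure/core-lro` poller machinery visible throughout this hunk and polls `getProperties` until the copy leaves the `pending` state. A minimal sketch with placeholder URLs and names:

```ts
import { BlobServiceClient } from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

const account = "";
const destinationBlob = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  new DefaultAzureCredential(),
)
  .getContainerClient("")
  .getBlobClient("");

// Poll every 2 seconds until copyStatus is no longer "pending".
const poller = await destinationBlob.beginCopyFromURL(
  "https://<source-account>.blob.core.windows.net/<container>/<blob>",
  { intervalInMs: 2000 },
);
const result = await poller.pollUntilDone();
console.log(`copy finished: ${result.copyStatus}`);
```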
*/ - toString() { - return JSON.stringify({ - state: this.state + async startCopyFromURL(copySource, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-startCopyFromURL", options, async (updatedOptions) => { + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + return (0, utils_common_js_1.assertResponse)(await this.blobContext.startCopyFromURL(copySource, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions.tagConditions + }, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, + legalHold: options.legalHold, + rehydratePriority: options.rehydratePriority, + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), + sealBlob: options.sealBlob, + tracingOptions: updatedOptions.tracingOptions + })); }); } - }; - exports2.GenericPollOperation = GenericPollOperation; - } -}); - -// node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js -var require_poller3 = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Poller = exports2.PollerCancelledError = exports2.PollerStoppedError = void 0; - var PollerStoppedError = class _PollerStoppedError extends Error { - constructor(message) { - super(message); - this.name = "PollerStoppedError"; - Object.setPrototypeOf(this, _PollerStoppedError.prototype); - } - }; - exports2.PollerStoppedError = PollerStoppedError; - var PollerCancelledError = class _PollerCancelledError extends Error { - constructor(message) { - super(message); - this.name = "PollerCancelledError"; - Object.setPrototypeOf(this, _PollerCancelledError.prototype); - } - }; - exports2.PollerCancelledError = PollerCancelledError; - var Poller = class { /** - * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * Only available for BlobClient constructed with a shared key credential. * - * When writing an implementation of a Poller, this implementation needs to deal with the initialization - * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's - * operation has already been defined, at least its basic properties. The code below shows how to approach - * the definition of the constructor of a new custom poller. + * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. 
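`generateSasUrl`, whose documentation continues just below, only works when the client was constructed with a `StorageSharedKeyCredential`; the `RangeError` in its body enforces this. A sketch that signs a one-hour read-only SAS (account name and key are placeholders):

```ts
import {
  BlobSASPermissions,
  BlobServiceClient,
  StorageSharedKeyCredential,
} from "@azure/storage-blob";

const account = "";
const accountKey = "";

const blobClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  new StorageSharedKeyCredential(account, accountKey),
)
  .getContainerClient("")
  .getBlobClient("");

// Read-only SAS valid for one hour, appended to the blob URL.
const sasUrl = await blobClient.generateSasUrl({
  permissions: BlobSASPermissions.parse("r"),
  expiresOn: new Date(Date.now() + 60 * 60 * 1000),
});
console.log(sasUrl);
```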
* - * ```ts - * export class MyPoller extends Poller { - * constructor({ - * // Anything you might need outside of the basics - * }) { - * let state: MyOperationState = { - * privateProperty: private, - * publicProperty: public, - * }; + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * - * const operation = { - * state, - * update, - * cancel, - * toString - * } + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve2) => { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ + containerName: this._containerName, + blobName: this._name, + snapshotTime: this._snapshot, + versionId: this._versionId, + ...options + }, this.credential).toString(); + resolve2((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); + }); + } + /** + * Only available for BlobClient constructed with a shared key credential. * - * // Sending the operation to the parent's constructor. - * super(operation); + * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on + * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. * - * // You can assign more local properties here. - * } - * } - * ``` + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * - * Inside of this constructor, a new promise is created. This will be used to - * tell the user when the poller finishes (see `pollUntilDone()`). The promise's - * resolve and reject methods are also used internally to control when to resolve - * or reject anyone waiting for the poller to finish. + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + generateSasStringToSign(options) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ + containerName: this._containerName, + blobName: this._name, + snapshotTime: this._snapshot, + versionId: this._versionId, + ...options + }, this.credential).stringToSign; + } + /** * - * The constructor of a custom implementation of a poller is where any serialized version of - * a previous poller's operation should be deserialized into the operation sent to the - * base constructor. For example: + * Generates a Blob Service Shared Access Signature (SAS) URI based on + * the client properties and parameters passed in. The SAS is signed by the input user delegation key. * - * ```ts - * export class MyPoller extends Poller { - * constructor( - * baseOperation: string | undefined - * ) { - * let state: MyOperationState = {}; - * if (baseOperation) { - * state = { - * ...JSON.parse(baseOperation).state, - * ...state - * }; - * } - * const operation = { - * state, - * // ... 
- * } - * super(operation); - * } - * } - * ``` + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas * - * @param operation - Must contain the basic properties of `PollOperation`. + * @param options - Optional parameters. + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ - constructor(operation) { - this.resolveOnUnsuccessful = false; - this.stopped = true; - this.pollProgressCallbacks = []; - this.operation = operation; - this.promise = new Promise((resolve2, reject) => { - this.resolve = resolve2; - this.reject = reject; - }); - this.promise.catch(() => { + generateUserDelegationSasUrl(options, userDelegationKey) { + return new Promise((resolve2) => { + const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ + containerName: this._containerName, + blobName: this._name, + snapshotTime: this._snapshot, + versionId: this._versionId, + ...options + }, userDelegationKey, this.accountName).toString(); + resolve2((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } /** - * Starts a loop that will break only if the poller is done - * or if the poller is stopped. + * Only available for BlobClient constructed with a shared key credential. + * + * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on + * the client properties and parameters passed in. The SAS is signed by the input user delegation key. + * + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ - async startPolling(pollOptions = {}) { - if (this.stopped) { - this.stopped = false; - } - while (!this.isStopped() && !this.isDone()) { - await this.poll(pollOptions); - await this.delay(); - } + generateUserDelegationSasStringToSign(options, userDelegationKey) { + return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ + containerName: this._containerName, + blobName: this._name, + snapshotTime: this._snapshot, + versionId: this._versionId, + ...options + }, userDelegationKey, this.accountName).stringToSign; } /** - * pollOnce does one polling, by calling to the update method of the underlying - * poll operation to make any relevant change effective. + * Delete the immutability policy on the blob. * - * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * @param options - Optional options to delete immutability policy on the blob. + */ + async deleteImmutabilityPolicy(options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-deleteImmutabilityPolicy", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.deleteImmutabilityPolicy({ + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Set immutability policy on the blob. * - * @param options - Optional properties passed to the operation's update method. + * @param options - Optional options to set immutability policy on the blob.
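`deleteImmutabilityPolicy` above and `setImmutabilityPolicy` below drive the blob's version-level WORM controls; note that `expiriesOn` (sic) is the SDK's actual property name, exactly as it appears in the request mappings in this hunk. A sketch, assuming the container has version-level immutability enabled:

```ts
import { BlobServiceClient } from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

const account = "";
const blobClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  new DefaultAzureCredential(),
)
  .getContainerClient("")
  .getBlobClient("");

// "Unlocked" policies can still be shortened or deleted; "Locked" cannot.
await blobClient.setImmutabilityPolicy({
  expiriesOn: new Date(Date.now() + 24 * 60 * 60 * 1000), // (sic) SDK spelling
  policyMode: "Unlocked",
});

// Remove the unlocked policy again.
await blobClient.deleteImmutabilityPolicy();
```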
*/ - async pollOnce(options = {}) { - if (!this.isDone()) { - this.operation = await this.operation.update({ - abortSignal: options.abortSignal, - fireProgress: this.fireProgress.bind(this) - }); - } - this.processUpdatedState(); + async setImmutabilityPolicy(immutabilityPolicy, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-setImmutabilityPolicy", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setImmutabilityPolicy({ + immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, + immutabilityPolicyMode: immutabilityPolicy.policyMode, + tracingOptions: updatedOptions.tracingOptions + })); + }); } /** - * fireProgress calls the functions passed in via onProgress the method of the poller. + * Set legal hold on the blob. * - * It loops over all of the callbacks received from onProgress, and executes them, sending them - * the current operation state. + * @param options - Optional options to set legal hold on the blob. + */ + async setLegalHold(legalHoldEnabled, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-setLegalHold", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.setLegalHold(legalHoldEnabled, { + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://learn.microsoft.com/rest/api/storageservices/get-account-information * - * @param state - The current operation state. + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. */ - fireProgress(state) { - for (const callback of this.pollProgressCallbacks) { - callback(state); + async getAccountInfo(options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobClient-getAccountInfo", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blobContext.getAccountInfo({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + }; + exports2.BlobClient = BlobClient; + var AppendBlobClient = class _AppendBlobClient extends BlobClient { + /** + * appendBlobsContext provided by protocol layer. 
+ */ + appendBlobContext; + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + let pipeline; + let url; + options = options || {}; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); + } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { + url = urlOrConnectionString; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); + } + pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); + } else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } else if (extractedCreds.kind === "SASConnString") { + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + } else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } + super(url, pipeline); + this.appendBlobContext = this.storageClientContext.appendBlob; } /** - * Invokes the underlying operation's cancel method. + * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. */ - async cancelOnce(options = {}) { - this.operation = await this.operation.cancel(options); + withSnapshot(snapshot) { + return new _AppendBlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? 
void 0 : snapshot), this.pipeline); } /** - * Returns a promise that will resolve once a single polling request finishes. - * It does this by calling the update method of the Poller's operation. + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://learn.microsoft.com/rest/api/storageservices/put-blob * - * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * @param options - Options to the Append Block Create operation. * - * @param options - Optional properties passed to the operation's update method. + * + * Example usage: + * + * ```ts snippet:ClientsCreateAppendBlob + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * + * const appendBlobClient = containerClient.getAppendBlobClient(blobName); + * await appendBlobClient.create(); + * ``` */ - poll(options = {}) { - if (!this.pollOncePromise) { - this.pollOncePromise = this.pollOnce(options); - const clearPollOncePromise = () => { - this.pollOncePromise = void 0; - }; - this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); - } - return this.pollOncePromise; + async create(options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-create", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.create(0, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, + legalHold: options.legalHold, + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), + tracingOptions: updatedOptions.tracingOptions + })); + }); } - processUpdatedState() { - if (this.operation.state.error) { - this.stopped = true; - if (!this.resolveOnUnsuccessful) { - this.reject(this.operation.state.error); - throw this.operation.state.error; - } - } - if (this.operation.state.isCancelled) { - this.stopped = true; - if (!this.resolveOnUnsuccessful) { - const error3 = new PollerCancelledError("Operation was canceled"); - this.reject(error3); - throw error3; + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. 
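`createIfNotExists`, whose documentation continues below, is `create` with an `ifNoneMatch: "*"` (ETagAny) condition, translating a `BlobAlreadyExists` service error into `{ succeeded: false }` instead of throwing. A sketch:

```ts
import { BlobServiceClient } from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

const account = "";
const appendBlobClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  new DefaultAzureCredential(),
)
  .getContainerClient("")
  .getAppendBlobClient("");

// No error if the blob already exists; check `succeeded` instead.
const { succeeded } = await appendBlobClient.createIfNotExists();
console.log(succeeded ? "created" : "already existed");
```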
+ * @see https://learn.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - + */ + async createIfNotExists(options = {}) { + const conditions = { ifNoneMatch: constants_js_1.ETagAny }; + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-createIfNotExists", options, async (updatedOptions) => { + try { + const res = (0, utils_common_js_1.assertResponse)(await this.create({ + ...updatedOptions, + conditions + })); + return { + succeeded: true, + ...res, + _response: res._response + // _response is made non-enumerable + }; + } catch (e) { + if (e.details?.errorCode === "BlobAlreadyExists") { + return { + succeeded: false, + ...e.response?.parsedHeaders, + _response: e.response + }; + } + throw e; } - } - if (this.isDone() && this.resolve) { - this.resolve(this.getResult()); - } + }); } /** - * Returns a promise that will resolve once the underlying operation is completed. + * Seals the append blob, making it read only. + * + * @param options - */ - async pollUntilDone(pollOptions = {}) { - if (this.stopped) { - this.startPolling(pollOptions).catch(this.reject); - } - this.processUpdatedState(); - return this.promise; + async seal(options = {}) { + options.conditions = options.conditions || {}; + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-seal", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.seal({ + abortSignal: options.abortSignal, + appendPositionAccessConditions: options.conditions, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Commits a new block of data to the end of the existing append blob. + * @see https://learn.microsoft.com/rest/api/storageservices/append-block + * + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```ts snippet:ClientsAppendBlock + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient(blobName); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob. 
+ * const existingAppendBlobClient = containerClient.getAppendBlobClient(blobName); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` + */ + async appendBlock(body, contentLength, options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-appendBlock", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.appendBlock(contentLength, body, { + abortSignal: options.abortSignal, + appendPositionAccessConditions: options.conditions, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + requestOptions: { + onUploadProgress: options.onProgress + }, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions + })); + }); } /** - * Invokes the provided callback after each polling is completed, - * sending the current state of the poller's operation. + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://learn.microsoft.com/rest/api/storageservices/append-block-from-url * - * It returns a method that can be used to stop receiving updates on the given callback function. + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. 
+ * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - */ - onProgress(callback) { - this.pollProgressCallbacks.push(callback); - return () => { - this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); - }; + async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("AppendBlobClient-appendBlockFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, { + abortSignal: options.abortSignal, + sourceRange: (0, Range_js_1.rangeToString)({ offset: sourceOffset, count }), + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + leaseAccessConditions: options.conditions, + appendPositionAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions?.ifMatch, + sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince + }, + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + fileRequestIntent: options.sourceShareTokenIntent, + tracingOptions: updatedOptions.tracingOptions + })); + }); } + }; + exports2.AppendBlobClient = AppendBlobClient; + var BlockBlobClient = class _BlockBlobClient extends BlobClient { /** - * Returns true if the poller has finished polling. + * blobContext provided by protocol layer. + * + * Note. Ideally BlobClient should set BlobClient.blobContext to protected. However, API + * extractor has an issue blocking that. Here we redeclare _blobContext in BlockBlobClient. */ - isDone() { - const state = this.operation.state; - return Boolean(state.isCompleted || state.isCancelled || state.error); - } + _blobContext; /** - * Stops the poller from continuing to poll. + * blockBlobContext provided by protocol layer.
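The `appendBlockFromURL` implementation above turns `(sourceOffset, count)` into a `sourceRange` header via `rangeToString`, so the service reads the bytes directly from the source URL. A sketch, with a SAS-authorized placeholder source URL:

```ts
import { BlobServiceClient } from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

const account = "";
const appendBlobClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  new DefaultAzureCredential(),
)
  .getContainerClient("")
  .getAppendBlobClient("");

// Append the first 1024 bytes of the source blob; the source must be
// public or carry a SAS, as the docs above note.
await appendBlobClient.appendBlockFromURL(
  "https://<source-account>.blob.core.windows.net/<container>/<blob>?<sas>",
  0, // sourceOffset
  1024, // count
);
```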
*/ - stopPolling() { - if (!this.stopped) { - this.stopped = true; - if (this.reject) { - this.reject(new PollerStoppedError("This poller is already stopped")); + blockBlobContext; + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + let pipeline; + let url; + options = options || {}; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); + } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { + url = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); + } + pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); + } else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } else if (extractedCreds.kind === "SASConnString") { + url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + } else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } + } else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } + super(url, pipeline); + this.blockBlobContext = this.storageClientContext.blockBlob; + this._blobContext = this.storageClientContext.blob; } /** - * Returns true if the poller is stopped. - */ - isStopped() { - return this.stopped; - } - /** - * Attempts to cancel the underlying operation. - * - * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. - * - * If it's called again before it finishes, it will throw an error. 
+ * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. * - * @param options - Optional properties passed to the operation's update method. + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. */ - cancelOperation(options = {}) { - if (!this.cancelPromise) { - this.cancelPromise = this.cancelOnce(options); - } else if (options.abortSignal) { - throw new Error("A cancel request is currently pending"); - } - return this.cancelPromise; + withSnapshot(snapshot) { + return new _BlockBlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); } /** - * Returns the state of the operation. + * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * Even though TState will be the same type inside any of the methods of any extension of the Poller class, - * implementations of the pollers can customize what's shared with the public by writing their own - * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller - * and a public type representing a safe to share subset of the properties of the internal state. - * Their definition of getOperationState can then return their public type. + * Quick query for a JSON or CSV formatted blob. * - * Example: + * Example usage (Node.js): * - * ```ts - * // Let's say we have our poller's operation state defined as: - * interface MyOperationState extends PollOperationState { - * privateProperty?: string; - * publicProperty?: string; - * } + * ```ts snippet:ClientsQuery + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; * - * // To allow us to have a true separation of public and private state, we have to define another interface: - * interface PublicState extends PollOperationState { - * publicProperty?: string; - * } + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); * - * // Then, we define our Poller as follows: - * export class MyPoller extends Poller { - * // ... More content is needed here ... + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blockBlobClient = containerClient.getBlockBlobClient(blobName); * - * public getOperationState(): PublicState { - * const state: PublicState = this.operation.state; - * return { - * // Properties from PollOperationState - * isStarted: state.isStarted, - * isCompleted: state.isCompleted, - * isCancelled: state.isCancelled, - * error: state.error, - * result: state.result, + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select from BlobStorage"); + * if (queryBlockBlobResponse.readableStreamBody) { + * const downloadedBuffer = await streamToBuffer(queryBlockBlobResponse.readableStreamBody); + * const downloaded = downloadedBuffer.toString(); + * console.log(`Query blob content: ${downloaded}`); + * } * - * // The only other property needed by PublicState. 
- * publicProperty: state.publicProperty - * } - * } + * async function streamToBuffer(readableStream: NodeJS.ReadableStream): Promise { + * return new Promise((resolve, reject) => { + * const chunks: Buffer[] = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); * } * ``` * - * You can see this in the tests of this repository, go to the file: - * `../test/utils/testPoller.ts` - * and look for the getOperationState implementation. - */ - getOperationState() { - return this.operation.state; - } - /** - * Returns the result value of the operation, - * regardless of the state of the poller. - * It can return undefined or an incomplete form of the final TResult value - * depending on the implementation. - */ - getResult() { - const state = this.operation.state; - return state.result; - } - /** - * Returns a serialized version of the poller's operation - * by invoking the operation's toString method. - */ - toString() { - return this.operation.toString(); - } - }; - exports2.Poller = Poller; - } -}); - -// node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js -var require_lroEngine = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.LroEngine = void 0; - var operation_js_1 = require_operation3(); - var constants_js_1 = require_constants17(); - var poller_js_1 = require_poller3(); - var operation_js_2 = require_operation(); - var LroEngine = class extends poller_js_1.Poller { - constructor(lro, options) { - const { intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState } = options || {}; - const state = resumeFrom ? (0, operation_js_2.deserializeState)(resumeFrom) : {}; - const operation = new operation_js_1.GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); - super(operation); - this.resolveOnUnsuccessful = resolveOnUnsuccessful; - this.config = { intervalInMs }; - operation.setPollerConfig(this.config); - } - /** - * The method used by the poller to wait before attempting to update its operation. 
+ * @param query - + * @param options - */ - delay() { - return new Promise((resolve2) => setTimeout(() => resolve2(), this.config.intervalInMs)); - } - }; - exports2.LroEngine = LroEngine; - } -}); - -// node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js -var require_lroEngine2 = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.LroEngine = void 0; - var lroEngine_js_1 = require_lroEngine(); - Object.defineProperty(exports2, "LroEngine", { enumerable: true, get: function() { - return lroEngine_js_1.LroEngine; - } }); - } -}); - -// node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js -var require_pollOperation = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - } -}); - -// node_modules/@azure/core-lro/dist/commonjs/index.js -var require_commonjs14 = __commonJS({ - "node_modules/@azure/core-lro/dist/commonjs/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createHttpPoller = void 0; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var poller_js_1 = require_poller2(); - Object.defineProperty(exports2, "createHttpPoller", { enumerable: true, get: function() { - return poller_js_1.createHttpPoller; - } }); - tslib_1.__exportStar(require_lroEngine2(), exports2); - tslib_1.__exportStar(require_poller3(), exports2); - tslib_1.__exportStar(require_pollOperation(), exports2); - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/pollers/BlobStartCopyFromUrlPoller.js -var require_BlobStartCopyFromUrlPoller = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/pollers/BlobStartCopyFromUrlPoller.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BlobBeginCopyFromUrlPoller = void 0; - var core_util_1 = require_commonjs4(); - var core_lro_1 = require_commonjs14(); - var BlobBeginCopyFromUrlPoller = class extends core_lro_1.Poller { - intervalInMs; - constructor(options) { - const { blobClient, copySource, intervalInMs = 15e3, onProgress, resumeFrom, startCopyFromURLOptions } = options; - let state; - if (resumeFrom) { - state = JSON.parse(resumeFrom).state; + async query(query, options = {}) { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + if (!core_util_1.isNodeLike) { + throw new Error("This operation currently is only supported in Node.js."); } - const operation = makeBlobBeginCopyFromURLPollOperation({ - ...state, - blobClient, - copySource, - startCopyFromURLOptions + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-query", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this._blobContext.query({ + abortSignal: options.abortSignal, + queryRequest: { + queryType: "SQL", + expression: query, + inputSerialization: (0, utils_common_js_1.toQuerySerialization)(options.inputTextConfiguration), + outputSerialization: (0, utils_common_js_1.toQuerySerialization)(options.outputTextConfiguration) + }, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions + })); + return 
new BlobQueryResponse_js_1.BlobQueryResponse(response, { + abortSignal: options.abortSignal, + onProgress: options.onProgress, + onError: options.onError + }); }); - super(operation); - if (typeof onProgress === "function") { - this.onProgress(onProgress); - } - this.intervalInMs = intervalInMs; - } - delay() { - return (0, core_util_1.delay)(this.intervalInMs); - } - }; - exports2.BlobBeginCopyFromUrlPoller = BlobBeginCopyFromUrlPoller; - var cancel = async function cancel2(options = {}) { - const state = this.state; - const { copyId } = state; - if (state.isCompleted) { - return makeBlobBeginCopyFromURLPollOperation(state); - } - if (!copyId) { - state.isCancelled = true; - return makeBlobBeginCopyFromURLPollOperation(state); - } - await state.blobClient.abortCopyFromURL(copyId, { - abortSignal: options.abortSignal - }); - state.isCancelled = true; - return makeBlobBeginCopyFromURLPollOperation(state); - }; - var update = async function update2(options = {}) { - const state = this.state; - const { blobClient, copySource, startCopyFromURLOptions } = state; - if (!state.isStarted) { - state.isStarted = true; - const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); - state.copyId = result.copyId; - if (result.copyStatus === "success") { - state.result = result; - state.isCompleted = true; - } - } else if (!state.isCompleted) { - try { - const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); - const { copyStatus, copyProgress } = result; - const prevCopyProgress = state.copyProgress; - if (copyProgress) { - state.copyProgress = copyProgress; - } - if (copyStatus === "pending" && copyProgress !== prevCopyProgress && typeof options.fireProgress === "function") { - options.fireProgress(state); - } else if (copyStatus === "success") { - state.result = result; - state.isCompleted = true; - } else if (copyStatus === "failed") { - state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); - state.isCompleted = true; - } - } catch (err) { - state.error = err; - state.isCompleted = true; - } - } - return makeBlobBeginCopyFromURLPollOperation(state); - }; - var toString2 = function toString3() { - return JSON.stringify({ state: this.state }, (key, value) => { - if (key === "blobClient") { - return void 0; - } - return value; - }); - }; - function makeBlobBeginCopyFromURLPollOperation(state) { - return { - state: { ...state }, - cancel, - toString: toString2, - update - }; - } - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/Range.js -var require_Range = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/Range.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.rangeToString = rangeToString; - function rangeToString(iRange) { - if (iRange.offset < 0) { - throw new RangeError(`Range.offset cannot be smaller than 0.`); - } - if (iRange.count && iRange.count <= 0) { - throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); } - return iRange.count ? 
`bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` : `bytes=${iRange.offset}-`; - } - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/utils/Batch.js -var require_Batch = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/utils/Batch.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Batch = void 0; - var events_1 = require("events"); - var BatchStates; - (function(BatchStates2) { - BatchStates2[BatchStates2["Good"] = 0] = "Good"; - BatchStates2[BatchStates2["Error"] = 1] = "Error"; - })(BatchStates || (BatchStates = {})); - var Batch = class { - /** - * Concurrency. Must be lager than 0. - */ - concurrency; - /** - * Number of active operations under execution. - */ - actives = 0; - /** - * Number of completed operations under execution. - */ - completed = 0; /** - * Offset of next operation to be executed. + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's contents, + * use {@link stageBlock} and {@link commitBlockList}. + * + * This is a non-parallel uploading method; please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrency uploading. + * + * @see https://learn.microsoft.com/rest/api/storageservices/put-blob + * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation.
+ * + * Example usage: + * + * ```ts snippet:ClientsUpload + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blockBlobClient = containerClient.getBlockBlobClient(blobName); + * + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` */ - offset = 0; + async upload(body, contentLength, options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-upload", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.upload(contentLength, body, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + requestOptions: { + onUploadProgress: options.onProgress + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, + legalHold: options.legalHold, + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), + tracingOptions: updatedOptions.tracingOptions + })); + }); + } /** - * Operation array to be executed. + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. 
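`syncUploadFromURL`, implemented just below, is the single-shot Put Blob From URL call: the service reads the source itself, so no data flows through the client process, unlike `upload` above. A sketch:

```ts
import { BlobServiceClient } from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

const account = "";
const blockBlobClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  new DefaultAzureCredential(),
)
  .getContainerClient("")
  .getBlockBlobClient("");

// Server-side copy into a new block blob, landing in the Cool tier.
await blockBlobClient.syncUploadFromURL(
  "https://<source-account>.blob.core.windows.net/<container>/<blob>?<sas>",
  { tier: "Cool" },
);
```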
*/ - operations = []; + async syncUploadFromURL(sourceURL, options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-syncUploadFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.putBlobFromUrl(0, sourceURL, { + ...options, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions?.ifMatch, + sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions?.tagConditions + }, + cpkInfo: options.customerProvidedKey, + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), + copySourceTags: options.copySourceTags, + fileRequestIntent: options.sourceShareTokenIntent, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } /** - * States of Batch. When an error happens, state will turn into error. - * Batch will stop execute left operations. + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://learn.microsoft.com/rest/api/storageservices/put-block + * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. */ - state = BatchStates.Good; + async stageBlock(blockId, body, contentLength, options = {}) { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-stageBlock", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.stageBlock(blockId, contentLength, body, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + requestOptions: { + onUploadProgress: options.onProgress + }, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } /** - * A private emitter used to pass events inside this class. + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. + * @see https://learn.microsoft.com/rest/api/storageservices/put-block-from-url + * + * @param blockId - A 64-byte value that is base64-encoded + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. 
The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Options to the Block Blob Stage Block From URL operation. + * @returns Response data for the Block Blob Stage Block From URL operation. */ - emitter; + async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-stageBlockFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + sourceRange: offset === 0 && !count ? void 0 : (0, Range_js_1.rangeToString)({ offset, count }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), + fileRequestIntent: options.sourceShareTokenIntent, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } /** - * Creates an instance of Batch. - * @param concurrency - + * Writes a blob by specifying the list of block IDs that make up the blob. + * In order to be written as part of a blob, a block must have been successfully written + * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to + * update a blob by uploading only those blocks that have changed, then committing the new and existing + * blocks together. Any blocks not specified in the block list are permanently deleted. + * @see https://learn.microsoft.com/rest/api/storageservices/put-block-list + * + * @param blocks - Array of 64-byte values that are base64-encoded + * @param options - Options to the Block Blob Commit Block List operation. + * @returns Response data for the Block Blob Commit Block List operation.
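A short sketch of the stage-and-commit flow that this JSDoc and the surrounding methods describe; the block-ID helper is illustrative (block IDs must be base64-encoded strings of equal length), and client setup is as in the previous snippet:

```ts
// Assumes blockBlobClient was created as in the previous snippet.
// Block IDs must be base64-encoded and the same length for every block.
const blockId = (i: number) =>
  Buffer.from(String(i).padStart(6, "0")).toString("base64");

const chunks = ["hello ", "world"];
for (let i = 0; i < chunks.length; i++) {
  await blockBlobClient.stageBlock(blockId(i), chunks[i], chunks[i].length);
}

// Committing makes the staged blocks the blob's content; staged blocks
// left out of the committed list are eventually garbage collected.
await blockBlobClient.commitBlockList(chunks.map((_, i) => blockId(i)));

// getBlockList (documented below) reports what was committed.
const blockList = await blockBlobClient.getBlockList("committed");
console.log(`committed blocks: ${blockList.committedBlocks.length}`);
```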
*/ - constructor(concurrency = 5) { - if (concurrency < 1) { - throw new RangeError("concurrency must be larger than 0"); - } - this.concurrency = concurrency; - this.emitter = new events_1.EventEmitter(); + async commitBlockList(blocks, options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-commitBlockList", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.commitBlockList({ latest: blocks }, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, + legalHold: options.legalHold, + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), + tracingOptions: updatedOptions.tracingOptions + })); + }); } /** - * Add a operation into queue. + * Returns the list of blocks that have been uploaded as part of a block blob + * using the specified block list filter. + * @see https://learn.microsoft.com/rest/api/storageservices/get-block-list * - * @param operation - + * @param listType - Specifies whether to return the list of committed blocks, + * the list of uncommitted blocks, or both lists together. + * @param options - Options to the Block Blob Get Block List operation. + * @returns Response data for the Block Blob Get Block List operation. */ - addOperation(operation) { - this.operations.push(async () => { - try { - this.actives++; - await operation(); - this.actives--; - this.completed++; - this.parallelExecute(); - } catch (error3) { - this.emitter.emit("error", error3); + async getBlockList(listType, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-getBlockList", options, async (updatedOptions) => { + const res = (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.getBlockList(listType, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + tracingOptions: updatedOptions.tracingOptions + })); + if (!res.committedBlocks) { + res.committedBlocks = []; + } + if (!res.uncommittedBlocks) { + res.uncommittedBlocks = []; } + return res; }); } + // High level functions /** - * Start execute operations in the queue. + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use a single {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type.
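Before the parameter list, a sketch of the single-shot versus staged-blocks behavior of uploadData just described; the 4 MiB threshold is an illustrative option value, not a default taken from this diff:

```ts
// Assumes blockBlobClient was created as in the earlier snippets.
const data = Buffer.alloc(16 * 1024 * 1024, "a"); // 16 MiB payload

await blockBlobClient.uploadData(data, {
  // Payloads above this size go through stageBlock/commitBlockList.
  maxSingleShotSize: 4 * 1024 * 1024,
  blockSize: 4 * 1024 * 1024,
  concurrency: 4,
  onProgress: (ev) => console.log(`${ev.loadedBytes} bytes sent`),
});
```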
* + * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView + * @param options - */ - async do() { - if (this.operations.length === 0) { - return Promise.resolve(); - } - this.parallelExecute(); - return new Promise((resolve2, reject) => { - this.emitter.on("finish", resolve2); - this.emitter.on("error", (error3) => { - this.state = BatchStates.Error; - reject(error3); - }); + async uploadData(data, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadData", options, async (updatedOptions) => { + if (core_util_1.isNodeLike) { + let buffer; + if (data instanceof Buffer) { + buffer = data; + } else if (data instanceof ArrayBuffer) { + buffer = Buffer.from(data); + } else { + data = data; + buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + } + return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); + } else { + const browserBlob = new Blob([data]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } }); } /** - * Get next operation to be executed. Return null when reaching ends. + * ONLY AVAILABLE IN BROWSERS. + * + * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. + * + * When the buffer length is less than or equal to 256MB, this method will use a single upload call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call + * {@link commitBlockList} to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @deprecated Use {@link uploadData} instead. * + * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView + * @param options - Options to upload browser data. + * @returns Response data for the Blob Upload operation. */ - nextOperation() { - if (this.offset < this.operations.length) { - return this.operations[this.offset++]; - } - return null; + async uploadBrowserData(browserData, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadBrowserData", options, async (updatedOptions) => { + const browserBlob = new Blob([browserData]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + }); } /** - * Start execute operations. One one the most important difference between - * this method with do() is that do() wraps as an sync method. * + * Uploads data to block blob. Requires a bodyFactory as the data source, + * which needs to return a {@link HttpRequestBody} object with the offset and size provided. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use a single {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * @param bodyFactory - + * @param size - size of the data to upload. + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation.
*/ - parallelExecute() { - if (this.state === BatchStates.Error) { - return; - } - if (this.completed >= this.operations.length) { - this.emitter.emit("finish"); - return; + async uploadSeekableInternal(bodyFactory, size, options = {}) { + let blockSize = options.blockSize ?? 0; + if (blockSize < 0 || blockSize > constants_js_1.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + throw new RangeError(`blockSize option must be >= 0 and <= ${constants_js_1.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); } - while (this.actives < this.concurrency) { - const operation = this.nextOperation(); - if (operation) { - operation(); - } else { - return; - } + const maxSingleShotSize = options.maxSingleShotSize ?? constants_js_1.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + if (maxSingleShotSize < 0 || maxSingleShotSize > constants_js_1.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${constants_js_1.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); } - } - }; - exports2.Batch = Batch; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/utils/utils.js -var require_utils7 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/utils/utils.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.fsCreateReadStream = exports2.fsStat = void 0; - exports2.streamToBuffer = streamToBuffer; - exports2.streamToBuffer2 = streamToBuffer2; - exports2.streamToBuffer3 = streamToBuffer3; - exports2.readStreamToLocalFile = readStreamToLocalFile; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var node_fs_1 = tslib_1.__importDefault(require("node:fs")); - var node_util_1 = tslib_1.__importDefault(require("node:util")); - var constants_js_1 = require_constants15(); - async function streamToBuffer(stream, buffer, offset, end, encoding) { - let pos = 0; - const count = end - offset; - return new Promise((resolve2, reject) => { - const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), constants_js_1.REQUEST_TIMEOUT); - stream.on("readable", () => { - if (pos >= count) { - clearTimeout(timeout); - resolve2(); - return; - } - let chunk = stream.read(); - if (!chunk) { - return; - } - if (typeof chunk === "string") { - chunk = Buffer.from(chunk, encoding); - } - const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; - buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); - pos += chunkLength; - }); - stream.on("end", () => { - clearTimeout(timeout); - if (pos < count) { - reject(new Error(`Stream drains before getting enough data needed. 
Data read: ${pos}, data need: ${count}`)); + if (blockSize === 0) { + if (size > constants_js_1.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * constants_js_1.BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`${size} is too large to upload to a block blob.`); } - resolve2(); - }); - stream.on("error", (msg) => { - clearTimeout(timeout); - reject(msg); - }); - }); - } - async function streamToBuffer2(stream, buffer, encoding) { - let pos = 0; - const bufferSize = buffer.length; - return new Promise((resolve2, reject) => { - stream.on("readable", () => { - let chunk = stream.read(); - if (!chunk) { - return; + if (size > maxSingleShotSize) { + blockSize = Math.ceil(size / constants_js_1.BLOCK_BLOB_MAX_BLOCKS); + if (blockSize < constants_js_1.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + blockSize = constants_js_1.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } } - if (typeof chunk === "string") { - chunk = Buffer.from(chunk, encoding); + } + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadSeekableInternal", options, async (updatedOptions) => { + if (size <= maxSingleShotSize) { + return (0, utils_common_js_1.assertResponse)(await this.upload(bodyFactory(0, size), size, updatedOptions)); } - if (pos + chunk.length > bufferSize) { - reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); - return; + const numBlocks = Math.floor((size - 1) / blockSize) + 1; + if (numBlocks > constants_js_1.BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`The buffer's size is too big or the BlockSize is too small; the number of blocks must be <= ${constants_js_1.BLOCK_BLOB_MAX_BLOCKS}`); } - buffer.fill(chunk, pos, pos + chunk.length); - pos += chunk.length; - }); - stream.on("end", () => { - resolve2(pos); - }); - stream.on("error", reject); - }); - } - async function streamToBuffer3(readableStream, encoding) { - return new Promise((resolve2, reject) => { - const chunks = []; - readableStream.on("data", (data) => { - chunks.push(typeof data === "string" ?
Buffer.from(data, encoding) : data); - }); - readableStream.on("end", () => { - resolve2(Buffer.concat(chunks)); - }); - readableStream.on("error", reject); - }); - } - async function readStreamToLocalFile(rs, file) { - return new Promise((resolve2, reject) => { - const ws = node_fs_1.default.createWriteStream(file); - rs.on("error", (err) => { - reject(err); - }); - ws.on("error", (err) => { - reject(err); - }); - ws.on("close", resolve2); - rs.pipe(ws); - }); - } - exports2.fsStat = node_util_1.default.promisify(node_fs_1.default.stat); - exports2.fsCreateReadStream = node_fs_1.default.createReadStream; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/Clients.js -var require_Clients = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/Clients.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.PageBlobClient = exports2.BlockBlobClient = exports2.AppendBlobClient = exports2.BlobClient = void 0; - var core_rest_pipeline_1 = require_commonjs6(); - var core_auth_1 = require_commonjs7(); - var core_util_1 = require_commonjs4(); - var core_util_2 = require_commonjs4(); - var BlobDownloadResponse_js_1 = require_BlobDownloadResponse(); - var BlobQueryResponse_js_1 = require_BlobQueryResponse(); - var AnonymousCredential_js_1 = require_AnonymousCredential(); - var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); - var models_js_1 = require_models2(); - var PageBlobRangeResponse_js_1 = require_PageBlobRangeResponse(); - var Pipeline_js_1 = require_Pipeline(); - var BlobStartCopyFromUrlPoller_js_1 = require_BlobStartCopyFromUrlPoller(); - var Range_js_1 = require_Range(); - var StorageClient_js_1 = require_StorageClient(); - var Batch_js_1 = require_Batch(); - var storage_common_1 = require_commonjs13(); - var constants_js_1 = require_constants15(); - var tracing_js_1 = require_tracing(); - var utils_common_js_1 = require_utils_common(); - var utils_js_1 = require_utils7(); - var BlobSASSignatureValues_js_1 = require_BlobSASSignatureValues(); - var BlobLeaseClient_js_1 = require_BlobLeaseClient(); - var BlobClient = class _BlobClient extends StorageClient_js_1.StorageClient { + const blockList = []; + const blockIDPrefix = (0, core_util_2.randomUUID)(); + let transferProgress = 0; + const batch = new Batch_js_1.Batch(options.concurrency); + for (let i = 0; i < numBlocks; i++) { + batch.addOperation(async () => { + const blockID = (0, utils_common_js_1.generateBlockID)(blockIDPrefix, i); + const start = blockSize * i; + const end = i === numBlocks - 1 ? size : start + blockSize; + const contentLength = end - start; + blockList.push(blockID); + await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { + abortSignal: options.abortSignal, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions + }); + transferProgress += contentLength; + if (options.onProgress) { + options.onProgress({ + loadedBytes: transferProgress + }); + } + }); + } + await batch.do(); + return this.commitBlockList(blockList, updatedOptions); + }); + } /** - * blobContext provided by protocol layer. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a local file in blocks to a block blob. + * + * When the file size is less than or equal to 256MB, this method will use a single upload call to finish the upload. + * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList + * to commit the block list.
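A Node.js-only sketch of uploadFile, paired with uploadStream (documented further below); the file path and blob names are placeholders:

```ts
import { createReadStream } from "node:fs";

// Assumes containerClient was created as in the first snippet.
// uploadFile stats the file, then streams it in blocks.
await containerClient
  .getBlockBlobClient("from-file")
  .uploadFile("./local-file.bin", { concurrency: 5 });

// uploadStream buffers the stream into bufferSize chunks and stages
// each chunk as a block: 8 MiB buffers, 5 concurrent uploads here.
await containerClient
  .getBlockBlobClient("from-stream")
  .uploadStream(createReadStream("./local-file.bin"), 8 * 1024 * 1024, 5);
```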
+ * + * @param filePath - Full path of local file + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. */ - blobContext; - _name; - _containerName; - _versionId; - _snapshot; + async uploadFile(filePath, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadFile", options, async (updatedOptions) => { + const size = (await (0, utils_js_1.fsStat)(filePath)).size; + return this.uploadSeekableInternal((offset, count) => { + return () => (0, utils_js_1.fsCreateReadStream)(filePath, { + autoClose: true, + end: count ? offset + count - 1 : Infinity, + start: offset + }); + }, size, { + ...options, + tracingOptions: updatedOptions.tracingOptions + }); + }); + } /** - * The name of the blob. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a Node.js Readable stream into block blob. + * + * PERFORMANCE IMPROVEMENT TIPS: + * * Set the input stream's highWaterMark to the same value as the bufferSize + * parameter; this avoids Buffer.concat() operations. + * + * @param stream - Node.js Readable stream + * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB + * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, + * and is positively correlated with the upload concurrency. Default value is 5 + * @param options - Options to Upload Stream to Block Blob operation. + * @returns Response data for the Blob Upload operation. */ - get name() { - return this._name; + async uploadStream(stream, bufferSize = constants_js_1.DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadStream", options, async (updatedOptions) => { + let blockNum = 0; + const blockIDPrefix = (0, core_util_2.randomUUID)(); + let transferProgress = 0; + const blockList = []; + const scheduler = new storage_common_1.BufferScheduler( + stream, + bufferSize, + maxConcurrency, + async (body, length) => { + const blockID = (0, utils_common_js_1.generateBlockID)(blockIDPrefix, blockNum); + blockList.push(blockID); + blockNum++; + await this.stageBlock(blockID, body, length, { + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions + }); + transferProgress += length; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }, + // concurrency should be set to a smaller value than maxConcurrency, which helps + // reduce the chance that an outgoing handler is blocked waiting for stream data; + // in that situation outgoing handlers are blocked. + // The outgoing queue shouldn't be empty. + Math.ceil(maxConcurrency / 4 * 3) + ); + await scheduler.do(); + return (0, utils_common_js_1.assertResponse)(await this.commitBlockList(blockList, { + ...options, + tracingOptions: updatedOptions.tracingOptions + })); + }); } + }; + exports2.BlockBlobClient = BlockBlobClient; + var PageBlobClient = class _PageBlobClient extends BlobClient { /** - * The name of the storage container the blob is associated with. + * pageBlobContext provided by protocol layer.
*/ - get containerName() { - return this._containerName; - } + pageBlobContext; constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { - options = options || {}; let pipeline; let url; + options = options || {}; if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; @@ -104395,9 +104187,6 @@ var require_Clients = __commonJS({ pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { url = urlOrConnectionString; - if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { - options = blobNameOrOptions; - } pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { const containerName = credentialOrPipelineOrContainerName; @@ -104424,306 +104213,71 @@ var require_Clients = __commonJS({ throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url, pipeline); - ({ blobName: this._name, containerName: this._containerName } = this.getBlobAndContainerNamesFromUrl()); - this.blobContext = this.storageClientContext.blob; - this._snapshot = (0, utils_common_js_1.getURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT); - this._versionId = (0, utils_common_js_1.getURLParameter)(this.url, constants_js_1.URLConstants.Parameters.VERSIONID); + this.pageBlobContext = this.storageClientContext.pageBlob; } /** - * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Creates a new PageBlobClient object identical to the source but with the + * specified snapshot timestamp. * Provide "" will remove the snapshot and return a Client to the base blob. * * @param snapshot - The snapshot timestamp. - * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. */ withSnapshot(snapshot) { - return new _BlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); - } - /** - * Creates a new BlobClient object pointing to a version of this blob. - * Provide "" will remove the versionId and return a Client to the base blob. - * - * @param versionId - The versionId. - * @returns A new BlobClient object pointing to the version of this blob. - */ - withVersion(versionId) { - return new _BlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.VERSIONID, versionId.length === 0 ? void 0 : versionId), this.pipeline); - } - /** - * Creates a AppendBlobClient object. - * - */ - getAppendBlobClient() { - return new AppendBlobClient(this.url, this.pipeline); - } - /** - * Creates a BlockBlobClient object. - * - */ - getBlockBlobClient() { - return new BlockBlobClient(this.url, this.pipeline); - } - /** - * Creates a PageBlobClient object. 
- * - */ - getPageBlobClient() { - return new PageBlobClient(this.url, this.pipeline); + return new _PageBlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); } /** - * Reads or downloads a blob from the system, including its metadata and properties. - * You can also call Get Blob to read a snapshot. - * - * * In Node.js, data returns in a Readable stream readableStreamBody - * * In browsers, data returns in a promise blobBody - * - * @see https://learn.microsoft.com/rest/api/storageservices/get-blob - * - * @param offset - From which position of the blob to download, greater than or equal to 0 - * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined - * @param options - Optional options to Blob Download operation. - * - * - * Example usage (Node.js): - * - * ```ts snippet:ReadmeSampleDownloadBlob_Node - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; - * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); - * - * const containerName = ""; - * const blobName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); - * const blobClient = containerClient.getBlobClient(blobName); - * - * // Get blob content from position 0 to the end - * // In Node.js, get downloaded data by accessing downloadBlockBlobResponse.readableStreamBody - * const downloadBlockBlobResponse = await blobClient.download(); - * if (downloadBlockBlobResponse.readableStreamBody) { - * const downloaded = await streamToString(downloadBlockBlobResponse.readableStreamBody); - * console.log(`Downloaded blob content: ${downloaded}`); - * } - * - * async function streamToString(stream: NodeJS.ReadableStream): Promise<string> { - * const result = await new Promise<Buffer>((resolve, reject) => { - * const chunks: Buffer[] = []; - * stream.on("data", (data) => { - * chunks.push(Buffer.isBuffer(data) ? data : Buffer.from(data)); - * }); - * stream.on("end", () => { - * resolve(Buffer.concat(chunks)); - * }); - * stream.on("error", reject); - * }); - * return result.toString(); - * } - * ``` - * - * Example usage (browser): - * - * ```ts snippet:ReadmeSampleDownloadBlob_Browser - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; - * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); - * - * const containerName = ""; - * const blobName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); - * const blobClient = containerClient.getBlobClient(blobName); + * Creates a page blob of the specified length. Call uploadPages to upload + * data to a page blob.
+ * @see https://learn.microsoft.com/rest/api/storageservices/put-blob * - * // Get blob content from position 0 to the end - * // In browsers, get downloaded data by accessing downloadBlockBlobResponse.blobBody - * const downloadBlockBlobResponse = await blobClient.download(); - * const blobBody = await downloadBlockBlobResponse.blobBody; - * if (blobBody) { - * const downloaded = await blobBody.text(); - * console.log(`Downloaded blob content: ${downloaded}`); - * } - * ``` + * @param size - size of the page blob. + * @param options - Options to the Page Blob Create operation. + * @returns Response data for the Page Blob Create operation. */ - async download(offset = 0, count, options = {}) { - options.conditions = options.conditions || {}; + async create(size, options = {}) { options.conditions = options.conditions || {}; (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("BlobClient-download", options, async (updatedOptions) => { - const res = (0, utils_common_js_1.assertResponse)(await this.blobContext.download({ + return tracing_js_1.tracingClient.withSpan("PageBlobClient-create", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.create(0, size, { abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, + metadata: options.metadata, modifiedAccessConditions: { ...options.conditions, ifTags: options.conditions?.tagConditions }, - requestOptions: { - onDownloadProgress: core_util_1.isNodeLike ? void 0 : options.onProgress - // for Node.js, progress is reported by RetriableReadableStream - }, - range: offset === 0 && !count ? 
void 0 : (0, Range_js_1.rangeToString)({ offset, count }), - rangeGetContentMD5: options.rangeGetContentMD5, - rangeGetContentCRC64: options.rangeGetContentCrc64, - snapshot: options.snapshot, cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, + immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, + legalHold: options.legalHold, + tier: (0, models_js_1.toAccessTier)(options.tier), + blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), tracingOptions: updatedOptions.tracingOptions })); - const wrappedRes = { - ...res, - _response: res._response, - // _response is made non-enumerable - objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, - objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(res.objectReplicationRules) - }; - if (!core_util_1.isNodeLike) { - return wrappedRes; - } - if (options.maxRetryRequests === void 0 || options.maxRetryRequests < 0) { - options.maxRetryRequests = constants_js_1.DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; - } - if (res.contentLength === void 0) { - throw new RangeError(`File download response doesn't contain valid content length header`); - } - if (!res.etag) { - throw new RangeError(`File download response doesn't contain valid etag header`); - } - return new BlobDownloadResponse_js_1.BlobDownloadResponse(wrappedRes, async (start) => { - const updatedDownloadOptions = { - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ifMatch: options.conditions.ifMatch || res.etag, - ifModifiedSince: options.conditions.ifModifiedSince, - ifNoneMatch: options.conditions.ifNoneMatch, - ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, - ifTags: options.conditions?.tagConditions - }, - range: (0, Range_js_1.rangeToString)({ - count: offset + res.contentLength - start, - offset: start - }), - rangeGetContentMD5: options.rangeGetContentMD5, - rangeGetContentCRC64: options.rangeGetContentCrc64, - snapshot: options.snapshot, - cpkInfo: options.customerProvidedKey - }; - return (await this.blobContext.download({ - abortSignal: options.abortSignal, - ...updatedDownloadOptions - })).readableStreamBody; - }, offset, res.contentLength, { - maxRetryRequests: options.maxRetryRequests, - onProgress: options.onProgress - }); }); } /** - * Returns true if the Azure blob resource represented by this client exists; false otherwise. - * - * NOTE: use this function with care since an existing blob might be deleted by other clients or - * applications. Vice versa new blobs might be added by other clients or applications after this - * function completes. + * Creates a page blob of the specified length. Call uploadPages to upload + * data to a page blob. If the blob with the same name already exists, the content + * of the existing blob will remain unchanged. + * @see https://learn.microsoft.com/rest/api/storageservices/put-blob * - * @param options - options to Exists operation. + * @param size - size of the page blob.
+ * @param options - */ - async exists(options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-exists", options, async (updatedOptions) => { + async createIfNotExists(size, options = {}) { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-createIfNotExists", options, async (updatedOptions) => { try { - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - await this.getProperties({ - abortSignal: options.abortSignal, - customerProvidedKey: options.customerProvidedKey, - conditions: options.conditions, + const conditions = { ifNoneMatch: constants_js_1.ETagAny }; + const res = (0, utils_common_js_1.assertResponse)(await this.create(size, { + ...options, + conditions, tracingOptions: updatedOptions.tracingOptions - }); - return true; - } catch (e) { - if (e.statusCode === 404) { - return false; - } else if (e.statusCode === 409 && (e.details.errorCode === constants_js_1.BlobUsesCustomerSpecifiedEncryptionMsg || e.details.errorCode === constants_js_1.BlobDoesNotUseCustomerSpecifiedEncryption)) { - return true; - } - throw e; - } - }); - } - /** - * Returns all user-defined metadata, standard HTTP properties, and system properties - * for the blob. It does not return the content of the blob. - * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-properties - * - * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if - * they originally contained uppercase characters. This differs from the metadata keys returned by - * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which - * will retain their original casing. - * - * @param options - Optional options to Get Properties operation. - */ - async getProperties(options = {}) { - options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("BlobClient-getProperties", options, async (updatedOptions) => { - const res = (0, utils_common_js_1.assertResponse)(await this.blobContext.getProperties({ - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - cpkInfo: options.customerProvidedKey, - tracingOptions: updatedOptions.tracingOptions - })); - return { - ...res, - _response: res._response, - // _response is made non-enumerable - objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, - objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(res.objectReplicationRules) - }; - }); - } - /** - * Marks the specified blob or snapshot for deletion. The blob is later deleted - * during garbage collection. Note that in order to delete a blob, you must delete - * all of its snapshots. You can delete both at the same time with the Delete - * Blob operation. - * @see https://learn.microsoft.com/rest/api/storageservices/delete-blob - * - * @param options - Optional options to Blob Delete operation. 
- */ - async delete(options = {}) { - options.conditions = options.conditions || {}; - return tracing_js_1.tracingClient.withSpan("BlobClient-delete", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.delete({ - abortSignal: options.abortSignal, - deleteSnapshots: options.deleteSnapshots, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - tracingOptions: updatedOptions.tracingOptions - })); - }); - } - /** - * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted - * during garbage collection. Note that in order to delete a blob, you must delete - * all of its snapshots. You can delete both at the same time with the Delete - * Blob operation. - * @see https://learn.microsoft.com/rest/api/storageservices/delete-blob - * - * @param options - Optional options to Blob Delete operation. - */ - async deleteIfExists(options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-deleteIfExists", options, async (updatedOptions) => { - try { - const res = (0, utils_common_js_1.assertResponse)(await this.delete(updatedOptions)); + })); return { succeeded: true, ...res, @@ -104731,7 +104285,7 @@ var require_Clients = __commonJS({ // _response is made non-enumerable }; } catch (e) { - if (e.details?.errorCode === "BlobNotFound") { + if (e.details?.errorCode === "BlobAlreadyExists") { return { succeeded: false, ...e.response?.parsedHeaders, @@ -104743,76 +104297,100 @@ var require_Clients = __commonJS({ }); } /** - * Restores the contents and metadata of soft deleted blob and any associated - * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 - * or later. - * @see https://learn.microsoft.com/rest/api/storageservices/undelete-blob + * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. + * @see https://learn.microsoft.com/rest/api/storageservices/put-page * - * @param options - Optional options to Blob Undelete operation. + * @param body - Data to upload + * @param offset - Offset of destination page blob + * @param count - Content length of the body, also number of bytes to be uploaded + * @param options - Options to the Page Blob Upload Pages operation. + * @returns Response data for the Page Blob Upload Pages operation. 
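A sketch pairing create (documented above) with uploadPages; page blob sizes and write ranges must be 512-byte aligned, and the names below are placeholders, not part of this diff:

```ts
import { BlobServiceClient } from "@azure/storage-blob";
import { DefaultAzureCredential } from "@azure/identity";

const account = "";
const blobServiceClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  new DefaultAzureCredential(),
);
const pageBlobClient = blobServiceClient
  .getContainerClient("")
  .getPageBlobClient("");

// The blob size and every written range must be multiples of 512 bytes.
await pageBlobClient.create(1024);

const page = Buffer.alloc(512, "x");
await pageBlobClient.uploadPages(page, 0, page.length); // first page
await pageBlobClient.uploadPages(page, 512, page.length); // second page
```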
*/ - async undelete(options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-undelete", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.undelete({ + async uploadPages(body, offset, count, options = {}) { + options.conditions = options.conditions || {}; + (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); + return tracing_js_1.tracingClient.withSpan("PageBlobClient-uploadPages", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.uploadPages(count, body, { abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + requestOptions: { + onUploadProgress: options.onProgress + }, + range: (0, Range_js_1.rangeToString)({ offset, count }), + sequenceNumberAccessConditions: options.conditions, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions })); }); } /** - * Sets system properties on the blob. - * - * If no value provided, or no value provided for the specified blob HTTP headers, - * these blob HTTP headers without a value will be cleared. - * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-properties + * The Upload Pages operation writes a range of pages to a page blob where the + * contents are read from a URL. + * @see https://learn.microsoft.com/rest/api/storageservices/put-page-from-url * - * @param blobHTTPHeaders - If no value provided, or no value provided for - * the specified blob HTTP headers, these blob HTTP - * headers without a value will be cleared. - * A common header to set is `blobContentType` - * enabling the browser to provide functionality - * based on file type. - * @param options - Optional options to Blob Set HTTP Headers operation. + * @param sourceURL - Specify a URL to the copy source; a Shared Access Signature (SAS) may be needed for authentication + * @param sourceOffset - The source offset to copy from.
Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - */ - async setHTTPHeaders(blobHTTPHeaders, options = {}) { + async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("BlobClient-setHTTPHeaders", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.setHttpHeaders({ + return tracing_js_1.tracingClient.withSpan("PageBlobClient-uploadPagesFromURL", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.uploadPagesFromURL(sourceURL, (0, Range_js_1.rangeToString)({ offset: sourceOffset, count }), 0, (0, Range_js_1.rangeToString)({ offset: destOffset, count }), { abortSignal: options.abortSignal, - blobHttpHeaders: blobHTTPHeaders, + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, + sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: { ...options.conditions, ifTags: options.conditions?.tagConditions }, - // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger. + sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions?.ifMatch, + sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), + fileRequestIntent: options.sourceShareTokenIntent, tracingOptions: updatedOptions.tracingOptions })); }); } /** - * Sets user-defined metadata for the specified blob as one or more name-value pairs. - * - * If no option provided, or no metadata defined in the parameter, the blob - * metadata will be removed. - * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-metadata + * Frees the specified pages from the page blob. + * @see https://learn.microsoft.com/rest/api/storageservices/put-page * - * @param metadata - Replace existing metadata with this value. - * If no value provided the existing metadata will be removed. - * @param options - Optional options to Set Metadata operation. + * @param offset - Starting byte position of the pages to clear. + * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. 
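Continuing the page-blob sketch above, clearPages and getPageRanges (documented just below) can be combined to check which ranges remain valid; the offset/count shape of the returned ranges assumes the rangeResponseFromModel conversion used in the code below:

```ts
// Assumes pageBlobClient from the previous sketch, with two 512-byte
// pages written. Clearing a range zeroes it and drops it from the
// valid page ranges.
await pageBlobClient.clearPages(0, 512);

// Only the second page should remain as a valid range.
const ranges = await pageBlobClient.getPageRanges(0, 1024);
for (const range of ranges.pageRange ?? []) {
  console.log(`valid range at offset ${range.offset}, ${range.count} bytes`);
}
```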
*/ - async setMetadata(metadata, options = {}) { + async clearPages(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("BlobClient-setMetadata", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.setMetadata({ + return tracing_js_1.tracingClient.withSpan("PageBlobClient-clearPages", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.clearPages(0, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - metadata, modifiedAccessConditions: { ...options.conditions, ifTags: options.conditions?.tagConditions }, + range: (0, Range_js_1.rangeToString)({ offset, count }), + sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions @@ -104820,103 +104398,302 @@ var require_Clients = __commonJS({ }); } /** - * Sets tags on the underlying blob. - * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. - * Valid tag key and value characters include lower and upper case letters, digits (0-9), - * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges * - * @param tags - - * @param options - + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. */ - async setTags(tags, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-setTags", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.setTags({ + async getPageRanges(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRanges", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRanges({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: { ...options.conditions, ifTags: options.conditions?.tagConditions }, - tracingOptions: updatedOptions.tracingOptions, - tags: (0, utils_common_js_1.toBlobTags)(tags) + range: (0, Range_js_1.rangeToString)({ offset, count }), + tracingOptions: updatedOptions.tracingOptions })); + return (0, PageBlobRangeResponse_js_1.rangeResponseFromModel)(response); }); } /** - * Gets the tags associated with the underlying blob. + * getPageRangesSegment returns a single segment of page ranges starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesSegment again + * (passing the previously-returned Marker) to get the next segment. + * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges * - * @param options - + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get.
+ * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to PageBlob Get Page Ranges Segment operation. */ - async getTags(options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-getTags", options, async (updatedOptions) => { - const response = (0, utils_common_js_1.assertResponse)(await this.blobContext.getTags({ + async listPageRangesSegment(offset = 0, count, marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRangesSegment", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRanges({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: { ...options.conditions, ifTags: options.conditions?.tagConditions }, + range: (0, Range_js_1.rangeToString)({ offset, count }), + marker, + maxPageSize: options.maxPageSize, tracingOptions: updatedOptions.tracingOptions })); - const wrappedResponse = { - ...response, - _response: response._response, - // _response is made non-enumerable - tags: (0, utils_common_js_1.toTags)({ blobTagSet: response.blobTagSet }) || {} - }; - return wrappedResponse; }); } /** - * Get a {@link BlobLeaseClient} that manages leases on the blob. + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} * - * @param proposeLeaseId - Initial proposed lease Id. - * @returns A new BlobLeaseClient object for managing leases on the blob. + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to List Page Ranges operation. */ - getBlobLeaseClient(proposeLeaseId) { - return new BlobLeaseClient_js_1.BlobLeaseClient(this, proposeLeaseId); + async *listPageRangeItemSegments(offset = 0, count, marker, options = {}) { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === void 0) { + do { + getPageRangeItemSegmentsResponse = await this.listPageRangesSegment(offset, count, marker, options); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield await getPageRangeItemSegmentsResponse; + } while (marker); + } } /** - * Creates a read-only snapshot of a blob. - * @see https://learn.microsoft.com/rest/api/storageservices/snapshot-blob + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects * - * @param options - Optional options to the Blob Create Snapshot operation. + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to List Page Ranges operation. 
*/ - async createSnapshot(options = {}) { + async *listPageRangeItems(offset = 0, count, options = {}) { + let marker; + for await (const getPageRangesSegment of this.listPageRangeItemSegments(offset, count, marker, options)) { + yield* (0, utils_common_js_1.ExtractPageRangeInfoItems)(getPageRangesSegment); + } + } + /** + * Returns an async iterable iterator to list of page ranges for a page blob. + * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges for a page blob. + * + * ```ts snippet:ClientsListPageBlobs + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const blobName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const pageBlobClient = containerClient.getPageBlobClient(blobName); + * + * // Example using `for await` syntax + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRanges()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * + * // Example using `iter.next()` syntax + * i = 1; + * const iter = pageBlobClient.listPageRanges(); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Page range ${i++}: ${value.start} - ${value.end}`); + * ({ value, done } = await iter.next()); + * } + * + * // Example using `byPage()` syntax + * i = 1; + * for await (const page of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { + * for (const pageRange of page.pageRange || []) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * + * // Example using paging with a marker + * i = 1; + * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * // Prints 2 page ranges + * if (response.pageRange) { + * for (const pageRange of response.pageRange) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * // Prints 10 page ranges + * if (response.pageRange) { + * for (const pageRange of response.pageRange) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * ``` + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. 
+ */ listPageRanges(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("BlobClient-createSnapshot", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.createSnapshot({ + const iter = this.listPageRangeItems(offset, count, options); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listPageRangeItemSegments(offset, count, settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...options + }); + } + }; + } /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob. + * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. + */ + async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { + options.conditions = options.conditions || {}; + return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRangesDiff", options, async (updatedOptions) => { + const result = (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRangesDiff({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, - metadata: options.metadata, modifiedAccessConditions: { ...options.conditions, ifTags: options.conditions?.tagConditions }, - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, + prevsnapshot: prevSnapshot, + range: (0, Range_js_1.rangeToString)({ offset, count }), tracingOptions: updatedOptions.tracingOptions })); + return (0, PageBlobRangeResponse_js_1.rangeResponseFromModel)(result); }); } /** - * Asynchronously copies a blob to a destination within the storage account. - * This method returns a long running operation poller that allows you to wait - * indefinitely until the copy is completed. - * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. - * Note that the onProgress callback will not be invoked if the operation completes in the first - * request, and attempting to cancel a completed copy will result in an error being thrown. + * getPageRangesDiffSegment returns a single segment of page ranges starting from the + * specified Marker for difference between previous snapshot and the target page blob. + * Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesDiffSegment again + * (passing the previously-returned Marker) to get the next segment. + * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges * - * In version 2012-02-12 and later, the source for a Copy Blob operation can be - * a committed blob in any Azure storage account.
- * Beginning with version 2015-02-21, the source for a Copy Blob operation can be - * an Azure file in any Azure storage account. - * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob - * operation to copy from another storage account. - * @see https://learn.microsoft.com/rest/api/storageservices/copy-blob + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRangesDiffSegment", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options?.abortSignal, + leaseAccessConditions: options?.conditions, + modifiedAccessConditions: { + ...options?.conditions, + ifTags: options?.conditions?.tagConditions + }, + prevsnapshot: prevSnapshotOrUrl, + range: (0, Range_js_1.rangeToString)({ + offset, + count + }), + marker, + maxPageSize: options?.maxPageSize, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} * - * ```ts snippet:ClientsBeginCopyFromURL + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + async *listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options) { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === void 0) { + do { + getPageRangeItemSegmentsResponse = await this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield await getPageRangeItemSegmentsResponse; + } while (marker); + } + } + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. 
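+ *
+ * A consumption sketch (illustrative only; this generator is internal, so the example goes
+ * through the public wrapper listPageRangesDiff, and `pageBlobClient`/`prevSnapshot` are
+ * placeholders for an authenticated client and a snapshot timestamp):
+ *
+ * ```ts
+ * for await (const rangeInfo of pageBlobClient.listPageRangesDiff(0, 1024, prevSnapshot)) {
+ *   // Each flattened item carries start/end byte offsets plus an isClear flag
+ *   // distinguishing cleared pages from changed pages.
+ *   console.log(`${rangeInfo.isClear ? "cleared" : "changed"}: ${rangeInfo.start} - ${rangeInfo.end}`);
+ * }
+ * ```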
+ */ + async *listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { + let marker; + for await (const getPageRangesSegment of this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)) { + yield* (0, utils_common_js_1.ExtractPageRangeInfoItems)(getPageRangesSegment); + } + } + /** + * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * + * ```ts snippet:ClientsListPageBlobsDiff * import { BlobServiceClient } from "@azure/storage-blob"; * import { DefaultAzureCredential } from "@azure/identity"; * @@ -104929,486 +104706,843 @@ var require_Clients = __commonJS({ * const containerName = ""; * const blobName = ""; * const containerClient = blobServiceClient.getContainerClient(containerName); - * const blobClient = containerClient.getBlobClient(blobName); - * - * // Example using automatic polling - * const automaticCopyPoller = await blobClient.beginCopyFromURL("url"); - * const automaticResult = await automaticCopyPoller.pollUntilDone(); + * const pageBlobClient = containerClient.getPageBlobClient(blobName); * - * // Example using manual polling - * const manualCopyPoller = await blobClient.beginCopyFromURL("url"); - * while (!manualCopyPoller.isDone()) { - * await manualCopyPoller.poll(); + * const offset = 0; + * const count = 1024; + * const previousSnapshot = ""; + * // Example using `for await` syntax + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRangesDiff(offset, count, previousSnapshot)) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } - * const manualResult = manualCopyPoller.getResult(); * - * // Example using progress updates - * const progressUpdatesCopyPoller = await blobClient.beginCopyFromURL("url", { - * onProgress(state) { - * console.log(`Progress: ${state.copyProgress}`); - * }, - * }); - * const progressUpdatesResult = await progressUpdatesCopyPoller.pollUntilDone(); + * // Example using `iter.next()` syntax + * i = 1; + * const iter = pageBlobClient.listPageRangesDiff(offset, count, previousSnapshot); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Page range ${i++}: ${value.start} - ${value.end}`); + * ({ value, done } = await iter.next()); + * } * - * // Example using a changing polling interval (default 15 seconds) - * const pollingIntervalCopyPoller = await blobClient.beginCopyFromURL("url", { - * intervalInMs: 1000, // poll blob every 1 second for copy progress - * }); - * const pollingIntervalResult = await pollingIntervalCopyPoller.pollUntilDone(); + * // Example using `byPage()` syntax + * i = 1; + * for await (const page of pageBlobClient + * .listPageRangesDiff(offset, count, previousSnapshot) + * .byPage({ maxPageSize: 20 })) { + * for (const pageRange of page.pageRange || []) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } * - * // Example using copy cancellation: - * const cancelCopyPoller = await blobClient.beginCopyFromURL("url"); - * // cancel operation after starting it. 
- * try { - * await cancelCopyPoller.cancelOperation(); - * // calls to get the result now throw PollerCancelledError - * cancelCopyPoller.getResult(); - * } catch (err: any) { - * if (err.name === "PollerCancelledError") { - * console.log("The copy was cancelled."); + * // Example using paging with a marker + * i = 1; + * let iterator = pageBlobClient + * .listPageRangesDiff(offset, count, previousSnapshot) + * .byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * // Prints 2 page ranges + * if (response.pageRange) { + * for (const pageRange of response.pageRange) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = pageBlobClient + * .listPageRangesDiff(offset, count, previousSnapshot) + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * // Prints 10 page ranges + * if (response.pageRange) { + * for (const pageRange of response.pageRange) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * } * ``` * - * @param copySource - url to the source Azure Blob/File. - * @param options - Optional options to the Blob Start Copy From URL operation. + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. */ - async beginCopyFromURL(copySource, options = {}) { - const client = { - abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), - getProperties: (...args) => this.getProperties(...args), - startCopyFromURL: (...args) => this.startCopyFromURL(...args) + listPageRangesDiff(offset, count, prevSnapshot, options = {}) { + options.conditions = options.conditions || {}; + const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, { + ...options + }); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...options + }); + } }; - const poller = new BlobStartCopyFromUrlPoller_js_1.BlobBeginCopyFromUrlPoller({ - blobClient: client, - copySource, - intervalInMs: options.intervalInMs, - onProgress: options.onProgress, - resumeFrom: options.resumeFrom, - startCopyFromURLOptions: options + } + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. + * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. 
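+ *
+ * Example usage (a sketch; `pageBlobClient` is an authenticated client as in the snippets
+ * above, and `prevSnapshotUrl` is a placeholder for a managed-disk snapshot URL):
+ *
+ * ```ts
+ * const diff = await pageBlobClient.getPageRangesDiffForManagedDisks(0, 4 * 1024 * 1024, prevSnapshotUrl);
+ * for (const range of diff.pageRange ?? []) {
+ *   console.log(`changed: ${range.start} - ${range.end}`);
+ * }
+ * for (const range of diff.clearRange ?? []) {
+ *   console.log(`cleared: ${range.start} - ${range.end}`);
+ * }
+ * ```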
+ */ + async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { + options.conditions = options.conditions || {}; + return tracing_js_1.tracingClient.withSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + prevSnapshotUrl, + range: (0, Range_js_1.rangeToString)({ offset, count }), + tracingOptions: updatedOptions.tracingOptions + })); + return (0, PageBlobRangeResponse_js_1.rangeResponseFromModel)(response); }); - await poller.poll(); - return poller; } /** - * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero - * length and full metadata. Version 2012-02-12 and newer. - * @see https://learn.microsoft.com/rest/api/storageservices/abort-copy-blob + * Resizes the page blob to the specified size (which must be a multiple of 512). + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-properties * - * @param copyId - Id of the Copy From URL operation. - * @param options - Optional options to the Blob Abort Copy From URL operation. + * @param size - Target size + * @param options - Options to the Page Blob Resize operation. + * @returns Response data for the Page Blob Resize operation. */ - async abortCopyFromURL(copyId, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-abortCopyFromURL", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.abortCopyFromURL(copyId, { + async resize(size, options = {}) { + options.conditions = options.conditions || {}; + return tracing_js_1.tracingClient.withSpan("PageBlobClient-resize", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.resize(size, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ...options.conditions, + ifTags: options.conditions?.tagConditions + }, + encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions })); }); } /** - * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not - * return a response until the copy is complete. - * @see https://learn.microsoft.com/rest/api/storageservices/copy-blob-from-url + * Sets a page blob's sequence number. + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-properties * - * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication - * @param options - + * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. + * @param sequenceNumber - Required if sequenceNumberAction is max or update + * @param options - Options to the Page Blob Update Sequence Number operation. + * @returns Response data for the Page Blob Update Sequence Number operation. 
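+ *
+ * Example usage (a sketch; `pageBlobClient` is assumed to be an authenticated client as in
+ * the snippets above):
+ *
+ * ```ts
+ * // Pin the sequence number to an explicit value.
+ * await pageBlobClient.updateSequenceNumber("update", 100);
+ * // Raise it to the larger of the current value and 150.
+ * await pageBlobClient.updateSequenceNumber("max", 150);
+ * // "increment" bumps the current value by one; no explicit number is passed.
+ * await pageBlobClient.updateSequenceNumber("increment");
+ * ```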
*/ - async syncCopyFromURL(copySource, options = {}) { + async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - return tracing_js_1.tracingClient.withSpan("BlobClient-syncCopyFromURL", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.copyFromURL(copySource, { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-updateSequenceNumber", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, { abortSignal: options.abortSignal, - metadata: options.metadata, + blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: { ...options.conditions, ifTags: options.conditions?.tagConditions }, - sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions?.ifMatch, - sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince - }, - sourceContentMD5: options.sourceContentMD5, - copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), - tier: (0, models_js_1.toAccessTier)(options.tier), - blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), - immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, - immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, - legalHold: options.legalHold, - encryptionScope: options.encryptionScope, - copySourceTags: options.copySourceTags, - fileRequestIntent: options.sourceShareTokenIntent, tracingOptions: updatedOptions.tracingOptions })); }); } /** - * Sets the tier on a blob. The operation is allowed on a page blob in a premium - * storage account and on a block blob in a blob storage account (locally redundant - * storage only). A premium page blob's tier determines the allowed size, IOPS, - * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive - * storage type. This operation does not update the blob's ETag. - * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-tier + * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. + * The snapshot is copied such that only the differential changes between the previously + * copied snapshot are transferred to the destination. + * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. + * @see https://learn.microsoft.com/rest/api/storageservices/incremental-copy-blob + * @see https://learn.microsoft.com/azure/virtual-machines/windows/incremental-snapshots * - * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. - * @param options - Optional options to the Blob Set Tier operation. + * @param copySource - Specifies the name of the source page blob snapshot. For example, + * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Options to the Page Blob Copy Incremental operation. + * @returns Response data for the Page Blob Copy Incremental operation. 
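+ *
+ * Example usage (a sketch; the account, container, blob, and snapshot values are placeholders):
+ *
+ * ```ts
+ * const copy = await pageBlobClient.startCopyIncremental(
+ *   "https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=2024-01-01T00:00:00.0000000Z",
+ * );
+ * // The copy is asynchronous; poll its progress via getProperties().
+ * console.log(`copy id: ${copy.copyId}, status: ${copy.copyStatus}`);
+ * ```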
*/ - async setAccessTier(tier, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-setAccessTier", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.setTier((0, models_js_1.toAccessTier)(tier), { + async startCopyIncremental(copySource, options = {}) { + return tracing_js_1.tracingClient.withSpan("PageBlobClient-startCopyIncremental", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.copyIncremental(copySource, { abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, modifiedAccessConditions: { ...options.conditions, ifTags: options.conditions?.tagConditions }, - rehydratePriority: options.rehydratePriority, tracingOptions: updatedOptions.tracingOptions })); }); } - async downloadToBuffer(param1, param2, param3, param4 = {}) { - let buffer; - let offset = 0; - let count = 0; - let options = param4; - if (param1 instanceof Buffer) { - buffer = param1; - offset = param2 || 0; - count = typeof param3 === "number" ? param3 : 0; + }; + exports2.PageBlobClient = PageBlobClient; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/BatchUtils.js +var require_BatchUtils = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/BatchUtils.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getBodyAsText = getBodyAsText; + exports2.utf8ByteLength = utf8ByteLength; + var utils_js_1 = require_utils7(); + var constants_js_1 = require_constants15(); + async function getBodyAsText(batchResponse) { + let buffer = Buffer.alloc(constants_js_1.BATCH_MAX_PAYLOAD_IN_BYTES); + const responseLength = await (0, utils_js_1.streamToBuffer2)(batchResponse.readableStreamBody, buffer); + buffer = buffer.slice(0, responseLength); + return buffer.toString(); + } + function utf8ByteLength(str2) { + return Buffer.byteLength(str2); + } + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/BatchResponseParser.js +var require_BatchResponseParser = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/BatchResponseParser.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BatchResponseParser = void 0; + var core_rest_pipeline_1 = require_commonjs6(); + var core_http_compat_1 = require_commonjs9(); + var constants_js_1 = require_constants15(); + var BatchUtils_js_1 = require_BatchUtils(); + var log_js_1 = require_log6(); + var HTTP_HEADER_DELIMITER = ": "; + var SPACE_DELIMITER = " "; + var NOT_FOUND = -1; + var BatchResponseParser = class { + batchResponse; + responseBatchBoundary; + perResponsePrefix; + batchResponseEnding; + subRequests; + constructor(batchResponse, subRequests) { + if (!batchResponse || !batchResponse.contentType) { + throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); + } + if (!subRequests || subRequests.size === 0) { + throw new RangeError("Invalid state: subRequests is not provided or size is 0."); + } + this.batchResponse = batchResponse; + this.subRequests = subRequests; + this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; + this.perResponsePrefix = `--${this.responseBatchBoundary}${constants_js_1.HTTP_LINE_ENDING}`; + this.batchResponseEnding = `--${this.responseBatchBoundary}--`; + } + // For example of response, please refer to https://learn.microsoft.com/rest/api/storageservices/blob-batch#response + async parseBatchResponse() { + 
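// Parsing strategy: read the whole multipart body as text, split it on the batch
+ // boundary into per-sub-response blocks, then walk each block line by line
+ // (status line, headers, blank line, body), matching it back to its originating
+ // sub request through the Content-ID header.
+ 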
if (this.batchResponse._response.status !== constants_js_1.HTTPURLConnection.HTTP_ACCEPTED) { + throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); + } + const responseBodyAsText = await (0, BatchUtils_js_1.getBodyAsText)(this.batchResponse); + const subResponses = responseBodyAsText.split(this.batchResponseEnding)[0].split(this.perResponsePrefix).slice(1); + const subResponseCount = subResponses.length; + if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { + throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); + } + const deserializedSubResponses = new Array(subResponseCount); + let subResponsesSucceededCount = 0; + let subResponsesFailedCount = 0; + for (let index = 0; index < subResponseCount; index++) { + const subResponse = subResponses[index]; + const deserializedSubResponse = {}; + deserializedSubResponse.headers = (0, core_http_compat_1.toHttpHeadersLike)((0, core_rest_pipeline_1.createHttpHeaders)()); + const responseLines = subResponse.split(`${constants_js_1.HTTP_LINE_ENDING}`); + let subRespHeaderStartFound = false; + let subRespHeaderEndFound = false; + let subRespFailed = false; + let contentId = NOT_FOUND; + for (const responseLine of responseLines) { + if (!subRespHeaderStartFound) { + if (responseLine.startsWith(constants_js_1.HeaderConstants.CONTENT_ID)) { + contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); + } + if (responseLine.startsWith(constants_js_1.HTTP_VERSION_1_1)) { + subRespHeaderStartFound = true; + const tokens = responseLine.split(SPACE_DELIMITER); + deserializedSubResponse.status = parseInt(tokens[1]); + deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); + } + continue; + } + if (responseLine.trim() === "") { + if (!subRespHeaderEndFound) { + subRespHeaderEndFound = true; + } + continue; + } + if (!subRespHeaderEndFound) { + if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { + throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); + } + const tokens = responseLine.split(HTTP_HEADER_DELIMITER); + deserializedSubResponse.headers.set(tokens[0], tokens[1]); + if (tokens[0] === constants_js_1.HeaderConstants.X_MS_ERROR_CODE) { + deserializedSubResponse.errorCode = tokens[1]; + subRespFailed = true; + } + } else { + if (!deserializedSubResponse.bodyAsText) { + deserializedSubResponse.bodyAsText = ""; + } + deserializedSubResponse.bodyAsText += responseLine; + } + } + if (contentId !== NOT_FOUND && Number.isInteger(contentId) && contentId >= 0 && contentId < this.subRequests.size && deserializedSubResponses[contentId] === void 0) { + deserializedSubResponse._request = this.subRequests.get(contentId); + deserializedSubResponses[contentId] = deserializedSubResponse; + } else { + log_js_1.logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); + } + if (subRespFailed) { + subResponsesFailedCount++; + } else { + subResponsesSucceededCount++; + } + } + return { + subResponses: deserializedSubResponses, + subResponsesSucceededCount, + subResponsesFailedCount + }; + } + }; + exports2.BatchResponseParser = BatchResponseParser; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/utils/Mutex.js +var require_Mutex = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/utils/Mutex.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, 
"__esModule", { value: true }); + exports2.Mutex = void 0; + var MutexLockStatus; + (function(MutexLockStatus2) { + MutexLockStatus2[MutexLockStatus2["LOCKED"] = 0] = "LOCKED"; + MutexLockStatus2[MutexLockStatus2["UNLOCKED"] = 1] = "UNLOCKED"; + })(MutexLockStatus || (MutexLockStatus = {})); + var Mutex = class { + /** + * Lock for a specific key. If the lock has been acquired by another customer, then + * will wait until getting the lock. + * + * @param key - lock key + */ + static async lock(key) { + return new Promise((resolve2) => { + if (this.keys[key] === void 0 || this.keys[key] === MutexLockStatus.UNLOCKED) { + this.keys[key] = MutexLockStatus.LOCKED; + resolve2(); + } else { + this.onUnlockEvent(key, () => { + this.keys[key] = MutexLockStatus.LOCKED; + resolve2(); + }); + } + }); + } + /** + * Unlock a key. + * + * @param key - + */ + static async unlock(key) { + return new Promise((resolve2) => { + if (this.keys[key] === MutexLockStatus.LOCKED) { + this.emitUnlockEvent(key); + } + delete this.keys[key]; + resolve2(); + }); + } + static keys = {}; + static listeners = {}; + static onUnlockEvent(key, handler2) { + if (this.listeners[key] === void 0) { + this.listeners[key] = [handler2]; } else { - offset = typeof param1 === "number" ? param1 : 0; - count = typeof param2 === "number" ? param2 : 0; - options = param3 || {}; + this.listeners[key].push(handler2); } - let blockSize = options.blockSize ?? 0; - if (blockSize < 0) { - throw new RangeError("blockSize option must be >= 0"); + } + static emitUnlockEvent(key) { + if (this.listeners[key] !== void 0 && this.listeners[key].length > 0) { + const handler2 = this.listeners[key].shift(); + setImmediate(() => { + handler2.call(this); + }); } - if (blockSize === 0) { - blockSize = constants_js_1.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + }; + exports2.Mutex = Mutex; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/BlobBatch.js +var require_BlobBatch = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/BlobBatch.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobBatch = void 0; + var core_util_1 = require_commonjs4(); + var core_auth_1 = require_commonjs7(); + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_2 = require_commonjs4(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var Clients_js_1 = require_Clients(); + var Mutex_js_1 = require_Mutex(); + var Pipeline_js_1 = require_Pipeline(); + var utils_common_js_1 = require_utils_common(); + var core_xml_1 = require_commonjs10(); + var constants_js_1 = require_constants15(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var tracing_js_1 = require_tracing(); + var core_client_1 = require_commonjs8(); + var StorageSharedKeyCredentialPolicyV2_js_1 = require_StorageSharedKeyCredentialPolicyV22(); + var BlobBatch = class { + batchRequest; + batch = "batch"; + batchType; + constructor() { + this.batchRequest = new InnerBatchRequest(); + } + /** + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary. + * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 + */ + getMultiPartContentType() { + return this.batchRequest.getMultipartContentType(); + } + /** + * Get assembled HTTP request body for sub requests. + */ + getHttpRequestBody() { + return this.batchRequest.getHttpRequestBody(); + } + /** + * Get sub requests that are added into the batch request. 
+ */ + getSubRequests() { + return this.batchRequest.getSubRequests(); + } + async addSubRequestInternal(subRequest, assembleSubRequestFunc) { + await Mutex_js_1.Mutex.lock(this.batch); + try { + this.batchRequest.preAddSubRequest(subRequest); + await assembleSubRequestFunc(); + this.batchRequest.postAddSubRequest(subRequest); + } finally { + await Mutex_js_1.Mutex.unlock(this.batch); } - if (offset < 0) { - throw new RangeError("offset option must be >= 0"); + } + setBatchType(batchType) { + if (!this.batchType) { + this.batchType = batchType; } - if (count && count <= 0) { - throw new RangeError("count option must be greater than 0"); + if (this.batchType !== batchType) { + throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); } - if (!options.conditions) { - options.conditions = {}; + } + async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { + let url; + let credential; + if (typeof urlOrBlobClient === "string" && (core_util_2.isNodeLike && credentialOrOptions instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrOptions instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrOptions))) { + url = urlOrBlobClient; + credential = credentialOrOptions; + } else if (urlOrBlobClient instanceof Clients_js_1.BlobClient) { + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + options = credentialOrOptions; + } else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); } - return tracing_js_1.tracingClient.withSpan("BlobClient-downloadToBuffer", options, async (updatedOptions) => { - if (!count) { - const response = await this.getProperties({ - ...options, - tracingOptions: updatedOptions.tracingOptions - }); - count = response.contentLength - offset; - if (count < 0) { - throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); + if (!options) { + options = {}; + } + return tracing_js_1.tracingClient.withSpan("BatchDeleteRequest-addSubRequest", options, async (updatedOptions) => { + this.setBatchType("delete"); + await this.addSubRequestInternal({ + url, + credential + }, async () => { + await new Clients_js_1.BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); + }); + }); + } + async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { + let url; + let credential; + let tier; + if (typeof urlOrBlobClient === "string" && (core_util_2.isNodeLike && credentialOrTier instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrTier instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrTier))) { + url = urlOrBlobClient; + credential = credentialOrTier; + tier = tierOrOptions; + } else if (urlOrBlobClient instanceof Clients_js_1.BlobClient) { + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + tier = credentialOrTier; + options = tierOrOptions; + } else { + throw new RangeError("Invalid arguments. 
Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + return tracing_js_1.tracingClient.withSpan("BatchSetTierRequest-addSubRequest", options, async (updatedOptions) => { + this.setBatchType("setAccessTier"); + await this.addSubRequestInternal({ + url, + credential + }, async () => { + await new Clients_js_1.BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); + }); + }); + } + }; + exports2.BlobBatch = BlobBatch; + var InnerBatchRequest = class { + operationCount; + body; + subRequests; + boundary; + subRequestPrefix; + multipartContentType; + batchRequestEnding; + constructor() { + this.operationCount = 0; + this.body = ""; + const tempGuid = (0, core_util_1.randomUUID)(); + this.boundary = `batch_${tempGuid}`; + this.subRequestPrefix = `--${this.boundary}${constants_js_1.HTTP_LINE_ENDING}${constants_js_1.HeaderConstants.CONTENT_TYPE}: application/http${constants_js_1.HTTP_LINE_ENDING}${constants_js_1.HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; + this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; + this.batchRequestEnding = `--${this.boundary}--`; + this.subRequests = /* @__PURE__ */ new Map(); + } + /** + * Create pipeline to assemble sub requests. The idea here is to use existing + * credential and serialization/deserialization components, with additional policies to + * filter unnecessary headers, assemble sub requests into request's body + * and intercept request from going to wire. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + createPipeline(credential) { + const corePipeline = (0, core_rest_pipeline_1.createEmptyPipeline)(); + corePipeline.addPolicy((0, core_client_1.serializationPolicy)({ + stringifyXML: core_xml_1.stringifyXML, + serializerOptions: { + xml: { + xmlCharKey: "#" } } - if (!buffer) { - try { - buffer = Buffer.alloc(count); - } catch (error3) { - throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile". 
${error3.message}`); + }), { phase: "Serialize" }); + corePipeline.addPolicy(batchHeaderFilterPolicy()); + corePipeline.addPolicy(batchRequestAssemblePolicy(this), { afterPhase: "Sign" }); + if ((0, core_auth_1.isTokenCredential)(credential)) { + corePipeline.addPolicy((0, core_rest_pipeline_1.bearerTokenAuthenticationPolicy)({ + credential, + scopes: constants_js_1.StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: core_client_1.authorizeRequestOnTenantChallenge } + }), { phase: "Sign" }); + } else if (credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential) { + corePipeline.addPolicy((0, StorageSharedKeyCredentialPolicyV2_js_1.storageSharedKeyCredentialPolicy)({ + accountName: credential.accountName, + accountKey: credential.accountKey + }), { phase: "Sign" }); + } + const pipeline = new Pipeline_js_1.Pipeline([]); + pipeline._credential = credential; + pipeline._corePipeline = corePipeline; + return pipeline; + } + appendSubRequestToBody(request2) { + this.body += [ + this.subRequestPrefix, + // sub request constant prefix + `${constants_js_1.HeaderConstants.CONTENT_ID}: ${this.operationCount}`, + // sub request's content ID + "", + // empty line after sub request's content ID + `${request2.method.toString()} ${(0, utils_common_js_1.getURLPathAndQuery)(request2.url)} ${constants_js_1.HTTP_VERSION_1_1}${constants_js_1.HTTP_LINE_ENDING}` + // sub request start line with method + ].join(constants_js_1.HTTP_LINE_ENDING); + for (const [name, value] of request2.headers) { + this.body += `${name}: ${value}${constants_js_1.HTTP_LINE_ENDING}`; + } + this.body += constants_js_1.HTTP_LINE_ENDING; + } + preAddSubRequest(subRequest) { + if (this.operationCount >= constants_js_1.BATCH_MAX_REQUEST) { + throw new RangeError(`Cannot exceed ${constants_js_1.BATCH_MAX_REQUEST} sub requests in a single batch`); + } + const path4 = (0, utils_common_js_1.getURLPath)(subRequest.url); + if (!path4 || path4 === "") { + throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); + } + } + postAddSubRequest(subRequest) { + this.subRequests.set(this.operationCount, subRequest); + this.operationCount++; + } + // Return the http request body with assembling the ending line to the sub request body. 
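+ // For reference, the assembled body has this shape (one block per sub request;
+ // values in angle brackets are filled in at assembly time):
+ //   --batch_<guid>
+ //   Content-Type: application/http
+ //   Content-Transfer-Encoding: binary
+ //   Content-ID: <n>
+ //
+ //   <METHOD> <path-and-query> HTTP/1.1
+ //   <header-name>: <value>
+ //
+ //   --batch_<guid>--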
+ getHttpRequestBody() { + return `${this.body}${this.batchRequestEnding}${constants_js_1.HTTP_LINE_ENDING}`; + } + getMultipartContentType() { + return this.multipartContentType; + } + getSubRequests() { + return this.subRequests; + } + }; + function batchRequestAssemblePolicy(batchRequest) { + return { + name: "batchRequestAssemblePolicy", + async sendRequest(request2) { + batchRequest.appendSubRequestToBody(request2); + return { + request: request2, + status: 200, + headers: (0, core_rest_pipeline_1.createHttpHeaders)() + }; + } + }; + } + function batchHeaderFilterPolicy() { + return { + name: "batchHeaderFilterPolicy", + async sendRequest(request2, next) { + let xMsHeaderName = ""; + for (const [name] of request2.headers) { + if ((0, utils_common_js_1.iEqual)(name, constants_js_1.HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = name; } } - if (buffer.length < count) { - throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); - } - let transferProgress = 0; - const batch = new Batch_js_1.Batch(options.concurrency); - for (let off = offset; off < offset + count; off = off + blockSize) { - batch.addOperation(async () => { - let chunkEnd = offset + count; - if (off + blockSize < chunkEnd) { - chunkEnd = off + blockSize; - } - const response = await this.download(off, chunkEnd - off, { - abortSignal: options.abortSignal, - conditions: options.conditions, - maxRetryRequests: options.maxRetryRequestsPerBlock, - customerProvidedKey: options.customerProvidedKey, - tracingOptions: updatedOptions.tracingOptions - }); - const stream = response.readableStreamBody; - await (0, utils_js_1.streamToBuffer)(stream, buffer, off - offset, chunkEnd - offset); - transferProgress += chunkEnd - off; - if (options.onProgress) { - options.onProgress({ loadedBytes: transferProgress }); - } - }); + if (xMsHeaderName !== "") { + request2.headers.delete(xMsHeaderName); } - await batch.do(); - return buffer; - }); + return next(request2); + } + }; + } + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/BlobBatchClient.js +var require_BlobBatchClient = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/BlobBatchClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobBatchClient = void 0; + var BatchResponseParser_js_1 = require_BatchResponseParser(); + var BatchUtils_js_1 = require_BatchUtils(); + var BlobBatch_js_1 = require_BlobBatch(); + var tracing_js_1 = require_tracing(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var StorageContextClient_js_1 = require_StorageContextClient(); + var Pipeline_js_1 = require_Pipeline(); + var utils_common_js_1 = require_utils_common(); + var BlobBatchClient = class { + serviceOrContainerContext; + constructor(url, credentialOrPipeline, options) { + let pipeline; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } else if (!credentialOrPipeline) { + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + } else { + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipeline, options); + } + const storageClientContext = new StorageContextClient_js_1.StorageContextClient(url, (0, Pipeline_js_1.getCoreClientOptions)(pipeline)); + const path4 = (0, utils_common_js_1.getURLPath)(url); + if (path4 && path4 !== "/") { + this.serviceOrContainerContext = storageClientContext.container; + } else { + 
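// No container segment in the URL path: submit batches at the service endpoint.
+ 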
this.serviceOrContainerContext = storageClientContext.service; + } } /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Downloads an Azure Blob to a local file. - * Fails if the the given file path already exits. - * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. - * - * @param filePath - - * @param offset - From which position of the block blob to download. - * @param count - How much data to be downloaded. Will download to the end when passing undefined. - * @param options - Options to Blob download options. - * @returns The response data for blob download operation, - * but with readableStreamBody set to undefined since its - * content is already read and written into a local file - * at the specified path. + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. */ - async downloadToFile(filePath, offset = 0, count, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-downloadToFile", options, async (updatedOptions) => { - const response = await this.download(offset, count, { - ...options, - tracingOptions: updatedOptions.tracingOptions - }); - if (response.readableStreamBody) { - await (0, utils_js_1.readStreamToLocalFile)(response.readableStreamBody, filePath); - } - response.blobDownloadStream = void 0; - return response; - }); + createBatch() { + return new BlobBatch_js_1.BlobBatch(); } - getBlobAndContainerNamesFromUrl() { - let containerName; - let blobName; - try { - const parsedUrl = new URL(this.url); - if (parsedUrl.host.split(".")[1] === "blob") { - const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); - containerName = pathComponents[1]; - blobName = pathComponents[3]; - } else if ((0, utils_common_js_1.isIpEndpointStyle)(parsedUrl)) { - const pathComponents = parsedUrl.pathname.match("/([^/]*)/([^/]*)(/(.*))?"); - containerName = pathComponents[2]; - blobName = pathComponents[4]; + async deleteBlobs(urlsOrBlobClients, credentialOrOptions, options) { + const batch = new BlobBatch_js_1.BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); } else { - const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); - containerName = pathComponents[1]; - blobName = pathComponents[3]; - } - containerName = decodeURIComponent(containerName); - blobName = decodeURIComponent(blobName); - blobName = blobName.replace(/\\/g, "/"); - if (!containerName) { - throw new Error("Provided containerName is invalid."); + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); } - return { blobName, containerName }; - } catch (error3) { - throw new Error("Unable to extract blobName and containerName with provided information."); } + return this.submitBatch(batch); } - /** - * Asynchronously copies a blob to a destination within the storage account. - * In version 2012-02-12 and later, the source for a Copy Blob operation can be - * a committed blob in any Azure storage account. - * Beginning with version 2015-02-21, the source for a Copy Blob operation can be - * an Azure file in any Azure storage account. - * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob - * operation to copy from another storage account. - * @see https://learn.microsoft.com/rest/api/storageservices/copy-blob - * - * @param copySource - url to the source Azure Blob/File. 
- * @param options - Optional options to the Blob Start Copy From URL operation. - */ - async startCopyFromURL(copySource, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-startCopyFromURL", options, async (updatedOptions) => { - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - return (0, utils_common_js_1.assertResponse)(await this.blobContext.startCopyFromURL(copySource, { - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - metadata: options.metadata, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - sourceIfTags: options.sourceConditions.tagConditions - }, - immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, - immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, - legalHold: options.legalHold, - rehydratePriority: options.rehydratePriority, - tier: (0, models_js_1.toAccessTier)(options.tier), - blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), - sealBlob: options.sealBlob, - tracingOptions: updatedOptions.tracingOptions - })); - }); + async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, options) { + const batch = new BlobBatch_js_1.BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); + } else { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); + } + } + return this.submitBatch(batch); } /** - * Only available for BlobClient constructed with a shared key credential. - * - * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the shared key credential of the client. + * Submit batch request which consists of multiple subrequests. * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * Get `blobBatchClient` and other details before running the snippets. + * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` * - * @param options - Optional parameters. - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. - */ - generateSasUrl(options) { - return new Promise((resolve2) => { - if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { - throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); - } - const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ - containerName: this._containerName, - blobName: this._name, - snapshotTime: this._snapshot, - versionId: this._versionId, - ...options - }, this.credential).toString(); - resolve2((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); - }); - } - /** - * Only available for BlobClient constructed with a shared key credential. + * Example usage: * - * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on - * the client properties and parameters passed in. 
The SAS is signed by the shared key credential of the client. + * ```ts snippet:BlobBatchClientSubmitBatch + * import { DefaultAzureCredential } from "@azure/identity"; + * import { BlobServiceClient, BlobBatch } from "@azure/storage-blob"; * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * const account = ""; + * const credential = new DefaultAzureCredential(); + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * credential, + * ); * - * @param options - Optional parameters. - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. - */ - /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ - generateSasStringToSign(options) { - if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { - throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); - } - return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ - containerName: this._containerName, - blobName: this._name, - snapshotTime: this._snapshot, - versionId: this._versionId, - ...options - }, this.credential).stringToSign; - } - /** + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobBatchClient = containerClient.getBlobBatchClient(); * - * Generates a Blob Service Shared Access Signature (SAS) URI based on - * the client properties and parameters passed in. The SAS is signed by the input user delegation key. + * const batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob("", credential); + * await batchRequest.deleteBlob("", credential, { + * deleteSnapshots: "include", + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * Example using a lease: * - * @param options - Optional parameters. - * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. - */ - generateUserDelegationSasUrl(options, userDelegationKey) { - return new Promise((resolve2) => { - const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ - containerName: this._containerName, - blobName: this._name, - snapshotTime: this._snapshot, - versionId: this._versionId, - ...options - }, userDelegationKey, this.accountName).toString(); - resolve2((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); - }); - } - /** - * Only available for BlobClient constructed with a shared key credential. + * ```ts snippet:BlobBatchClientSubmitBatchWithLease + * import { DefaultAzureCredential } from "@azure/identity"; + * import { BlobServiceClient, BlobBatch } from "@azure/storage-blob"; * - * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on - * the client properties and parameters passed in. The SAS is signed by the input user delegation key. 
+ * const account = ""; + * const credential = new DefaultAzureCredential(); + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * credential, + * ); * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blobBatchClient = containerClient.getBlobBatchClient(); + * const blobClient = containerClient.getBlobClient(""); * - * @param options - Optional parameters. - * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. - */ - generateUserDelegationSasStringToSign(options, userDelegationKey) { - return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ - containerName: this._containerName, - blobName: this._name, - snapshotTime: this._snapshot, - versionId: this._versionId, - ...options - }, userDelegationKey, this.accountName).stringToSign; - } - /** - * Delete the immutablility policy on the blob. + * const batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blobClient, "Cool"); + * await batchRequest.setBlobAccessTier(blobClient, "Cool", { + * conditions: { leaseId: "" }, + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` * - * @param options - Optional options to delete immutability policy on the blob. - */ - async deleteImmutabilityPolicy(options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-deleteImmutabilityPolicy", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.deleteImmutabilityPolicy({ - tracingOptions: updatedOptions.tracingOptions - })); - }); - } - /** - * Set immutability policy on the blob. + * @see https://learn.microsoft.com/rest/api/storageservices/blob-batch * - * @param options - Optional options to set immutability policy on the blob. + * @param batchRequest - A set of Delete or SetTier operations. 
+ * @param options - */ - async setImmutabilityPolicy(immutabilityPolicy, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-setImmutabilityPolicy", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.setImmutabilityPolicy({ - immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, - immutabilityPolicyMode: immutabilityPolicy.policyMode, - tracingOptions: updatedOptions.tracingOptions + async submitBatch(batchRequest, options = {}) { + if (!batchRequest || batchRequest.getSubRequests().size === 0) { + throw new RangeError("Batch request should contain one or more sub requests."); + } + return tracing_js_1.tracingClient.withSpan("BlobBatchClient-submitBatch", options, async (updatedOptions) => { + const batchRequestBody = batchRequest.getHttpRequestBody(); + const rawBatchResponse = (0, utils_common_js_1.assertResponse)(await this.serviceOrContainerContext.submitBatch((0, BatchUtils_js_1.utf8ByteLength)(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, { + ...updatedOptions })); + const batchResponseParser = new BatchResponseParser_js_1.BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); + const responseSummary = await batchResponseParser.parseBatchResponse(); + const res = { + _response: rawBatchResponse._response, + contentType: rawBatchResponse.contentType, + errorCode: rawBatchResponse.errorCode, + requestId: rawBatchResponse.requestId, + clientRequestId: rawBatchResponse.clientRequestId, + version: rawBatchResponse.version, + subResponses: responseSummary.subResponses, + subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, + subResponsesFailedCount: responseSummary.subResponsesFailedCount + }; + return res; }); } + }; + exports2.BlobBatchClient = BlobBatchClient; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/ContainerClient.js +var require_ContainerClient = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/ContainerClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ContainerClient = void 0; + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); + var core_auth_1 = require_commonjs7(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var Pipeline_js_1 = require_Pipeline(); + var StorageClient_js_1 = require_StorageClient(); + var tracing_js_1 = require_tracing(); + var utils_common_js_1 = require_utils_common(); + var BlobSASSignatureValues_js_1 = require_BlobSASSignatureValues(); + var BlobLeaseClient_js_1 = require_BlobLeaseClient(); + var Clients_js_1 = require_Clients(); + var BlobBatchClient_js_1 = require_BlobBatchClient(); + var ContainerClient = class extends StorageClient_js_1.StorageClient { /** - * Set legal hold on the blob. - * - * @param options - Optional options to set legal hold on the blob. + * containerContext provided by protocol layer. 
*/ - async setLegalHold(legalHoldEnabled, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-setLegalHold", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.setLegalHold(legalHoldEnabled, { - tracingOptions: updatedOptions.tracingOptions - })); - }); - } + containerContext; + _containerName; /** - * The Get Account Information operation returns the sku name and account kind - * for the specified account. - * The Get Account Information operation is available on service versions beginning - * with version 2018-03-28. - * @see https://learn.microsoft.com/rest/api/storageservices/get-account-information - * - * @param options - Options to the Service Get Account Info operation. - * @returns Response data for the Service Get Account Info operation. + * The name of the container. */ - async getAccountInfo(options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobClient-getAccountInfo", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blobContext.getAccountInfo({ - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions - })); - }); + get containerName() { + return this._containerName; } - }; - exports2.BlobClient = BlobClient; - var AppendBlobClient = class _AppendBlobClient extends BlobClient { - /** - * appendBlobsContext provided by protocol layer. - */ - appendBlobContext; - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, options) { let pipeline; let url; options = options || {}; @@ -105417,19 +105551,17 @@ var require_Clients = __commonJS({ pipeline = credentialOrPipelineOrContainerName; } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { url = urlOrConnectionString; - options = blobNameOrOptions; pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { url = urlOrConnectionString; pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); - } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { + } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string") { const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { if (core_util_1.isNodeLike) { const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + url = (0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)); if (!options.proxyOptions) { options.proxyOptions = (0, 
core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); } @@ -105437,39 +105569,133 @@ var require_Clients = __commonJS({ } else { throw new Error("Account connection string is only supported in Node.js environment"); } - } else if (extractedCreds.kind === "SASConnString") { - url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; - pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); - } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } else if (extractedCreds.kind === "SASConnString") { + url = (0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)) + "?" + extractedCreds.accountSas; + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + } else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } else { + throw new Error("Expecting non-empty strings for containerName parameter"); + } + super(url, pipeline); + this._containerName = this.getContainerNameFromUrl(); + this.containerContext = this.storageClientContext.container; + } + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://learn.microsoft.com/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * + * @param options - Options to Container Create operation. + * + * + * Example usage: + * + * ```ts snippet:ContainerClientCreate + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); + * ``` + */ + async create(options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-create", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.create(updatedOptions)); + }); + } + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, it is not changed. 
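+ *
+ * A minimal usage sketch (illustrative only; assumes a `containerClient` obtained
+ * via blobServiceClient.getContainerClient() as in the create() example above).
+ * The response's `succeeded` flag distinguishes a fresh create from a container
+ * that already existed:
+ *
+ * ```ts
+ * const res = await containerClient.createIfNotExists();
+ * console.log(res.succeeded ? "Container created" : "Container already existed");
+ * ```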
+ * @see https://learn.microsoft.com/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * + * @param options - + */ + async createIfNotExists(options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-createIfNotExists", options, async (updatedOptions) => { + try { + const res = await this.create(updatedOptions); + return { + succeeded: true, + ...res, + _response: res._response + // _response is made non-enumerable + }; + } catch (e) { + if (e.details?.errorCode === "ContainerAlreadyExists") { + return { + succeeded: false, + ...e.response?.parsedHeaders, + _response: e.response + }; + } else { + throw e; + } } - } else { - throw new Error("Expecting non-empty strings for containerName and blobName parameters"); - } - super(url, pipeline); - this.appendBlobContext = this.storageClientContext.appendBlob; + }); } /** - * Creates a new AppendBlobClient object identical to the source but with the - * specified snapshot timestamp. - * Provide "" will remove the snapshot and return a Client to the base blob. + * Returns true if the Azure container resource represented by this client exists; false otherwise. * - * @param snapshot - The snapshot timestamp. - * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa new containers with the same name might be added by other clients or + * applications after this function completes. + * + * @param options - */ - withSnapshot(snapshot) { - return new _AppendBlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); + async exists(options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-exists", options, async (updatedOptions) => { + try { + await this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions + }); + return true; + } catch (e) { + if (e.statusCode === 404) { + return false; + } + throw e; + } + }); } /** - * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. - * @see https://learn.microsoft.com/rest/api/storageservices/put-blob + * Creates a {@link BlobClient} * - * @param options - Options to the Append Block Create operation. + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. 
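+ *
+ * Sketch (the blob name is a placeholder; the client is built by appending the
+ * escaped blob name to this container's URL, as implemented below):
+ *
+ * ```ts
+ * const blobClient = containerClient.getBlobClient("my-blob.txt");
+ * console.log(blobClient.url);
+ * ```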
+ */ + getBlobClient(blobName) { + return new Clients_js_1.BlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); + } + /** + * Creates an {@link AppendBlobClient} + * + * @param blobName - An append blob name + */ + getAppendBlobClient(blobName) { + return new Clients_js_1.AppendBlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); + } + /** + * Creates a {@link BlockBlobClient} + * + * @param blobName - A block blob name * * * Example usage: * - * ```ts snippet:ClientsCreateAppendBlob + * ```ts snippet:ClientsUpload * import { BlobServiceClient } from "@azure/storage-blob"; * import { DefaultAzureCredential } from "@azure/identity"; * @@ -105482,57 +105708,85 @@ var require_Clients = __commonJS({ * const containerName = ""; * const blobName = ""; * const containerClient = blobServiceClient.getContainerClient(containerName); + * const blockBlobClient = containerClient.getBlockBlobClient(blobName); * - * const appendBlobClient = containerClient.getAppendBlobClient(blobName); - * await appendBlobClient.create(); + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); * ``` */ - async create(options = {}) { - options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("AppendBlobClient-create", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.create(0, { + getBlockBlobClient(blobName) { + return new Clients_js_1.BlockBlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); + } + /** + * Creates a {@link PageBlobClient} + * + * @param blobName - A page blob name + */ + getPageBlobClient(blobName) { + return new Clients_js_1.PageBlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); + } + /** + * Returns all user-defined metadata and system properties for the specified + * container. The data returned does not include the container's list of blobs. + * @see https://learn.microsoft.com/rest/api/storageservices/get-container-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Options to Container Get Properties operation. + */ + async getProperties(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + return tracing_js_1.tracingClient.withSpan("ContainerClient-getProperties", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.getProperties({ + abortSignal: options.abortSignal, + ...options.conditions, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + /** + * Marks the specified container for deletion. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://learn.microsoft.com/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. 
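+ *
+ * Minimal sketch (assumes the container exists and is not leased):
+ *
+ * ```ts
+ * const deleteResponse = await containerClient.delete();
+ * console.log(`Deleted container, requestId: ${deleteResponse.requestId}`);
+ * ```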
+ */ + async delete(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + return tracing_js_1.tracingClient.withSpan("ContainerClient-delete", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.delete({ abortSignal: options.abortSignal, - blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, - metadata: options.metadata, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, - immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, - legalHold: options.legalHold, - blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), + modifiedAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions })); }); } /** - * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. - * If the blob with the same name already exists, the content of the existing blob will remain unchanged. - * @see https://learn.microsoft.com/rest/api/storageservices/put-blob + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://learn.microsoft.com/rest/api/storageservices/delete-container * - * @param options - + * @param options - Options to Container Delete operation. */ - async createIfNotExists(options = {}) { - const conditions = { ifNoneMatch: constants_js_1.ETagAny }; - return tracing_js_1.tracingClient.withSpan("AppendBlobClient-createIfNotExists", options, async (updatedOptions) => { + async deleteIfExists(options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-deleteIfExists", options, async (updatedOptions) => { try { - const res = (0, utils_common_js_1.assertResponse)(await this.create({ - ...updatedOptions, - conditions - })); + const res = await this.delete(updatedOptions); return { succeeded: true, ...res, _response: res._response - // _response is made non-enumerable }; } catch (e) { - if (e.details?.errorCode === "BlobAlreadyExists") { + if (e.details?.errorCode === "ContainerNotFound") { return { succeeded: false, ...e.response?.parsedHeaders, @@ -105544,207 +105798,318 @@ var require_Clients = __commonJS({ }); } /** - * Seals the append blob, making it read only. + * Sets one or more user-defined name-value pairs for the specified container. * - * @param options - + * If no option provided, or no metadata defined in the parameter, the container + * metadata will be removed. + * + * @see https://learn.microsoft.com/rest/api/storageservices/set-container-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Options to Container Set Metadata operation. 
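+ *
+ * Usage sketch (metadata keys and values here are placeholders; calling with no
+ * argument clears the existing metadata, as described above):
+ *
+ * ```ts
+ * await containerClient.setMetadata({ project: "demo" });
+ * ```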
*/ - async seal(options = {}) { - options.conditions = options.conditions || {}; - return tracing_js_1.tracingClient.withSpan("AppendBlobClient-seal", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.seal({ + async setMetadata(metadata, options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + if (options.conditions.ifUnmodifiedSince) { + throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); + } + return tracing_js_1.tracingClient.withSpan("ContainerClient-setMetadata", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.setMetadata({ abortSignal: options.abortSignal, - appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, + metadata, + modifiedAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions })); }); } /** - * Commits a new block of data to the end of the existing append blob. - * @see https://learn.microsoft.com/rest/api/storageservices/append-block - * - * @param body - Data to be appended. - * @param contentLength - Length of the body in bytes. - * @param options - Options to the Append Block operation. - * - * - * Example usage: - * - * ```ts snippet:ClientsAppendBlock - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; - * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); - * - * const containerName = ""; - * const blobName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. * - * const content = "Hello World!"; + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". * - * // Create a new append blob and append data to the blob. - * const newAppendBlobClient = containerClient.getAppendBlobClient(blobName); - * await newAppendBlobClient.create(); - * await newAppendBlobClient.appendBlock(content, content.length); + * @see https://learn.microsoft.com/rest/api/storageservices/get-container-acl * - * // Append data to an existing append blob. - * const existingAppendBlobClient = containerClient.getAppendBlobClient(blobName); - * await existingAppendBlobClient.appendBlock(content, content.length); - * ``` + * @param options - Options to Container Get Access Policy operation. 
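+ *
+ * Sketch of reading the parsed result (fields mirror the response object assembled below):
+ *
+ * ```ts
+ * const accessPolicy = await containerClient.getAccessPolicy();
+ * console.log(`Public access: ${accessPolicy.blobPublicAccess}`);
+ * for (const identifier of accessPolicy.signedIdentifiers) {
+ *   console.log(identifier.id, identifier.accessPolicy?.permissions);
+ * }
+ * ```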
*/ - async appendBlock(body, contentLength, options = {}) { - options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("AppendBlobClient-appendBlock", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.appendBlock(contentLength, body, { + async getAccessPolicy(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + return tracing_js_1.tracingClient.withSpan("ContainerClient-getAccessPolicy", options, async (updatedOptions) => { + const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.getAccessPolicy({ abortSignal: options.abortSignal, - appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - requestOptions: { - onUploadProgress: options.onProgress - }, - transactionalContentMD5: options.transactionalContentMD5, - transactionalContentCrc64: options.transactionalContentCrc64, - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions })); + const res = { + _response: response._response, + blobPublicAccess: response.blobPublicAccess, + date: response.date, + etag: response.etag, + errorCode: response.errorCode, + lastModified: response.lastModified, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + signedIdentifiers: [], + version: response.version + }; + for (const identifier of response) { + let accessPolicy = void 0; + if (identifier.accessPolicy) { + accessPolicy = { + permissions: identifier.accessPolicy.permissions + }; + if (identifier.accessPolicy.expiresOn) { + accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); + } + if (identifier.accessPolicy.startsOn) { + accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); + } + } + res.signedIdentifiers.push({ + accessPolicy, + id: identifier.id + }); + } + return res; }); } /** - * The Append Block operation commits a new block of data to the end of an existing append blob - * where the contents are read from a source url. - * @see https://learn.microsoft.com/rest/api/storageservices/append-block-from-url + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. * - * @param sourceURL - - * The url to the blob that will be the source of the copy. A source blob in the same storage account can - * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob - * must either be public or must be authenticated via a shared access signature. If the source blob is - * public, no authentication is required to perform the operation. - * @param sourceOffset - Offset in source to be appended - * @param count - Number of bytes to be appended as a block - * @param options - + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl provided, the existing container ACL will be + * removed. + * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. 
+ * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. + * @see https://learn.microsoft.com/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. + * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. */ - async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { + async setAccessPolicy(access, containerAcl, options = {}) { options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("AppendBlobClient-appendBlockFromURL", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, { + return tracing_js_1.tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => { + const acl = []; + for (const identifier of containerAcl || []) { + acl.push({ + accessPolicy: { + expiresOn: identifier.accessPolicy.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(identifier.accessPolicy.expiresOn) : "", + permissions: identifier.accessPolicy.permissions, + startsOn: identifier.accessPolicy.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(identifier.accessPolicy.startsOn) : "" + }, + id: identifier.id + }); + } + return (0, utils_common_js_1.assertResponse)(await this.containerContext.setAccessPolicy({ abortSignal: options.abortSignal, - sourceRange: (0, Range_js_1.rangeToString)({ offset: sourceOffset, count }), - sourceContentMD5: options.sourceContentMD5, - sourceContentCrc64: options.sourceContentCrc64, + access, + containerAcl: acl, leaseAccessConditions: options.conditions, - appendPositionAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions?.ifMatch, - sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince - }, - copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - fileRequestIntent: options.sourceShareTokenIntent, + modifiedAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions })); }); } - }; - exports2.AppendBlobClient = AppendBlobClient; - var BlockBlobClient = class _BlockBlobClient extends BlobClient { /** - * blobContext provided by protocol layer. + * Get a {@link BlobLeaseClient} that manages leases on the container. * - * Note. Ideally BlobClient should set BlobClient.blobContext to protected. However, API - * extractor has issue blocking that. Here we redecelare _blobContext in BlockBlobClient. + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the container. 
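+ *
+ * Sketch (a 15-second lease; the duration and the follow-up release are illustrative):
+ *
+ * ```ts
+ * const leaseClient = containerClient.getBlobLeaseClient();
+ * await leaseClient.acquireLease(15);
+ * // ... operations guarded by the lease ...
+ * await leaseClient.releaseLease();
+ * ```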
 */
- _blobContext;
+ getBlobLeaseClient(proposeLeaseId) {
+ return new BlobLeaseClient_js_1.BlobLeaseClient(this, proposeLeaseId);
+ }
 /**
- * blockBlobContext provided by protocol layer.
+ * Creates a new block blob, or updates the content of an existing block blob.
+ *
+ * Updating an existing block blob overwrites any existing metadata on the blob.
+ * Partial updates are not supported; the content of the existing blob is
+ * overwritten with the new content. To perform a partial update of a block blob's contents,
+ * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}.
+ *
+ * This is a non-parallel uploading method; please use {@link BlockBlobClient.uploadFile},
+ * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better
+ * performance with concurrent uploads.
+ *
+ * @see https://learn.microsoft.com/rest/api/storageservices/put-blob
+ *
+ * @param blobName - Name of the block blob to create or update.
+ * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function
+ * which returns a new Readable stream whose offset is from data source beginning.
+ * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a
+ * string including non-Base64/Hex-encoded characters.
+ * @param options - Options to configure the Block Blob Upload operation.
+ * @returns Block Blob upload response data and the corresponding BlockBlobClient instance.
 */
- blockBlobContext;
- constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) {
- let pipeline;
- let url;
- options = options || {};
- if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) {
- url = urlOrConnectionString;
- pipeline = credentialOrPipelineOrContainerName;
- } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) {
- url = urlOrConnectionString;
- options = blobNameOrOptions;
- pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options);
- } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") {
- url = urlOrConnectionString;
- if (blobNameOrOptions && typeof blobNameOrOptions !== "string") {
- options = blobNameOrOptions;
+ async uploadBlockBlob(blobName, body, contentLength, options = {}) {
+ return tracing_js_1.tracingClient.withSpan("ContainerClient-uploadBlockBlob", options, async (updatedOptions) => {
+ const blockBlobClient = this.getBlockBlobClient(blobName);
+ const response = await blockBlobClient.upload(body, contentLength, updatedOptions);
+ return {
+ blockBlobClient,
+ response
+ };
+ });
+ }
+ /**
+ * Marks the specified blob or snapshot for deletion. The blob is later deleted
+ * during garbage collection. Note that in order to delete a blob, you must delete
+ * all of its snapshots. You can delete both at the same time with the Delete
+ * Blob operation.
+ * @see https://learn.microsoft.com/rest/api/storageservices/delete-blob
+ *
+ * @param blobName -
+ * @param options - Options to Blob Delete operation.
+ * @returns Block blob deletion response data.
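+ *
+ * Sketch (the blob name is a placeholder; pass `versionId` in the options to target
+ * a specific blob version, as handled below):
+ *
+ * ```ts
+ * await containerClient.deleteBlob("my-blob.txt");
+ * ```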
+ */
+ async deleteBlob(blobName, options = {}) {
+ return tracing_js_1.tracingClient.withSpan("ContainerClient-deleteBlob", options, async (updatedOptions) => {
+ let blobClient = this.getBlobClient(blobName);
+ if (options.versionId) {
+ blobClient = blobClient.withVersion(options.versionId);
 }
- pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options);
- } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") {
- const containerName = credentialOrPipelineOrContainerName;
- const blobName = blobNameOrOptions;
- const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString);
- if (extractedCreds.kind === "AccountConnString") {
- if (core_util_1.isNodeLike) {
- const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey);
- url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName));
- if (!options.proxyOptions) {
- options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri);
- }
- pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options);
- } else {
- throw new Error("Account connection string is only supported in Node.js environment");
+ return blobClient.delete(updatedOptions);
+ });
+ }
+ /**
+ * listBlobFlatSegment returns a single segment of blobs starting from the
+ * specified Marker. Use an empty Marker to start enumeration from the beginning.
+ * After getting a segment, process it, and then call listBlobFlatSegment again
+ * (passing the previously-returned Marker) to get the next segment.
+ * @see https://learn.microsoft.com/rest/api/storageservices/list-blobs
+ *
+ * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.
+ * @param options - Options to Container List Blob Flat Segment operation.
+ */
+ async listBlobFlatSegment(marker, options = {}) {
+ return tracing_js_1.tracingClient.withSpan("ContainerClient-listBlobFlatSegment", options, async (updatedOptions) => {
+ const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.listBlobFlatSegment({
+ marker,
+ ...options,
+ tracingOptions: updatedOptions.tracingOptions
+ }));
+ const wrappedResponse = {
+ ...response,
+ _response: {
+ ...response._response,
+ parsedBody: (0, utils_common_js_1.ConvertInternalResponseOfListBlobFlat)(response._response.parsedBody)
+ },
+ // _response is made non-enumerable
+ segment: {
+ ...response.segment,
+ blobItems: response.segment.blobItems.map((blobItemInternal) => {
+ const blobItem = {
+ ...blobItemInternal,
+ name: (0, utils_common_js_1.BlobNameToString)(blobItemInternal.name),
+ tags: (0, utils_common_js_1.toTags)(blobItemInternal.blobTags),
+ objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(blobItemInternal.objectReplicationMetadata)
+ };
+ return blobItem;
+ })
+ }
+ };
+ return wrappedResponse;
+ });
+ }
+ /**
+ * listBlobHierarchySegment returns a single segment of blobs starting from
+ * the specified Marker. Use an empty Marker to start enumeration from the
+ * beginning.
+ * After getting a segment, process it, and then call listBlobHierarchySegment
+ * again (passing the previously-returned Marker) to get the next segment.
+ * @see https://learn.microsoft.com/rest/api/storageservices/list-blobs
+ *
+ * @param delimiter - The character or string used to define the virtual hierarchy
+ * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.
+ * @param options - Options to Container List Blob Hierarchy Segment operation.
+ */
+ async listBlobHierarchySegment(delimiter, marker, options = {}) {
+ return tracing_js_1.tracingClient.withSpan("ContainerClient-listBlobHierarchySegment", options, async (updatedOptions) => {
+ const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.listBlobHierarchySegment(delimiter, {
+ marker,
+ ...options,
+ tracingOptions: updatedOptions.tracingOptions
+ }));
+ const wrappedResponse = {
+ ...response,
+ _response: {
+ ...response._response,
+ parsedBody: (0, utils_common_js_1.ConvertInternalResponseOfListBlobHierarchy)(response._response.parsedBody)
+ },
+ // _response is made non-enumerable
+ segment: {
+ ...response.segment,
+ blobItems: response.segment.blobItems.map((blobItemInternal) => {
+ const blobItem = {
+ ...blobItemInternal,
+ name: (0, utils_common_js_1.BlobNameToString)(blobItemInternal.name),
+ tags: (0, utils_common_js_1.toTags)(blobItemInternal.blobTags),
+ objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(blobItemInternal.objectReplicationMetadata)
+ };
+ return blobItem;
+ }),
+ blobPrefixes: response.segment.blobPrefixes?.map((blobPrefixInternal) => {
+ const blobPrefix = {
+ ...blobPrefixInternal,
+ name: (0, utils_common_js_1.BlobNameToString)(blobPrefixInternal.name)
+ };
+ return blobPrefix;
+ })
 }
- } else if (extractedCreds.kind === "SASConnString") {
- url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas;
- pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options);
- } else {
- throw new Error("Connection string must be either an Account connection string or a SAS connection string");
- }
- } else {
- throw new Error("Expecting non-empty strings for containerName and blobName parameters");
- }
- super(url, pipeline);
- this.blockBlobContext = this.storageClientContext.blockBlob;
- this._blobContext = this.storageClientContext.blob;
+ };
+ return wrappedResponse;
+ });
 }
 /**
- * Creates a new BlockBlobClient object identical to the source but with the
- * specified snapshot timestamp.
- * Provide "" will remove the snapshot and return a URL to the base blob.
+ * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse
 *
- * @param snapshot - The snapshot timestamp.
- * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp.
+ * @param marker - A string value that identifies the portion of
+ * the list of blobs to be returned with the next listing operation. The
+ * operation returns the ContinuationToken value within the response body if the
+ * listing operation did not return all blobs remaining to be listed
+ * with the current page. The ContinuationToken value can be used as the value for
+ * the marker parameter in a subsequent call to request the next page of list
+ * items.
The marker value is opaque to the client. + * @param options - Options to list blobs operation. */ - withSnapshot(snapshot) { - return new _BlockBlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); + async *listSegments(marker, options = {}) { + let listBlobsFlatSegmentResponse; + if (!!marker || marker === void 0) { + do { + listBlobsFlatSegmentResponse = await this.listBlobFlatSegment(marker, options); + marker = listBlobsFlatSegmentResponse.continuationToken; + yield await listBlobsFlatSegmentResponse; + } while (marker); + } } /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. + * Returns an AsyncIterableIterator of {@link BlobItem} objects * - * Quick query for a JSON or CSV formatted blob. + * @param options - Options to list blobs operation. + */ + async *listItems(options = {}) { + let marker; + for await (const listBlobsFlatSegmentResponse of this.listSegments(marker, options)) { + yield* listBlobsFlatSegmentResponse.segment.blobItems; + } + } + /** + * Returns an async iterable iterator to list all the blobs + * under the specified account. * - * Example usage (Node.js): + * .byPage() returns an async iterable iterator to list the blobs in pages. * - * ```ts snippet:ClientsQuery + * ```ts snippet:ReadmeSampleListBlobs_Multiple * import { BlobServiceClient } from "@azure/storage-blob"; * import { DefaultAzureCredential } from "@azure/identity"; * @@ -105755,87 +106120,175 @@ var require_Clients = __commonJS({ * ); * * const containerName = ""; - * const blobName = ""; * const containerClient = blobServiceClient.getContainerClient(containerName); - * const blockBlobClient = containerClient.getBlockBlobClient(blobName); * - * // Query and convert a blob to a string - * const queryBlockBlobResponse = await blockBlobClient.query("select from BlobStorage"); - * if (queryBlockBlobResponse.readableStreamBody) { - * const downloadedBuffer = await streamToBuffer(queryBlockBlobResponse.readableStreamBody); - * const downloaded = downloadedBuffer.toString(); - * console.log(`Query blob content: ${downloaded}`); + * // Example using `for await` syntax + * let i = 1; + * const blobs = containerClient.listBlobsFlat(); + * for await (const blob of blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); * } * - * async function streamToBuffer(readableStream: NodeJS.ReadableStream): Promise { - * return new Promise((resolve, reject) => { - * const chunks: Buffer[] = []; - * readableStream.on("data", (data) => { - * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); - * }); - * readableStream.on("end", () => { - * resolve(Buffer.concat(chunks)); - * }); - * readableStream.on("error", reject); - * }); + * // Example using `iter.next()` syntax + * i = 1; + * const iter = containerClient.listBlobsFlat(); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Blob ${i++}: ${value.name}`); + * ({ value, done } = await iter.next()); + * } + * + * // Example using `byPage()` syntax + * i = 1; + * for await (const page of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of page.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Example using paging with a marker + * i = 1; + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * // Prints 2 blob names + * if (response.segment.blobItems) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * // Prints 10 blob names + * if (response.segment.blobItems) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * } * ``` * - * @param query - - * @param options - + * @param options - Options to list blobs. + * @returns An asyncIterableIterator that supports paging. */ - async query(query, options = {}) { - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - if (!core_util_1.isNodeLike) { - throw new Error("This operation currently is only supported in Node.js."); + listBlobsFlat(options = {}) { + const include = []; + if (options.includeCopy) { + include.push("copy"); } - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-query", options, async (updatedOptions) => { - const response = (0, utils_common_js_1.assertResponse)(await this._blobContext.query({ - abortSignal: options.abortSignal, - queryRequest: { - queryType: "SQL", - expression: query, - inputSerialization: (0, utils_common_js_1.toQuerySerialization)(options.inputTextConfiguration), - outputSerialization: (0, utils_common_js_1.toQuerySerialization)(options.outputTextConfiguration) - }, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - cpkInfo: options.customerProvidedKey, - tracingOptions: updatedOptions.tracingOptions - })); - return new BlobQueryResponse_js_1.BlobQueryResponse(response, { - abortSignal: options.abortSignal, - onProgress: options.onProgress, - onError: options.onError - }); - }); + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === 
"") { + options.prefix = void 0; + } + const updatedOptions = { + ...options, + ...include.length > 0 ? { include } : {} + }; + const iter = this.listItems(updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...updatedOptions + }); + } + }; } /** - * Creates a new block blob, or updates the content of an existing block blob. - * Updating an existing block blob overwrites any existing metadata on the blob. - * Partial updates are not supported; the content of the existing blob is - * overwritten with the new content. To perform a partial update of a block blob's, - * use {@link stageBlock} and {@link commitBlockList}. - * - * This is a non-parallel uploading method, please use {@link uploadFile}, - * {@link uploadStream} or {@link uploadBrowserData} for better performance - * with concurrency uploading. + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse * - * @see https://learn.microsoft.com/rest/api/storageservices/put-blob + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + async *listHierarchySegments(delimiter, marker, options = {}) { + let listBlobsHierarchySegmentResponse; + if (!!marker || marker === void 0) { + do { + listBlobsHierarchySegmentResponse = await this.listBlobHierarchySegment(delimiter, marker, options); + marker = listBlobsHierarchySegmentResponse.continuationToken; + yield await listBlobsHierarchySegmentResponse; + } while (marker); + } + } + /** + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. * - * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function - * which returns a new Readable stream whose offset is from data source beginning. - * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a - * string including non non-Base64/Hex-encoded characters. - * @param options - Options to the Block Blob Upload operation. - * @returns Response data for the Block Blob Upload operation. + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. 
+ */
+ async *listItemsByHierarchy(delimiter, options = {}) {
+ let marker;
+ for await (const listBlobsHierarchySegmentResponse of this.listHierarchySegments(delimiter, marker, options)) {
+ const segment = listBlobsHierarchySegmentResponse.segment;
+ if (segment.blobPrefixes) {
+ for (const prefix of segment.blobPrefixes) {
+ yield {
+ kind: "prefix",
+ ...prefix
+ };
+ }
+ }
+ for (const blob of segment.blobItems) {
+ yield { kind: "blob", ...blob };
+ }
+ }
+ }
+ /**
+ * Returns an async iterable iterator to list all the blobs by hierarchy
+ * under the specified account.
+ *
- * Example usage:
+ * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages.
 *
- * ```ts snippet:ClientsUpload
+ * ```ts snippet:ReadmeSampleListBlobsByHierarchy
 * import { BlobServiceClient } from "@azure/storage-blob";
 * import { DefaultAzureCredential } from "@azure/identity";
 *
@@ -105846,6927 +106299,6271 @@ var require_Clients = __commonJS({
 * );
 *
 * const containerName = "";
- * const blobName = "";
 * const containerClient = blobServiceClient.getContainerClient(containerName);
- * const blockBlobClient = containerClient.getBlockBlobClient(blobName);
 *
- * const content = "Hello world!";
- * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);
- * ```
- */
- async upload(body, contentLength, options = {}) {
- options.conditions = options.conditions || {};
- (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps);
- return tracing_js_1.tracingClient.withSpan("BlockBlobClient-upload", options, async (updatedOptions) => {
- return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.upload(contentLength, body, {
- abortSignal: options.abortSignal,
- blobHttpHeaders: options.blobHTTPHeaders,
- leaseAccessConditions: options.conditions,
- metadata: options.metadata,
- modifiedAccessConditions: {
- ...options.conditions,
- ifTags: options.conditions?.tagConditions
- },
- requestOptions: {
- onUploadProgress: options.onProgress
- },
- cpkInfo: options.customerProvidedKey,
- encryptionScope: options.encryptionScope,
- immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,
- immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,
- legalHold: options.legalHold,
- tier: (0, models_js_1.toAccessTier)(options.tier),
- blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags),
- tracingOptions: updatedOptions.tracingOptions
- }));
- });
- }
- /**
- * Creates a new Block Blob where the contents of the blob are read from a given URL.
- * This API is supported beginning with the 2020-04-08 version. Partial updates
- * are not supported with Put Blob from URL; the content of an existing blob is overwritten with
- * the content of the new blob. To perform partial updates to a block blob's contents using a
- * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}.
+ * // Example using `for await` syntax
+ * let i = 1;
+ * const blobs = containerClient.listBlobsByHierarchy("/");
+ * for await (const blob of blobs) {
+ * if (blob.kind === "prefix") {
+ * console.log(`\tBlobPrefix: ${blob.name}`);
+ * } else {
+ * console.log(`\tBlobItem: name - ${blob.name}`);
+ * }
+ * }
 *
- * @param sourceURL - Specifies the URL of the blob. The value
- * may be a URL of up to 2 KB in length that specifies a blob.
- * The value should be URL-encoded as it would appear
- * in a request URI. The source blob must either be public
- * or must be authenticated via a shared access signature.
- * If the source blob is public, no authentication is required - * to perform the operation. Here are some examples of source object URLs: - * - https://myaccount.blob.core.windows.net/mycontainer/myblob - * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= - * @param options - Optional parameters. - */ - async syncUploadFromURL(sourceURL, options = {}) { - options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-syncUploadFromURL", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.putBlobFromUrl(0, sourceURL, { - ...options, - blobHttpHeaders: options.blobHTTPHeaders, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions?.ifMatch, - sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince, - sourceIfTags: options.sourceConditions?.tagConditions - }, - cpkInfo: options.customerProvidedKey, - copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), - tier: (0, models_js_1.toAccessTier)(options.tier), - blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), - copySourceTags: options.copySourceTags, - fileRequestIntent: options.sourceShareTokenIntent, - tracingOptions: updatedOptions.tracingOptions - })); - }); - } - /** - * Uploads the specified block to the block blob's "staging area" to be later - * committed by a call to commitBlockList. - * @see https://learn.microsoft.com/rest/api/storageservices/put-block + * // Example using `iter.next()` syntax + * i = 1; + * const iter = containerClient.listBlobsByHierarchy("/"); + * let { value, done } = await iter.next(); + * while (!done) { + * if (value.kind === "prefix") { + * console.log(`\tBlobPrefix: ${value.name}`); + * } else { + * console.log(`\tBlobItem: name - ${value.name}`); + * } + * ({ value, done } = await iter.next()); + * } * - * @param blockId - A 64-byte value that is base64-encoded - * @param body - Data to upload to the staging area. - * @param contentLength - Number of bytes to upload. - * @param options - Options to the Block Blob Stage Block operation. - * @returns Response data for the Block Blob Stage Block operation. 
+ * // Example using `byPage()` syntax + * i = 1; + * for await (const page of containerClient.listBlobsByHierarchy("/").byPage({ maxPageSize: 20 })) { + * const segment = page.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of page.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * + * // Example using paging with a marker + * i = 1; + * let iterator = containerClient.listBlobsByHierarchy("/").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * // Prints 2 blob names + * if (response.blobPrefixes) { + * for (const prefix of response.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * if (response.segment.blobItems) { + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .listBlobsByHierarchy("/") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * // Prints 10 blob names + * if (response.blobPrefixes) { + * for (const prefix of response.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * if (response.segment.blobItems) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. */ - async stageBlock(blockId, body, contentLength, options = {}) { - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-stageBlock", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.stageBlock(blockId, contentLength, body, { - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - requestOptions: { - onUploadProgress: options.onProgress - }, - transactionalContentMD5: options.transactionalContentMD5, - transactionalContentCrc64: options.transactionalContentCrc64, - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - })); - }); + listBlobsByHierarchy(delimiter, options = {}) { + if (delimiter === "") { + throw new RangeError("delimiter should contain one or more characters"); + } + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = void 0; + } + const updatedOptions = { + ...options, + ...include.length > 0 ? 
{ include } : {}
+ };
+ const iter = this.listItemsByHierarchy(delimiter, updatedOptions);
+ return {
+ /**
+ * The next method, part of the iteration protocol
+ */
+ async next() {
+ return iter.next();
+ },
+ /**
+ * The connection to the async iterator, part of the iteration protocol
+ */
+ [Symbol.asyncIterator]() {
+ return this;
+ },
+ /**
+ * Return an AsyncIterableIterator that works a page at a time
+ */
+ byPage: (settings = {}) => {
+ return this.listHierarchySegments(delimiter, settings.continuationToken, {
+ maxPageSize: settings.maxPageSize,
+ ...updatedOptions
+ });
+ }
+ };
 }
 /**
- * The Stage Block From URL operation creates a new block to be committed as part
- * of a blob where the contents are read from a URL.
- * This API is available starting in version 2018-03-28.
- * @see https://learn.microsoft.com/rest/api/storageservices/put-block-from-url
+ * The Filter Blobs operation enables callers to list blobs in the container whose tags
+ * match a given search expression.
 *
- * @param blockId - A 64-byte value that is base64-encoded
- * @param sourceURL - Specifies the URL of the blob. The value
- * may be a URL of up to 2 KB in length that specifies a blob.
- * The value should be URL-encoded as it would appear
- * in a request URI. The source blob must either be public
- * or must be authenticated via a shared access signature.
- * If the source blob is public, no authentication is required
- * to perform the operation. Here are some examples of source object URLs:
- * - https://myaccount.blob.core.windows.net/mycontainer/myblob
- * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=
- * @param offset - From which position of the blob to download, greater than or equal to 0
- * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined
- * @param options - Options to the Block Blob Stage Block From URL operation.
- * @returns Response data for the Block Blob Stage Block From URL operation.
+ * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.
+ * The given expression must evaluate to true for a blob to be returned in the results.
+ * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+ * however, only a subset of the OData filter syntax is supported in the Blob service.
+ * @param marker - A string value that identifies the portion of
+ * the list of blobs to be returned with the next listing operation. The
+ * operation returns the continuationToken value within the response body if the
+ * listing operation did not return all blobs remaining to be listed
+ * with the current page. The continuationToken value can be used as the value for
+ * the marker parameter in a subsequent call to request the next page of list
+ * items. The marker value is opaque to the client.
+ * @param options - Options to find blobs by tags.
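+ *
+ * Single-segment sketch (tag key and value are placeholders; the expression follows
+ * the OData subset noted above, and `tagValue` is populated by the mapping below):
+ *
+ * ```ts
+ * const segment = await containerClient.findBlobsByTagsSegment("tagkey='tagvalue'", undefined);
+ * for (const blob of segment.blobs) {
+ *   console.log(blob.name, blob.tagValue);
+ * }
+ * ```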
 */
- async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) {
- (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps);
- return tracing_js_1.tracingClient.withSpan("BlockBlobClient-stageBlockFromURL", options, async (updatedOptions) => {
- return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, {
+ async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) {
+ return tracing_js_1.tracingClient.withSpan("ContainerClient-findBlobsByTagsSegment", options, async (updatedOptions) => {
+ const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.filterBlobs({
 abortSignal: options.abortSignal,
- leaseAccessConditions: options.conditions,
- sourceContentMD5: options.sourceContentMD5,
- sourceContentCrc64: options.sourceContentCrc64,
- sourceRange: offset === 0 && !count ? void 0 : (0, Range_js_1.rangeToString)({ offset, count }),
- cpkInfo: options.customerProvidedKey,
- encryptionScope: options.encryptionScope,
- copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization),
- fileRequestIntent: options.sourceShareTokenIntent,
+ where: tagFilterSqlExpression,
+ marker,
+ maxPageSize: options.maxPageSize,
 tracingOptions: updatedOptions.tracingOptions
 }));
+ const wrappedResponse = {
+ ...response,
+ _response: response._response,
+ // _response is made non-enumerable
+ blobs: response.blobs.map((blob) => {
+ let tagValue = "";
+ if (blob.tags?.blobTagSet.length === 1) {
+ tagValue = blob.tags.blobTagSet[0].value;
+ }
+ return { ...blob, tags: (0, utils_common_js_1.toTags)(blob.tags), tagValue };
+ })
+ };
+ return wrappedResponse;
 });
 }
 /**
- * Writes a blob by specifying the list of block IDs that make up the blob.
- * In order to be written as part of a blob, a block must have been successfully written
- * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to
- * update a blob by uploading only those blocks that have changed, then committing the new and existing
- * blocks together. Any blocks not specified in the block list and permanently deleted.
- * @see https://learn.microsoft.com/rest/api/storageservices/put-block-list
+ * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse.
 *
- * @param blocks - Array of 64-byte value that is base64-encoded
- * @param options - Options to the Block Blob Commit Block List operation.
- * @returns Response data for the Block Blob Commit Block List operation.
+ * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.
+ * The given expression must evaluate to true for a blob to be returned in the results.
+ * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;
+ * however, only a subset of the OData filter syntax is supported in the Blob service.
+ * @param marker - A string value that identifies the portion of
+ * the list of blobs to be returned with the next listing operation. The
+ * operation returns the continuationToken value within the response body if the
+ * listing operation did not return all blobs remaining to be listed
+ * with the current page. The continuationToken value can be used as the value for
+ * the marker parameter in a subsequent call to request the next page of list
+ * items. The marker value is opaque to the client.
+ * @param options - Options to find blobs by tags. */ - async commitBlockList(blocks, options = {}) { - options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-commitBlockList", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.commitBlockList({ latest: blocks }, { - abortSignal: options.abortSignal, - blobHttpHeaders: options.blobHTTPHeaders, - leaseAccessConditions: options.conditions, - metadata: options.metadata, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, - immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, - legalHold: options.legalHold, - tier: (0, models_js_1.toAccessTier)(options.tier), - blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), - tracingOptions: updatedOptions.tracingOptions - })); - }); + async *findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + let response; + if (!!marker || marker === void 0) { + do { + response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield response; + } while (marker); + } } /** - * Returns the list of blocks that have been uploaded as part of a block blob - * using the specified block list filter. - * @see https://learn.microsoft.com/rest/api/storageservices/get-block-list + * Returns an AsyncIterableIterator for blobs. * - * @param listType - Specifies whether to return the list of committed blocks, - * the list of uncommitted blocks, or both lists together. - * @param options - Options to the Block Blob Get Block List operation. - * @returns Response data for the Block Blob Get Block List operation. + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. */ - async getBlockList(listType, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-getBlockList", options, async (updatedOptions) => { - const res = (0, utils_common_js_1.assertResponse)(await this.blockBlobContext.getBlockList(listType, { - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - tracingOptions: updatedOptions.tracingOptions - })); - if (!res.committedBlocks) { - res.committedBlocks = []; - } - if (!res.uncommittedBlocks) { - res.uncommittedBlocks = []; - } - return res; - }); + async *findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + let marker; + for await (const segment of this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)) { + yield* segment.blobs; + } } - // High level functions /** - * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob.
- * - * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is - * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. - * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} - * to commit the block list. + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. * - * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is - * `blobContentType`, enabling the browser to provide - * functionality based on file type. + * .byPage() returns an async iterable iterator to list the blobs in pages. * - * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView - * @param options - - */ - async uploadData(data, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadData", options, async (updatedOptions) => { - if (core_util_1.isNodeLike) { - let buffer; - if (data instanceof Buffer) { - buffer = data; - } else if (data instanceof ArrayBuffer) { - buffer = Buffer.from(data); - } else { - data = data; - buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); - } - return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); - } else { - const browserBlob = new Blob([data]); - return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); - } - }); - } - /** - * ONLY AVAILABLE IN BROWSERS. + * Example using `for await` syntax: * - * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. + * ```ts snippet:ReadmeSampleFindBlobsByTags + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; * - * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. - * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call - * {@link commitBlockList} to commit the block list. + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); * - * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is - * `blobContentType`, enabling the browser to provide - * functionality based on file type. + * const containerName = ""; + * const containerClient = blobServiceClient.getContainerClient(containerName); * - * @deprecated Use {@link uploadData} instead. + * // Example using `for await` syntax + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * - * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView - * @param options - Options to upload browser data. - * @returns Response data for the Blob Upload operation. 
- */ - async uploadBrowserData(browserData, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadBrowserData", options, async (updatedOptions) => { - const browserBlob = new Blob([browserData]); - return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); - }); - } - /** + * // Example using `iter.next()` syntax + * i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Blob ${i++}: ${value.name}`); + * ({ value, done } = await iter.next()); + * } * - * Uploads data to block blob. Requires a bodyFactory as the data source, - * which need to return a {@link HttpRequestBody} object with the offset and size provided. + * // Example using `byPage()` syntax + * i = 1; + * for await (const page of containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ maxPageSize: 20 })) { + * for (const blob of page.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } * - * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is - * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. - * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} - * to commit the block list. + * // Example using paging with a marker + * i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * // Prints 10 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` * - * @param bodyFactory - - * @param size - size of the data to upload. - * @param options - Options to Upload to Block Blob operation. - * @returns Response data for the Blob Upload operation. + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. */ - async uploadSeekableInternal(bodyFactory, size, options = {}) { - let blockSize = options.blockSize ?? 0; - if (blockSize < 0 || blockSize > constants_js_1.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { - throw new RangeError(`blockSize option must be >= 0 and <= ${constants_js_1.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); - } - const maxSingleShotSize = options.maxSingleShotSize ??
constants_js_1.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; - if (maxSingleShotSize < 0 || maxSingleShotSize > constants_js_1.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { - throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${constants_js_1.BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); - } - if (blockSize === 0) { - if (size > constants_js_1.BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * constants_js_1.BLOCK_BLOB_MAX_BLOCKS) { - throw new RangeError(`${size} is too larger to upload to a block blob.`); - } - if (size > maxSingleShotSize) { - blockSize = Math.ceil(size / constants_js_1.BLOCK_BLOB_MAX_BLOCKS); - if (blockSize < constants_js_1.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { - blockSize = constants_js_1.DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; - } - } - } - if (!options.blobHTTPHeaders) { - options.blobHTTPHeaders = {}; - } - if (!options.conditions) { - options.conditions = {}; - } - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadSeekableInternal", options, async (updatedOptions) => { - if (size <= maxSingleShotSize) { - return (0, utils_common_js_1.assertResponse)(await this.upload(bodyFactory(0, size), size, updatedOptions)); - } - const numBlocks = Math.floor((size - 1) / blockSize) + 1; - if (numBlocks > constants_js_1.BLOCK_BLOB_MAX_BLOCKS) { - throw new RangeError(`The buffer's size is too big or the BlockSize is too small;the number of blocks must be <= ${constants_js_1.BLOCK_BLOB_MAX_BLOCKS}`); - } - const blockList = []; - const blockIDPrefix = (0, core_util_2.randomUUID)(); - let transferProgress = 0; - const batch = new Batch_js_1.Batch(options.concurrency); - for (let i = 0; i < numBlocks; i++) { - batch.addOperation(async () => { - const blockID = (0, utils_common_js_1.generateBlockID)(blockIDPrefix, i); - const start = blockSize * i; - const end = i === numBlocks - 1 ? size : start + blockSize; - const contentLength = end - start; - blockList.push(blockID); - await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { - abortSignal: options.abortSignal, - conditions: options.conditions, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - }); - transferProgress += contentLength; - if (options.onProgress) { - options.onProgress({ - loadedBytes: transferProgress - }); - } + findBlobsByTags(tagFilterSqlExpression, options = {}) { + const listSegmentOptions = { + ...options + }; + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...listSegmentOptions }); } - await batch.do(); - return this.commitBlockList(blockList, updatedOptions); - }); - } - /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Uploads a local file in blocks to a block blob. - * - * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. - * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList - * to commit the block list. - * - * @param filePath - Full path of local file - * @param options - Options to Upload to Block Blob operation. 
- * @returns Response data for the Blob Upload operation. - */ - async uploadFile(filePath, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadFile", options, async (updatedOptions) => { - const size = (await (0, utils_js_1.fsStat)(filePath)).size; - return this.uploadSeekableInternal((offset, count) => { - return () => (0, utils_js_1.fsCreateReadStream)(filePath, { - autoClose: true, - end: count ? offset + count - 1 : Infinity, - start: offset - }); - }, size, { - ...options, - tracingOptions: updatedOptions.tracingOptions - }); - }); - } - /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Uploads a Node.js Readable stream into block blob. - * - * PERFORMANCE IMPROVEMENT TIPS: - * * Input stream highWaterMark is better to set a same value with bufferSize - * parameter, which will avoid Buffer.concat() operations. - * - * @param stream - Node.js Readable stream - * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB - * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, - * positive correlation with max uploading concurrency. Default value is 5 - * @param options - Options to Upload Stream to Block Blob operation. - * @returns Response data for the Blob Upload operation. - */ - async uploadStream(stream, bufferSize = constants_js_1.DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { - if (!options.blobHTTPHeaders) { - options.blobHTTPHeaders = {}; - } - if (!options.conditions) { - options.conditions = {}; - } - return tracing_js_1.tracingClient.withSpan("BlockBlobClient-uploadStream", options, async (updatedOptions) => { - let blockNum = 0; - const blockIDPrefix = (0, core_util_2.randomUUID)(); - let transferProgress = 0; - const blockList = []; - const scheduler = new storage_common_1.BufferScheduler( - stream, - bufferSize, - maxConcurrency, - async (body, length) => { - const blockID = (0, utils_common_js_1.generateBlockID)(blockIDPrefix, blockNum); - blockList.push(blockID); - blockNum++; - await this.stageBlock(blockID, body, length, { - customerProvidedKey: options.customerProvidedKey, - conditions: options.conditions, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - }); - transferProgress += length; - if (options.onProgress) { - options.onProgress({ loadedBytes: transferProgress }); - } - }, - // concurrency should set a smaller value than maxConcurrency, which is helpful to - // reduce the possibility when a outgoing handler waits for stream data, in - // this situation, outgoing handlers are blocked. - // Outgoing queue shouldn't be empty. - Math.ceil(maxConcurrency / 4 * 3) - ); - await scheduler.do(); - return (0, utils_common_js_1.assertResponse)(await this.commitBlockList(blockList, { - ...options, - tracingOptions: updatedOptions.tracingOptions - })); - }); + }; } - }; - exports2.BlockBlobClient = BlockBlobClient; - var PageBlobClient = class _PageBlobClient extends BlobClient { /** - * pageBlobsContext provided by protocol layer. 
- */ - pageBlobContext; - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { - let pipeline; - let url; - options = options || {}; - if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { - url = urlOrConnectionString; - pipeline = credentialOrPipelineOrContainerName; - } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { - url = urlOrConnectionString; - options = blobNameOrOptions; - pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); - } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url = urlOrConnectionString; - pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); - } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); - if (extractedCreds.kind === "AccountConnString") { - if (core_util_1.isNodeLike) { - const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - if (!options.proxyOptions) { - options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); - } - pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); - } else { - throw new Error("Account connection string is only supported in Node.js environment"); - } - } else if (extractedCreds.kind === "SASConnString") { - url = (0, utils_common_js_1.appendToURLPath)((0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; - pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://learn.microsoft.com/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. 
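+ *
+ * A minimal usage sketch (illustrative only; assumes an authenticated `containerClient` and the usual
+ * `skuName`/`accountKind` fields on the response):
+ *
+ * ```ts
+ * const accountInfo = await containerClient.getAccountInfo();
+ * console.log(`SKU: ${accountInfo.skuName}, kind: ${accountInfo.accountKind}`);
+ * ```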
+ */ + async getAccountInfo(options = {}) { + return tracing_js_1.tracingClient.withSpan("ContainerClient-getAccountInfo", options, async (updatedOptions) => { + return (0, utils_common_js_1.assertResponse)(await this.containerContext.getAccountInfo({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions + })); + }); + } + getContainerNameFromUrl() { + let containerName; + try { + const parsedUrl = new URL(this.url); + if (parsedUrl.hostname.split(".")[1] === "blob") { + containerName = parsedUrl.pathname.split("/")[1]; + } else if ((0, utils_common_js_1.isIpEndpointStyle)(parsedUrl)) { + containerName = parsedUrl.pathname.split("/")[2]; } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + containerName = parsedUrl.pathname.split("/")[1]; } - } else { - throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + containerName = decodeURIComponent(containerName); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return containerName; + } catch (error3) { + throw new Error("Unable to extract containerName with provided information."); } - super(url, pipeline); - this.pageBlobContext = this.storageClientContext.pageBlob; } /** - * Creates a new PageBlobClient object identical to the source but with the - * specified snapshot timestamp. - * Provide "" will remove the snapshot and return a Client to the base blob. + * Only available for ContainerClient constructed with a shared key credential. * - * @param snapshot - The snapshot timestamp. - * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ - withSnapshot(snapshot) { - return new _PageBlobClient((0, utils_common_js_1.setURLParameter)(this.url, constants_js_1.URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? void 0 : snapshot), this.pipeline); + generateSasUrl(options) { + return new Promise((resolve2) => { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ + containerName: this._containerName, + ...options + }, this.credential).toString(); + resolve2((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); + }); } /** - * Creates a page blob of the specified length. Call uploadPages to upload data - * data to a page blob. - * @see https://learn.microsoft.com/rest/api/storageservices/put-blob + * Only available for ContainerClient constructed with a shared key credential. * - * @param size - size of the page blob. - * @param options - Options to the Page Blob Create operation. - * @returns Response data for the Page Blob Create operation. + * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI + * based on the client properties and parameters passed in. 
The SAS is signed by the shared key credential of the client. + * + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The string to sign for the SAS URI. */ - async create(size, options = {}) { - options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("PageBlobClient-create", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.create(0, size, { - abortSignal: options.abortSignal, - blobHttpHeaders: options.blobHTTPHeaders, - blobSequenceNumber: options.blobSequenceNumber, - leaseAccessConditions: options.conditions, - metadata: options.metadata, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn, - immutabilityPolicyMode: options.immutabilityPolicy?.policyMode, - legalHold: options.legalHold, - tier: (0, models_js_1.toAccessTier)(options.tier), - blobTagsString: (0, utils_common_js_1.toBlobTagsString)(options.tags), - tracingOptions: updatedOptions.tracingOptions - })); - }); + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + generateSasStringToSign(options) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ + containerName: this._containerName, + ...options + }, this.credential).stringToSign; } /** - * Creates a page blob of the specified length. Call uploadPages to upload data - * data to a page blob. If the blob with the same name already exists, the content - * of the existing blob will remain unchanged. - * @see https://learn.microsoft.com/rest/api/storageservices/put-blob + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the input user delegation key. * - * @param size - size of the page blob. - * @param options - + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
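+ *
+ * A minimal usage sketch (illustrative only; assumes `blobServiceClient` was constructed with a token
+ * credential so that `getUserDelegationKey` succeeds):
+ *
+ * ```ts
+ * import { ContainerSASPermissions } from "@azure/storage-blob";
+ *
+ * const startsOn = new Date();
+ * const expiresOn = new Date(startsOn.valueOf() + 3600 * 1000); // one hour from now
+ * const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn);
+ * const sasUrl = await containerClient.generateUserDelegationSasUrl(
+ *   { permissions: ContainerSASPermissions.parse("r"), expiresOn },
+ *   userDelegationKey,
+ * );
+ * ```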
*/ - async createIfNotExists(size, options = {}) { - return tracing_js_1.tracingClient.withSpan("PageBlobClient-createIfNotExists", options, async (updatedOptions) => { - try { - const conditions = { ifNoneMatch: constants_js_1.ETagAny }; - const res = (0, utils_common_js_1.assertResponse)(await this.create(size, { - ...options, - conditions, - tracingOptions: updatedOptions.tracingOptions - })); - return { - succeeded: true, - ...res, - _response: res._response - // _response is made non-enumerable - }; - } catch (e) { - if (e.details?.errorCode === "BlobAlreadyExists") { - return { - succeeded: false, - ...e.response?.parsedHeaders, - _response: e.response - }; - } - throw e; - } + generateUserDelegationSasUrl(options, userDelegationKey) { + return new Promise((resolve2) => { + const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ + containerName: this._containerName, + ...options + }, userDelegationKey, this.accountName).toString(); + resolve2((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); } /** - * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. - * @see https://learn.microsoft.com/rest/api/storageservices/put-page + * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI + * based on the client properties and parameters passed in. The SAS is signed by the input user delegation key. * - * @param body - Data to upload - * @param offset - Offset of destination page blob - * @param count - Content length of the body, also number of bytes to be uploaded - * @param options - Options to the Page Blob Upload Pages operation. - * @returns Response data for the Page Blob Upload Pages operation. + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @returns The string to sign for the SAS URI. */ - async uploadPages(body, offset, count, options = {}) { - options.conditions = options.conditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("PageBlobClient-uploadPages", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.uploadPages(count, body, { - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - requestOptions: { - onUploadProgress: options.onProgress - }, - range: (0, Range_js_1.rangeToString)({ offset, count }), - sequenceNumberAccessConditions: options.conditions, - transactionalContentMD5: options.transactionalContentMD5, - transactionalContentCrc64: options.transactionalContentCrc64, - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - })); - }); + generateUserDelegationSasStringToSign(options, userDelegationKey) { + return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ + containerName: this._containerName, + ...options + }, userDelegationKey, this.accountName).stringToSign; } /** - * The Upload Pages operation writes a range of pages to a page blob where the - * contents are read from a URL.
- * @see https://learn.microsoft.com/rest/api/storageservices/put-page-from-url + * Creates a BlobBatchClient object to conduct batch operations. * - * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication - * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob - * @param destOffset - Offset of destination page blob - * @param count - Number of bytes to be uploaded from source page blob - * @param options - + * @see https://learn.microsoft.com/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. */ - async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - (0, models_js_1.ensureCpkIfSpecified)(options.customerProvidedKey, this.isHttps); - return tracing_js_1.tracingClient.withSpan("PageBlobClient-uploadPagesFromURL", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.uploadPagesFromURL(sourceURL, (0, Range_js_1.rangeToString)({ offset: sourceOffset, count }), 0, (0, Range_js_1.rangeToString)({ offset: destOffset, count }), { - abortSignal: options.abortSignal, - sourceContentMD5: options.sourceContentMD5, - sourceContentCrc64: options.sourceContentCrc64, - leaseAccessConditions: options.conditions, - sequenceNumberAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions?.ifMatch, - sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince - }, - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - copySourceAuthorization: (0, utils_common_js_1.httpAuthorizationToString)(options.sourceAuthorization), - fileRequestIntent: options.sourceShareTokenIntent, - tracingOptions: updatedOptions.tracingOptions - })); - }); + getBlobBatchClient() { + return new BlobBatchClient_js_1.BlobBatchClient(this.url, this.pipeline); } + }; + exports2.ContainerClient = ContainerClient; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASPermissions.js +var require_AccountSASPermissions = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASPermissions.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AccountSASPermissions = void 0; + var AccountSASPermissions = class _AccountSASPermissions { /** - * Frees the specified pages from the page blob. - * @see https://learn.microsoft.com/rest/api/storageservices/put-page + * Parse initializes the AccountSASPermissions fields from a string. * - * @param offset - Starting byte position of the pages to clear. - * @param count - Number of bytes to clear. - * @param options - Options to the Page Blob Clear Pages operation. - * @returns Response data for the Page Blob Clear Pages operation. 
+ * @param permissions - */ - async clearPages(offset = 0, count, options = {}) { - options.conditions = options.conditions || {}; - return tracing_js_1.tracingClient.withSpan("PageBlobClient-clearPages", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.clearPages(0, { - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - range: (0, Range_js_1.rangeToString)({ offset, count }), - sequenceNumberAccessConditions: options.conditions, - cpkInfo: options.customerProvidedKey, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - })); - }); + static parse(permissions) { + const accountSASPermissions = new _AccountSASPermissions(); + for (const c of permissions) { + switch (c) { + case "r": + accountSASPermissions.read = true; + break; + case "w": + accountSASPermissions.write = true; + break; + case "d": + accountSASPermissions.delete = true; + break; + case "x": + accountSASPermissions.deleteVersion = true; + break; + case "l": + accountSASPermissions.list = true; + break; + case "a": + accountSASPermissions.add = true; + break; + case "c": + accountSASPermissions.create = true; + break; + case "u": + accountSASPermissions.update = true; + break; + case "p": + accountSASPermissions.process = true; + break; + case "t": + accountSASPermissions.tag = true; + break; + case "f": + accountSASPermissions.filter = true; + break; + case "i": + accountSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + accountSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission character: ${c}`); + } + } + return accountSASPermissions; } /** - * Returns the list of valid page ranges for a page blob or snapshot of a page blob. - * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param options - Options to the Page Blob Get Ranges operation. - * @returns Response data for the Page Blob Get Ranges operation. 
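+ *
+ * For example (illustrative sketch, not part of the original bundle):
+ *
+ * ```ts
+ * import { AccountSASPermissions } from "@azure/storage-blob";
+ *
+ * const permissions = AccountSASPermissions.from({ read: true, list: true });
+ * console.log(permissions.toString()); // "rl"
+ * ```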
+ * @param permissionLike - */ - async getPageRanges(offset = 0, count, options = {}) { - options.conditions = options.conditions || {}; - return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRanges", options, async (updatedOptions) => { - const response = (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRanges({ - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - range: (0, Range_js_1.rangeToString)({ offset, count }), - tracingOptions: updatedOptions.tracingOptions - })); - return (0, PageBlobRangeResponse_js_1.rangeResponseFromModel)(response); - }); + static from(permissionLike) { + const accountSASPermissions = new _AccountSASPermissions(); + if (permissionLike.read) { + accountSASPermissions.read = true; + } + if (permissionLike.write) { + accountSASPermissions.write = true; + } + if (permissionLike.delete) { + accountSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + accountSASPermissions.deleteVersion = true; + } + if (permissionLike.filter) { + accountSASPermissions.filter = true; + } + if (permissionLike.tag) { + accountSASPermissions.tag = true; + } + if (permissionLike.list) { + accountSASPermissions.list = true; + } + if (permissionLike.add) { + accountSASPermissions.add = true; + } + if (permissionLike.create) { + accountSASPermissions.create = true; + } + if (permissionLike.update) { + accountSASPermissions.update = true; + } + if (permissionLike.process) { + accountSASPermissions.process = true; + } + if (permissionLike.setImmutabilityPolicy) { + accountSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; + } + return accountSASPermissions; } /** - * getPageRangesSegment returns a single segment of page ranges starting from the - * specified Marker. Use an empty Marker to start enumeration from the beginning. - * After getting a segment, process it, and then call getPageRangesSegment again - * (passing the the previously-returned Marker) to get the next segment. - * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges + * Permission to read resources and list queues and tables granted. + */ + read = false; + /** + * Permission to write resources granted. + */ + write = false; + /** + * Permission to delete blobs and files granted. + */ + delete = false; + /** + * Permission to delete versions granted. + */ + deleteVersion = false; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + list = false; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + add = false; + /** + * Permission to create blobs and files granted. + */ + create = false; + /** + * Permissions to update messages and table entities granted. + */ + update = false; + /** + * Permission to get and delete messages granted. + */ + process = false; + /** + * Specifies Tag access granted. + */ + tag = false; + /** + * Permission to filter blobs. + */ + filter = false; + /** + * Permission to set immutability policy. + */ + setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete = false; + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field.
+ * + * Using this method will guarantee the permissions are in + * an order accepted by the service. + * + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-an-account-sas * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. - * @param options - Options to PageBlob Get Page Ranges Segment operation. */ - async listPageRangesSegment(offset = 0, count, marker, options = {}) { - return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRangesSegment", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRanges({ - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - range: (0, Range_js_1.rangeToString)({ offset, count }), - marker, - maxPageSize: options.maxPageSize, - tracingOptions: updatedOptions.tracingOptions - })); - }); + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.filter) { + permissions.push("f"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.list) { + permissions.push("l"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.update) { + permissions.push("u"); + } + if (this.process) { + permissions.push("p"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); } + }; + exports2.AccountSASPermissions = AccountSASPermissions; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASResourceTypes.js +var require_AccountSASResourceTypes = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASResourceTypes.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AccountSASResourceTypes = void 0; + var AccountSASResourceTypes = class _AccountSASResourceTypes { /** - * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param marker - A string value that identifies the portion of - * the get of page ranges to be returned with the next getting operation. The - * operation returns the ContinuationToken value within the response body if the - * getting operation did not return all page ranges remaining within the current page. - * The ContinuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of get - * items. The marker value is opaque to the client. - * @param options - Options to List Page Ranges operation.
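+ *
+ * For example (illustrative sketch, not part of the original bundle):
+ *
+ * ```ts
+ * import { AccountSASResourceTypes } from "@azure/storage-blob";
+ *
+ * const resourceTypes = AccountSASResourceTypes.parse("sco");
+ * // service, container and object are now all true
+ * console.log(resourceTypes.toString()); // "sco"
+ * ```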
+ * @param resourceTypes - */ - async *listPageRangeItemSegments(offset = 0, count, marker, options = {}) { - let getPageRangeItemSegmentsResponse; - if (!!marker || marker === void 0) { - do { - getPageRangeItemSegmentsResponse = await this.listPageRangesSegment(offset, count, marker, options); - marker = getPageRangeItemSegmentsResponse.continuationToken; - yield await getPageRangeItemSegmentsResponse; - } while (marker); + static parse(resourceTypes) { + const accountSASResourceTypes = new _AccountSASResourceTypes(); + for (const c of resourceTypes) { + switch (c) { + case "s": + accountSASResourceTypes.service = true; + break; + case "c": + accountSASResourceTypes.container = true; + break; + case "o": + accountSASResourceTypes.object = true; + break; + default: + throw new RangeError(`Invalid resource type: ${c}`); + } } + return accountSASResourceTypes; } /** - * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * Permission to access service level APIs granted. + */ + service = false; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + container = false; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + object = false; + /** + * Converts the given resource types to a string. + * + * @see https://learn.microsoft.com/rest/api/storageservices/constructing-an-account-sas * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param options - Options to List Page Ranges operation. */ - async *listPageRangeItems(offset = 0, count, options = {}) { - let marker; - for await (const getPageRangesSegment of this.listPageRangeItemSegments(offset, count, marker, options)) { - yield* (0, utils_common_js_1.ExtractPageRangeInfoItems)(getPageRangesSegment); + toString() { + const resourceTypes = []; + if (this.service) { + resourceTypes.push("s"); + } + if (this.container) { + resourceTypes.push("c"); + } + if (this.object) { + resourceTypes.push("o"); } + return resourceTypes.join(""); } + }; + exports2.AccountSASResourceTypes = AccountSASResourceTypes; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASServices.js +var require_AccountSASServices = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASServices.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.AccountSASServices = void 0; + var AccountSASServices = class _AccountSASServices { /** - * Returns an async iterable iterator to list of page ranges for a page blob. - * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges - * - * .byPage() returns an async iterable iterator to list of page ranges for a page blob. 
- * - * ```ts snippet:ClientsListPageBlobs - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; - * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); - * - * const containerName = ""; - * const blobName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); - * const pageBlobClient = containerClient.getPageBlobClient(blobName); - * - * // Example using `for await` syntax - * let i = 1; - * for await (const pageRange of pageBlobClient.listPageRanges()) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } - * - * // Example using `iter.next()` syntax - * i = 1; - * const iter = pageBlobClient.listPageRanges(); - * let { value, done } = await iter.next(); - * while (!done) { - * console.log(`Page range ${i++}: ${value.start} - ${value.end}`); - * ({ value, done } = await iter.next()); - * } - * - * // Example using `byPage()` syntax - * i = 1; - * for await (const page of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { - * for (const pageRange of page.pageRange || []) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } - * } - * - * // Example using paging with a marker - * i = 1; - * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * // Prints 2 page ranges - * if (response.pageRange) { - * for (const pageRange of response.pageRange) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } - * } - * // Gets next marker - * let marker = response.continuationToken; - * // Passing next marker as continuationToken - * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; - * // Prints 10 page ranges - * if (response.pageRange) { - * for (const pageRange of response.pageRange) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } - * } - * ``` + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param options - Options to the Page Blob Get Ranges operation. - * @returns An asyncIterableIterator that supports paging. 
+ * @param services - */ - listPageRanges(offset = 0, count, options = {}) { - options.conditions = options.conditions || {}; - const iter = this.listPageRangeItems(offset, count, options); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: (settings = {}) => { - return this.listPageRangeItemSegments(offset, count, settings.continuationToken, { - maxPageSize: settings.maxPageSize, - ...options - }); + static parse(services) { + const accountSASServices = new _AccountSASServices(); + for (const c of services) { + switch (c) { + case "b": + accountSASServices.blob = true; + break; + case "f": + accountSASServices.file = true; + break; + case "q": + accountSASServices.queue = true; + break; + case "t": + accountSASServices.table = true; + break; + default: + throw new RangeError(`Invalid service character: ${c}`); } - }; + } + return accountSASServices; } /** - * Gets the collection of page ranges that differ between a specified snapshot and this page blob. - * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges - * - * @param offset - Starting byte position of the page blob - * @param count - Number of bytes to get ranges diff. - * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. - * @returns Response data for the Page Blob Get Page Range Diff operation. + * Permission to access blob resources granted. */ - async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { - options.conditions = options.conditions || {}; - return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRangesDiff", options, async (updatedOptions) => { - const result = (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRangesDiff({ - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - prevsnapshot: prevSnapshot, - range: (0, Range_js_1.rangeToString)({ offset, count }), - tracingOptions: updatedOptions.tracingOptions - })); - return (0, PageBlobRangeResponse_js_1.rangeResponseFromModel)(result); - }); - } + blob = false; /** - * getPageRangesDiffSegment returns a single segment of page ranges starting from the - * specified Marker for difference between previous snapshot and the target page blob. - * Use an empty Marker to start enumeration from the beginning. - * After getting a segment, process it, and then call getPageRangesDiffSegment again - * (passing the the previously-returned Marker) to get the next segment. - * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges - * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. - * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * Permission to access file resources granted. 
*/ - async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options = {}) { - return tracing_js_1.tracingClient.withSpan("PageBlobClient-getPageRangesDiffSegment", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRangesDiff({ - abortSignal: options?.abortSignal, - leaseAccessConditions: options?.conditions, - modifiedAccessConditions: { - ...options?.conditions, - ifTags: options?.conditions?.tagConditions - }, - prevsnapshot: prevSnapshotOrUrl, - range: (0, Range_js_1.rangeToString)({ - offset, - count - }), - marker, - maxPageSize: options?.maxPageSize, - tracingOptions: updatedOptions.tracingOptions - })); - }); - } + file = false; /** - * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} - * + * Permission to access queue resources granted. + */ + queue = false; + /** + * Permission to access table resources granted. + */ + table = false; + /** + * Converts the given services to a string. * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. - * @param marker - A string value that identifies the portion of - * the get of page ranges to be returned with the next getting operation. The - * operation returns the ContinuationToken value within the response body if the - * getting operation did not return all page ranges remaining within the current page. - * The ContinuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of get - * items. The marker value is opaque to the client. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. 
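+ *
+ * For example (illustrative sketch, not part of the original bundle):
+ *
+ * ```ts
+ * import { AccountSASServices } from "@azure/storage-blob";
+ *
+ * const services = AccountSASServices.parse("bq");
+ * console.log(services.toString()); // "bq" (blob and queue access)
+ * ```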
*/ - async *listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options) { - let getPageRangeItemSegmentsResponse; - if (!!marker || marker === void 0) { - do { - getPageRangeItemSegmentsResponse = await this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options); - marker = getPageRangeItemSegmentsResponse.continuationToken; - yield await getPageRangeItemSegmentsResponse; - } while (marker); + toString() { + const services = []; + if (this.blob) { + services.push("b"); + } + if (this.table) { + services.push("t"); + } + if (this.queue) { + services.push("q"); + } + if (this.file) { + services.push("f"); } + return services.join(""); + } + }; + exports2.AccountSASServices = AccountSASServices; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASSignatureValues.js +var require_AccountSASSignatureValues = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASSignatureValues.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.generateAccountSASQueryParameters = generateAccountSASQueryParameters; + exports2.generateAccountSASQueryParametersInternal = generateAccountSASQueryParametersInternal; + var AccountSASPermissions_js_1 = require_AccountSASPermissions(); + var AccountSASResourceTypes_js_1 = require_AccountSASResourceTypes(); + var AccountSASServices_js_1 = require_AccountSASServices(); + var SasIPRange_js_1 = require_SasIPRange(); + var SASQueryParameters_js_1 = require_SASQueryParameters(); + var constants_js_1 = require_constants15(); + var utils_common_js_1 = require_utils_common(); + function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { + return generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential).sasQueryParameters; + } + function generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential) { + const version = accountSASSignatureValues.version ? 
accountSASSignatureValues.version : constants_js_1.SERVICE_VERSION; + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.setImmutabilityPolicy && version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.deleteVersion && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); + } + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.permanentDelete && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + } + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.tag && version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); + } + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.filter && version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); + } + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + const parsedPermissions = AccountSASPermissions_js_1.AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + const parsedServices = AccountSASServices_js_1.AccountSASServices.parse(accountSASSignatureValues.services).toString(); + const parsedResourceTypes = AccountSASResourceTypes_js_1.AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + let stringToSign; + if (version >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.startsOn, false) : "", + (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "" + // Account SAS requires an additional newline character + ].join("\n"); + } else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.startsOn, false) : "", + (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : "", + version, + "" + // Account SAS requires an additional newline character + ].join("\n"); } + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return { + sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, accountSASSignatureValues.encryptionScope), + stringToSign + }; + } + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/BlobServiceClient.js +var require_BlobServiceClient = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/BlobServiceClient.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BlobServiceClient = void 0; + var core_auth_1 = require_commonjs7(); + var core_rest_pipeline_1 = require_commonjs6(); + var core_util_1 = require_commonjs4(); + var Pipeline_js_1 = require_Pipeline(); + var ContainerClient_js_1 = require_ContainerClient(); + var utils_common_js_1 = require_utils_common(); + var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); + var AnonymousCredential_js_1 = require_AnonymousCredential(); + var utils_common_js_2 = require_utils_common(); + var tracing_js_1 = require_tracing(); + var BlobBatchClient_js_1 = require_BlobBatchClient(); + var StorageClient_js_1 = require_StorageClient(); + var AccountSASPermissions_js_1 = require_AccountSASPermissions(); + var AccountSASSignatureValues_js_1 = require_AccountSASSignatureValues(); + var AccountSASServices_js_1 = require_AccountSASServices(); + var BlobServiceClient = class _BlobServiceClient extends StorageClient_js_1.StorageClient { + /** + * serviceContext provided by protocol layer. + */ + serviceContext; /** - * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. 
*/ - async *listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { - let marker; - for await (const getPageRangesSegment of this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)) { - yield* (0, utils_common_js_1.ExtractPageRangeInfoItems)(getPageRangesSegment); + static fromConnectionString(connectionString, options) { + options = options || {}; + const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(connectionString); + if (extractedCreds.kind === "AccountConnString") { + if (core_util_1.isNodeLike) { + const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + if (!options.proxyOptions) { + options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); + } + const pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); + return new _BlobServiceClient(extractedCreds.url, pipeline); + } else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } else if (extractedCreds.kind === "SASConnString") { + const pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + return new _BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); + } else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + constructor(url, credentialOrPipeline, options) { + let pipeline; + if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } else if (core_util_1.isNodeLike && credentialOrPipeline instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipeline instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipeline)) { + pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipeline, options); + } else { + pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); } + super(url, pipeline); + this.serviceContext = this.storageClientContext.service; } /** - * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. - * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges - * - * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. 
- * - * ```ts snippet:ClientsListPageBlobsDiff - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; - * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); - * - * const containerName = ""; - * const blobName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); - * const pageBlobClient = containerClient.getPageBlobClient(blobName); + * Creates a {@link ContainerClient} object * - * const offset = 0; - * const count = 1024; - * const previousSnapshot = ""; - * // Example using `for await` syntax - * let i = 1; - * for await (const pageRange of pageBlobClient.listPageRangesDiff(offset, count, previousSnapshot)) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. * - * // Example using `iter.next()` syntax - * i = 1; - * const iter = pageBlobClient.listPageRangesDiff(offset, count, previousSnapshot); - * let { value, done } = await iter.next(); - * while (!done) { - * console.log(`Page range ${i++}: ${value.start} - ${value.end}`); - * ({ value, done } = await iter.next()); - * } + * Example usage: * - * // Example using `byPage()` syntax - * i = 1; - * for await (const page of pageBlobClient - * .listPageRangesDiff(offset, count, previousSnapshot) - * .byPage({ maxPageSize: 20 })) { - * for (const pageRange of page.pageRange || []) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } - * } + * ```ts snippet:BlobServiceClientGetContainerClient + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; * - * // Example using paging with a marker - * i = 1; - * let iterator = pageBlobClient - * .listPageRangesDiff(offset, count, previousSnapshot) - * .byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * // Prints 2 page ranges - * if (response.pageRange) { - * for (const pageRange of response.pageRange) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } - * } - * // Gets next marker - * let marker = response.continuationToken; - * // Passing next marker as continuationToken - * iterator = pageBlobClient - * .listPageRangesDiff(offset, count, previousSnapshot) - * .byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; - * // Prints 10 page ranges - * if (response.pageRange) { - * for (const pageRange of response.pageRange) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } - * } + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * const containerClient = blobServiceClient.getContainerClient(""); * ``` + */ + getContainerClient(containerName) { + return new ContainerClient_js_1.ContainerClient((0, utils_common_js_1.appendToURLPath)(this.url, encodeURIComponent(containerName)), this.pipeline); + } + /** + * Create a Blob container. @see https://learn.microsoft.com/rest/api/storageservices/create-container * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. 
- * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Ranges operation. - * @returns An asyncIterableIterator that supports paging. + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. */ - listPageRangesDiff(offset, count, prevSnapshot, options = {}) { - options.conditions = options.conditions || {}; - const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, { - ...options + async createContainer(containerName, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-createContainer", options, async (updatedOptions) => { + const containerClient = this.getContainerClient(containerName); + const containerCreateResponse = await containerClient.create(updatedOptions); + return { + containerClient, + containerCreateResponse + }; }); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: (settings = {}) => { - return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, { - maxPageSize: settings.maxPageSize, - ...options - }); - } - }; } /** - * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. - * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges + * Deletes a Blob container. * - * @param offset - Starting byte position of the page blob - * @param count - Number of bytes to get ranges diff. - * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. - * @returns Response data for the Page Blob Get Page Range Diff operation. + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. */ - async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { - options.conditions = options.conditions || {}; - return tracing_js_1.tracingClient.withSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options, async (updatedOptions) => { - const response = (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.getPageRangesDiff({ - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - prevSnapshotUrl, - range: (0, Range_js_1.rangeToString)({ offset, count }), + async deleteContainer(containerName, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-deleteContainer", options, async (updatedOptions) => { + const containerClient = this.getContainerClient(containerName); + return containerClient.delete(updatedOptions); + }); + } + /** + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. + * + * @param deletedContainerName - Name of the previously deleted container. 
+ * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. + */ + async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-undeleteContainer", options, async (updatedOptions) => { + const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); + const containerContext = containerClient["storageClientContext"].container; + const containerUndeleteResponse = (0, utils_common_js_2.assertResponse)(await containerContext.restore({ + deletedContainerName, + deletedContainerVersion, tracingOptions: updatedOptions.tracingOptions })); - return (0, PageBlobRangeResponse_js_1.rangeResponseFromModel)(response); + return { containerClient, containerUndeleteResponse }; }); } /** - * Resizes the page blob to the specified size (which must be a multiple of 512). - * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-properties + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-properties * - * @param size - Target size - * @param options - Options to the Page Blob Resize operation. - * @returns Response data for the Page Blob Resize operation. + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. */ - async resize(size, options = {}) { - options.conditions = options.conditions || {}; - return tracing_js_1.tracingClient.withSpan("PageBlobClient-resize", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.resize(size, { + async getProperties(options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getProperties", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.getProperties({ abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, - encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions })); }); } /** - * Sets a page blob's sequence number. - * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-properties + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-service-properties * - * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. - * @param sequenceNumber - Required if sequenceNumberAction is max or update - * @param options - Options to the Page Blob Update Sequence Number operation. - * @returns Response data for the Page Blob Update Sequence Number operation. + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. 
*/ - async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { - options.conditions = options.conditions || {}; - return tracing_js_1.tracingClient.withSpan("PageBlobClient-updateSequenceNumber", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, { + async setProperties(properties, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-setProperties", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.setProperties(properties, { abortSignal: options.abortSignal, - blobSequenceNumber: sequenceNumber, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, tracingOptions: updatedOptions.tracingOptions })); }); } /** - * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. - * The snapshot is copied such that only the differential changes between the previously - * copied snapshot are transferred to the destination. - * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. - * @see https://learn.microsoft.com/rest/api/storageservices/incremental-copy-blob - * @see https://learn.microsoft.com/azure/virtual-machines/windows/incremental-snapshots + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-stats * - * @param copySource - Specifies the name of the source page blob snapshot. For example, - * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= - * @param options - Options to the Page Blob Copy Incremental operation. - * @returns Response data for the Page Blob Copy Incremental operation. + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. 
*/ - async startCopyIncremental(copySource, options = {}) { - return tracing_js_1.tracingClient.withSpan("PageBlobClient-startCopyIncremental", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.pageBlobContext.copyIncremental(copySource, { + async getStatistics(options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getStatistics", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.getStatistics({ abortSignal: options.abortSignal, - modifiedAccessConditions: { - ...options.conditions, - ifTags: options.conditions?.tagConditions - }, tracingOptions: updatedOptions.tracingOptions })); }); } - }; - exports2.PageBlobClient = PageBlobClient; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/BatchUtils.js -var require_BatchUtils = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/BatchUtils.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getBodyAsText = getBodyAsText; - exports2.utf8ByteLength = utf8ByteLength; - var utils_js_1 = require_utils7(); - var constants_js_1 = require_constants15(); - async function getBodyAsText(batchResponse) { - let buffer = Buffer.alloc(constants_js_1.BATCH_MAX_PAYLOAD_IN_BYTES); - const responseLength = await (0, utils_js_1.streamToBuffer2)(batchResponse.readableStreamBody, buffer); - buffer = buffer.slice(0, responseLength); - return buffer.toString(); - } - function utf8ByteLength(str2) { - return Buffer.byteLength(str2); - } - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/BatchResponseParser.js -var require_BatchResponseParser = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/BatchResponseParser.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BatchResponseParser = void 0; - var core_rest_pipeline_1 = require_commonjs6(); - var core_http_compat_1 = require_commonjs9(); - var constants_js_1 = require_constants15(); - var BatchUtils_js_1 = require_BatchUtils(); - var log_js_1 = require_log6(); - var HTTP_HEADER_DELIMITER = ": "; - var SPACE_DELIMITER = " "; - var NOT_FOUND = -1; - var BatchResponseParser = class { - batchResponse; - responseBatchBoundary; - perResponsePrefix; - batchResponseEnding; - subRequests; - constructor(batchResponse, subRequests) { - if (!batchResponse || !batchResponse.contentType) { - throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); - } - if (!subRequests || subRequests.size === 0) { - throw new RangeError("Invalid state: subRequests is not provided or size is 0."); - } - this.batchResponse = batchResponse; - this.subRequests = subRequests; - this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; - this.perResponsePrefix = `--${this.responseBatchBoundary}${constants_js_1.HTTP_LINE_ENDING}`; - this.batchResponseEnding = `--${this.responseBatchBoundary}--`; - } - // For example of response, please refer to https://learn.microsoft.com/rest/api/storageservices/blob-batch#response - async parseBatchResponse() { - if (this.batchResponse._response.status !== constants_js_1.HTTPURLConnection.HTTP_ACCEPTED) { - throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); - } - const responseBodyAsText = await (0, BatchUtils_js_1.getBodyAsText)(this.batchResponse); - const subResponses = 
responseBodyAsText.split(this.batchResponseEnding)[0].split(this.perResponsePrefix).slice(1); - const subResponseCount = subResponses.length; - if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { - throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); - } - const deserializedSubResponses = new Array(subResponseCount); - let subResponsesSucceededCount = 0; - let subResponsesFailedCount = 0; - for (let index = 0; index < subResponseCount; index++) { - const subResponse = subResponses[index]; - const deserializedSubResponse = {}; - deserializedSubResponse.headers = (0, core_http_compat_1.toHttpHeadersLike)((0, core_rest_pipeline_1.createHttpHeaders)()); - const responseLines = subResponse.split(`${constants_js_1.HTTP_LINE_ENDING}`); - let subRespHeaderStartFound = false; - let subRespHeaderEndFound = false; - let subRespFailed = false; - let contentId = NOT_FOUND; - for (const responseLine of responseLines) { - if (!subRespHeaderStartFound) { - if (responseLine.startsWith(constants_js_1.HeaderConstants.CONTENT_ID)) { - contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); - } - if (responseLine.startsWith(constants_js_1.HTTP_VERSION_1_1)) { - subRespHeaderStartFound = true; - const tokens = responseLine.split(SPACE_DELIMITER); - deserializedSubResponse.status = parseInt(tokens[1]); - deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); - } - continue; - } - if (responseLine.trim() === "") { - if (!subRespHeaderEndFound) { - subRespHeaderEndFound = true; - } - continue; - } - if (!subRespHeaderEndFound) { - if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { - throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); - } - const tokens = responseLine.split(HTTP_HEADER_DELIMITER); - deserializedSubResponse.headers.set(tokens[0], tokens[1]); - if (tokens[0] === constants_js_1.HeaderConstants.X_MS_ERROR_CODE) { - deserializedSubResponse.errorCode = tokens[1]; - subRespFailed = true; - } - } else { - if (!deserializedSubResponse.bodyAsText) { - deserializedSubResponse.bodyAsText = ""; - } - deserializedSubResponse.bodyAsText += responseLine; - } - } - if (contentId !== NOT_FOUND && Number.isInteger(contentId) && contentId >= 0 && contentId < this.subRequests.size && deserializedSubResponses[contentId] === void 0) { - deserializedSubResponse._request = this.subRequests.get(contentId); - deserializedSubResponses[contentId] = deserializedSubResponse; - } else { - log_js_1.logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); - } - if (subRespFailed) { - subResponsesFailedCount++; - } else { - subResponsesSucceededCount++; - } - } - return { - subResponses: deserializedSubResponses, - subResponsesSucceededCount, - subResponsesFailedCount - }; - } - }; - exports2.BatchResponseParser = BatchResponseParser; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/utils/Mutex.js -var require_Mutex = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/utils/Mutex.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Mutex = void 0; - var MutexLockStatus; - (function(MutexLockStatus2) { - MutexLockStatus2[MutexLockStatus2["LOCKED"] = 0] = "LOCKED"; - MutexLockStatus2[MutexLockStatus2["UNLOCKED"] = 1] = "UNLOCKED"; - })(MutexLockStatus || (MutexLockStatus = {})); - var Mutex = class { /** - * 
Lock for a specific key. If the lock has been acquired by another customer, then - * will wait until getting the lock. + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://learn.microsoft.com/rest/api/storageservices/get-account-information * - * @param key - lock key + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. */ - static async lock(key) { - return new Promise((resolve2) => { - if (this.keys[key] === void 0 || this.keys[key] === MutexLockStatus.UNLOCKED) { - this.keys[key] = MutexLockStatus.LOCKED; - resolve2(); - } else { - this.onUnlockEvent(key, () => { - this.keys[key] = MutexLockStatus.LOCKED; - resolve2(); - }); - } + async getAccountInfo(options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getAccountInfo", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.getAccountInfo({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions + })); }); } /** - * Unlock a key. + * Returns a list of the containers under the specified account. + * @see https://learn.microsoft.com/rest/api/storageservices/list-containers2 * - * @param key - + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. */ - static async unlock(key) { - return new Promise((resolve2) => { - if (this.keys[key] === MutexLockStatus.LOCKED) { - this.emitUnlockEvent(key); - } - delete this.keys[key]; - resolve2(); + async listContainersSegment(marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-listContainersSegment", options, async (updatedOptions) => { + return (0, utils_common_js_2.assertResponse)(await this.serviceContext.listContainersSegment({ + abortSignal: options.abortSignal, + marker, + ...options, + include: typeof options.include === "string" ? 
[options.include] : options.include, + tracingOptions: updatedOptions.tracingOptions + })); }); } - static keys = {}; - static listeners = {}; - static onUnlockEvent(key, handler2) { - if (this.listeners[key] === void 0) { - this.listeners[key] = [handler2]; - } else { - this.listeners[key].push(handler2); - } - } - static emitUnlockEvent(key) { - if (this.listeners[key] !== void 0 && this.listeners[key].length > 0) { - const handler2 = this.listeners[key].shift(); - setImmediate(() => { - handler2.call(this); - }); - } - } - }; - exports2.Mutex = Mutex; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/BlobBatch.js -var require_BlobBatch = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/BlobBatch.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BlobBatch = void 0; - var core_util_1 = require_commonjs4(); - var core_auth_1 = require_commonjs7(); - var core_rest_pipeline_1 = require_commonjs6(); - var core_util_2 = require_commonjs4(); - var AnonymousCredential_js_1 = require_AnonymousCredential(); - var Clients_js_1 = require_Clients(); - var Mutex_js_1 = require_Mutex(); - var Pipeline_js_1 = require_Pipeline(); - var utils_common_js_1 = require_utils_common(); - var core_xml_1 = require_commonjs10(); - var constants_js_1 = require_constants15(); - var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); - var tracing_js_1 = require_tracing(); - var core_client_1 = require_commonjs8(); - var StorageSharedKeyCredentialPolicyV2_js_1 = require_StorageSharedKeyCredentialPolicyV22(); - var BlobBatch = class { - batchRequest; - batch = "batch"; - batchType; - constructor() { - this.batchRequest = new InnerBatchRequest(); - } /** - * Get the value of Content-Type for a batch request. - * The value must be multipart/mixed with a batch boundary. - * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. 
*/ - getMultiPartContentType() { - return this.batchRequest.getMultipartContentType(); + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-findBlobsByTagsSegment", options, async (updatedOptions) => { + const response = (0, utils_common_js_2.assertResponse)(await this.serviceContext.filterBlobs({ + abortSignal: options.abortSignal, + where: tagFilterSqlExpression, + marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions + })); + const wrappedResponse = { + ...response, + _response: response._response, + // _response is made non-enumerable + blobs: response.blobs.map((blob) => { + let tagValue = ""; + if (blob.tags?.blobTagSet.length === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return { ...blob, tags: (0, utils_common_js_1.toTags)(blob.tags), tagValue }; + }) + }; + return wrappedResponse; + }); } /** - * Get assembled HTTP request body for sub requests. + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. */ - getHttpRequestBody() { - return this.batchRequest.getHttpRequestBody(); + async *findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + let response; + if (!!marker || marker === void 0) { + do { + response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield response; + } while (marker); + } } /** - * Get sub requests that are added into the batch request. + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. 
*/ - getSubRequests() { - return this.batchRequest.getSubRequests(); - } - async addSubRequestInternal(subRequest, assembleSubRequestFunc) { - await Mutex_js_1.Mutex.lock(this.batch); - try { - this.batchRequest.preAddSubRequest(subRequest); - await assembleSubRequestFunc(); - this.batchRequest.postAddSubRequest(subRequest); - } finally { - await Mutex_js_1.Mutex.unlock(this.batch); - } - } - setBatchType(batchType) { - if (!this.batchType) { - this.batchType = batchType; - } - if (this.batchType !== batchType) { - throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); - } - } - async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { - let url; - let credential; - if (typeof urlOrBlobClient === "string" && (core_util_2.isNodeLike && credentialOrOptions instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrOptions instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrOptions))) { - url = urlOrBlobClient; - credential = credentialOrOptions; - } else if (urlOrBlobClient instanceof Clients_js_1.BlobClient) { - url = urlOrBlobClient.url; - credential = urlOrBlobClient.credential; - options = credentialOrOptions; - } else { - throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); - } - if (!options) { - options = {}; - } - return tracing_js_1.tracingClient.withSpan("BatchDeleteRequest-addSubRequest", options, async (updatedOptions) => { - this.setBatchType("delete"); - await this.addSubRequestInternal({ - url, - credential - }, async () => { - await new Clients_js_1.BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); - }); - }); - } - async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { - let url; - let credential; - let tier; - if (typeof urlOrBlobClient === "string" && (core_util_2.isNodeLike && credentialOrTier instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrTier instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrTier))) { - url = urlOrBlobClient; - credential = credentialOrTier; - tier = tierOrOptions; - } else if (urlOrBlobClient instanceof Clients_js_1.BlobClient) { - url = urlOrBlobClient.url; - credential = urlOrBlobClient.credential; - tier = credentialOrTier; - options = tierOrOptions; - } else { - throw new RangeError("Invalid arguments. 
Either url and credential, or BlobClient need be provided."); - } - if (!options) { - options = {}; + async *findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + let marker; + for await (const segment of this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)) { + yield* segment.blobs; } - return tracing_js_1.tracingClient.withSpan("BatchSetTierRequest-addSubRequest", options, async (updatedOptions) => { - this.setBatchType("setAccessTier"); - await this.addSubRequestInternal({ - url, - credential - }, async () => { - await new Clients_js_1.BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); - }); - }); - } - }; - exports2.BlobBatch = BlobBatch; - var InnerBatchRequest = class { - operationCount; - body; - subRequests; - boundary; - subRequestPrefix; - multipartContentType; - batchRequestEnding; - constructor() { - this.operationCount = 0; - this.body = ""; - const tempGuid = (0, core_util_1.randomUUID)(); - this.boundary = `batch_${tempGuid}`; - this.subRequestPrefix = `--${this.boundary}${constants_js_1.HTTP_LINE_ENDING}${constants_js_1.HeaderConstants.CONTENT_TYPE}: application/http${constants_js_1.HTTP_LINE_ENDING}${constants_js_1.HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; - this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; - this.batchRequestEnding = `--${this.boundary}--`; - this.subRequests = /* @__PURE__ */ new Map(); } /** - * Create pipeline to assemble sub requests. The idea here is to use existing - * credential and serialization/deserialization components, with additional policies to - * filter unnecessary headers, assemble sub requests into request's body - * and intercept request from going to wire. - * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-properties + * + * ```ts snippet:BlobServiceClientFindBlobsByTags + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * // Use for await to iterate the blobs + * let i = 1; + * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Use iter.next() to iterate the blobs + * i = 1; + * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Blob ${i++}: ${value.name}`); + * ({ value, done } = await iter.next()); + * } + * + * // Use byPage() to iterate the blobs + * i = 1; + * for await (const page of blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ maxPageSize: 20 })) { + * for (const blob of page.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Use paging with a marker + * i = 1; + * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. 
*/ - createPipeline(credential) { - const corePipeline = (0, core_rest_pipeline_1.createEmptyPipeline)(); - corePipeline.addPolicy((0, core_client_1.serializationPolicy)({ - stringifyXML: core_xml_1.stringifyXML, - serializerOptions: { - xml: { - xmlCharKey: "#" - } + findBlobsByTags(tagFilterSqlExpression, options = {}) { + const listSegmentOptions = { + ...options + }; + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...listSegmentOptions + }); } - }), { phase: "Serialize" }); - corePipeline.addPolicy(batchHeaderFilterPolicy()); - corePipeline.addPolicy(batchRequestAssemblePolicy(this), { afterPhase: "Sign" }); - if ((0, core_auth_1.isTokenCredential)(credential)) { - corePipeline.addPolicy((0, core_rest_pipeline_1.bearerTokenAuthenticationPolicy)({ - credential, - scopes: constants_js_1.StorageOAuthScopes, - challengeCallbacks: { authorizeRequestOnChallenge: core_client_1.authorizeRequestOnTenantChallenge } - }), { phase: "Sign" }); - } else if (credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential) { - corePipeline.addPolicy((0, StorageSharedKeyCredentialPolicyV2_js_1.storageSharedKeyCredentialPolicy)({ - accountName: credential.accountName, - accountKey: credential.accountKey - }), { phase: "Sign" }); - } - const pipeline = new Pipeline_js_1.Pipeline([]); - pipeline._credential = credential; - pipeline._corePipeline = corePipeline; - return pipeline; + }; } - appendSubRequestToBody(request2) { - this.body += [ - this.subRequestPrefix, - // sub request constant prefix - `${constants_js_1.HeaderConstants.CONTENT_ID}: ${this.operationCount}`, - // sub request's content ID - "", - // empty line after sub request's content ID - `${request2.method.toString()} ${(0, utils_common_js_1.getURLPathAndQuery)(request2.url)} ${constants_js_1.HTTP_VERSION_1_1}${constants_js_1.HTTP_LINE_ENDING}` - // sub request start line with method - ].join(constants_js_1.HTTP_LINE_ENDING); - for (const [name, value] of request2.headers) { - this.body += `${name}: ${value}${constants_js_1.HTTP_LINE_ENDING}`; + /** + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. 
+ */ + async *listSegments(marker, options = {}) { + let listContainersSegmentResponse; + if (!!marker || marker === void 0) { + do { + listContainersSegmentResponse = await this.listContainersSegment(marker, options); + listContainersSegmentResponse.containerItems = listContainersSegmentResponse.containerItems || []; + marker = listContainersSegmentResponse.continuationToken; + yield await listContainersSegmentResponse; + } while (marker); } - this.body += constants_js_1.HTTP_LINE_ENDING; } - preAddSubRequest(subRequest) { - if (this.operationCount >= constants_js_1.BATCH_MAX_REQUEST) { - throw new RangeError(`Cannot exceed ${constants_js_1.BATCH_MAX_REQUEST} sub requests in a single batch`); - } - const path3 = (0, utils_common_js_1.getURLPath)(subRequest.url); - if (!path3 || path3 === "") { - throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); + /** + * Returns an AsyncIterableIterator for Container Items + * + * @param options - Options to list containers operation. + */ + async *listItems(options = {}) { + let marker; + for await (const segment of this.listSegments(marker, options)) { + yield* segment.containerItems; } } - postAddSubRequest(subRequest) { - this.subRequests.set(this.operationCount, subRequest); - this.operationCount++; - } - // Return the http request body with assembling the ending line to the sub request body. - getHttpRequestBody() { - return `${this.body}${this.batchRequestEnding}${constants_js_1.HTTP_LINE_ENDING}`; - } - getMultipartContentType() { - return this.multipartContentType; - } - getSubRequests() { - return this.subRequests; - } - }; - function batchRequestAssemblePolicy(batchRequest) { - return { - name: "batchRequestAssemblePolicy", - async sendRequest(request2) { - batchRequest.appendSubRequestToBody(request2); - return { - request: request2, - status: 200, - headers: (0, core_rest_pipeline_1.createHttpHeaders)() - }; - } - }; - } - function batchHeaderFilterPolicy() { - return { - name: "batchHeaderFilterPolicy", - async sendRequest(request2, next) { - let xMsHeaderName = ""; - for (const [name] of request2.headers) { - if ((0, utils_common_js_1.iEqual)(name, constants_js_1.HeaderConstants.X_MS_VERSION)) { - xMsHeaderName = name; - } - } - if (xMsHeaderName !== "") { - request2.headers.delete(xMsHeaderName); - } - return next(request2); + /** + * Returns an async iterable iterator to list all the containers + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the containers in pages. 
+ * + * ```ts snippet:BlobServiceClientListContainers + * import { BlobServiceClient } from "@azure/storage-blob"; + * import { DefaultAzureCredential } from "@azure/identity"; + * + * const account = ""; + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * new DefaultAzureCredential(), + * ); + * + * // Use for await to iterate the containers + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * + * // Use iter.next() to iterate the containers + * i = 1; + * const iter = blobServiceClient.listContainers(); + * let { value, done } = await iter.next(); + * while (!done) { + * console.log(`Container ${i++}: ${value.name}`); + * ({ value, done } = await iter.next()); + * } + * + * // Use byPage() to iterate the containers + * i = 1; + * for await (const page of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * for (const container of page.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Use paging with a marker + * i = 1; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * ``` + * + * @param options - Options to list containers. + * @returns An asyncIterableIterator that supports paging. 
+ */ + listContainers(options = {}) { + if (options.prefix === "") { + options.prefix = void 0; } - }; - } - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/BlobBatchClient.js -var require_BlobBatchClient = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/BlobBatchClient.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BlobBatchClient = void 0; - var BatchResponseParser_js_1 = require_BatchResponseParser(); - var BatchUtils_js_1 = require_BatchUtils(); - var BlobBatch_js_1 = require_BlobBatch(); - var tracing_js_1 = require_tracing(); - var AnonymousCredential_js_1 = require_AnonymousCredential(); - var StorageContextClient_js_1 = require_StorageContextClient(); - var Pipeline_js_1 = require_Pipeline(); - var utils_common_js_1 = require_utils_common(); - var BlobBatchClient = class { - serviceOrContainerContext; - constructor(url, credentialOrPipeline, options) { - let pipeline; - if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipeline)) { - pipeline = credentialOrPipeline; - } else if (!credentialOrPipeline) { - pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); - } else { - pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipeline, options); + const include = []; + if (options.includeDeleted) { + include.push("deleted"); } - const storageClientContext = new StorageContextClient_js_1.StorageContextClient(url, (0, Pipeline_js_1.getCoreClientOptions)(pipeline)); - const path3 = (0, utils_common_js_1.getURLPath)(url); - if (path3 && path3 !== "/") { - this.serviceOrContainerContext = storageClientContext.container; - } else { - this.serviceOrContainerContext = storageClientContext.service; + if (options.includeMetadata) { + include.push("metadata"); } - } - /** - * Creates a {@link BlobBatch}. - * A BlobBatch represents an aggregated set of operations on blobs. - */ - createBatch() { - return new BlobBatch_js_1.BlobBatch(); - } - async deleteBlobs(urlsOrBlobClients, credentialOrOptions, options) { - const batch = new BlobBatch_js_1.BlobBatch(); - for (const urlOrBlobClient of urlsOrBlobClients) { - if (typeof urlOrBlobClient === "string") { - await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); - } else { - await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); - } + if (options.includeSystem) { + include.push("system"); } - return this.submitBatch(batch); - } - async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, options) { - const batch = new BlobBatch_js_1.BlobBatch(); - for (const urlOrBlobClient of urlsOrBlobClients) { - if (typeof urlOrBlobClient === "string") { - await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); - } else { - await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); + const listSegmentOptions = { + ...options, + ...include.length > 0 ? 
{ include } : {} + }; + const iter = this.listItems(listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, { + maxPageSize: settings.maxPageSize, + ...listSegmentOptions + }); } - } - return this.submitBatch(batch); + }; } /** - * Submit batch request which consists of multiple subrequests. - * - * Get `blobBatchClient` and other details before running the snippets. - * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` - * - * Example usage: + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). * - * ```ts snippet:BlobBatchClientSubmitBatch - * import { DefaultAzureCredential } from "@azure/identity"; - * import { BlobServiceClient, BlobBatch } from "@azure/storage-blob"; + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. * - * const account = ""; - * const credential = new DefaultAzureCredential(); - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * credential, - * ); + * @see https://learn.microsoft.com/rest/api/storageservices/get-user-delegation-key * - * const containerName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); - * const blobBatchClient = containerClient.getBlobBatchClient(); + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time + */ + async getUserDelegationKey(startsOn, expiresOn, options = {}) { + return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getUserDelegationKey", options, async (updatedOptions) => { + const response = (0, utils_common_js_2.assertResponse)(await this.serviceContext.getUserDelegationKey({ + startsOn: (0, utils_common_js_2.truncatedISO8061Date)(startsOn, false), + expiresOn: (0, utils_common_js_2.truncatedISO8061Date)(expiresOn, false) + }, { + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions + })); + const userDelegationKey = { + signedObjectId: response.signedObjectId, + signedTenantId: response.signedTenantId, + signedStartsOn: new Date(response.signedStartsOn), + signedExpiresOn: new Date(response.signedExpiresOn), + signedService: response.signedService, + signedVersion: response.signedVersion, + value: response.value + }; + const res = { + _response: response._response, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + version: response.version, + date: response.date, + errorCode: response.errorCode, + ...userDelegationKey + }; + return res; + }); + } + /** + * Creates a BlobBatchClient object to conduct batch operations. 
* - * const batchRequest = new BlobBatch(); - * await batchRequest.deleteBlob("", credential); - * await batchRequest.deleteBlob("", credential, { - * deleteSnapshots: "include", - * }); - * const batchResp = await blobBatchClient.submitBatch(batchRequest); - * console.log(batchResp.subResponsesSucceededCount); - * ``` + * @see https://learn.microsoft.com/rest/api/storageservices/blob-batch * - * Example using a lease: + * @returns A new BlobBatchClient object for this service. + */ + getBlobBatchClient() { + return new BlobBatchClient_js_1.BlobBatchClient(this.url, this.pipeline); + } + /** + * Only available for BlobServiceClient constructed with a shared key credential. * - * ```ts snippet:BlobBatchClientSubmitBatchWithLease - * import { DefaultAzureCredential } from "@azure/identity"; - * import { BlobServiceClient, BlobBatch } from "@azure/storage-blob"; + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. * - * const account = ""; - * const credential = new DefaultAzureCredential(); - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * credential, - * ); + * @see https://learn.microsoft.com/rest/api/storageservices/create-account-sas * - * const containerName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); - * const blobBatchClient = containerClient.getBlobBatchClient(); - * const blobClient = containerClient.getBlobClient(""); + * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions_js_1.AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); + } + if (expiresOn === void 0) { + const now = /* @__PURE__ */ new Date(); + expiresOn = new Date(now.getTime() + 3600 * 1e3); + } + const sas = (0, AccountSASSignatureValues_js_1.generateAccountSASQueryParameters)({ + permissions, + expiresOn, + resourceTypes, + services: AccountSASServices_js_1.AccountSASServices.parse("b").toString(), + ...options + }, this.credential).toString(); + return (0, utils_common_js_1.appendToURLQuery)(this.url, sas); + } + /** + * Only available for BlobServiceClient constructed with a shared key credential. * - * const batchRequest = new BlobBatch(); - * await batchRequest.setBlobAccessTier(blobClient, "Cool"); - * await batchRequest.setBlobAccessTier(blobClient, "Cool", { - * conditions: { leaseId: "" }, - * }); - * const batchResp = await blobBatchClient.submitBatch(batchRequest); - * console.log(batchResp.subResponsesSucceededCount); - * ``` + * Generates string to sign for a Blob account Shared Access Signature (SAS) URI based on + * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. 
* - * @see https://learn.microsoft.com/rest/api/storageservices/blob-batch + * @see https://learn.microsoft.com/rest/api/storageservices/create-account-sas * - * @param batchRequest - A set of Delete or SetTier operations. - * @param options - + * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ - async submitBatch(batchRequest, options = {}) { - if (!batchRequest || batchRequest.getSubRequests().size === 0) { - throw new RangeError("Batch request should contain one or more sub requests."); + generateSasStringToSign(expiresOn, permissions = AccountSASPermissions_js_1.AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); } - return tracing_js_1.tracingClient.withSpan("BlobBatchClient-submitBatch", options, async (updatedOptions) => { - const batchRequestBody = batchRequest.getHttpRequestBody(); - const rawBatchResponse = (0, utils_common_js_1.assertResponse)(await this.serviceOrContainerContext.submitBatch((0, BatchUtils_js_1.utf8ByteLength)(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, { - ...updatedOptions - })); - const batchResponseParser = new BatchResponseParser_js_1.BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); - const responseSummary = await batchResponseParser.parseBatchResponse(); - const res = { - _response: rawBatchResponse._response, - contentType: rawBatchResponse.contentType, - errorCode: rawBatchResponse.errorCode, - requestId: rawBatchResponse.requestId, - clientRequestId: rawBatchResponse.clientRequestId, - version: rawBatchResponse.version, - subResponses: responseSummary.subResponses, - subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, - subResponsesFailedCount: responseSummary.subResponsesFailedCount - }; - return res; - }); + if (expiresOn === void 0) { + const now = /* @__PURE__ */ new Date(); + expiresOn = new Date(now.getTime() + 3600 * 1e3); + } + return (0, AccountSASSignatureValues_js_1.generateAccountSASQueryParametersInternal)({ + permissions, + expiresOn, + resourceTypes, + services: AccountSASServices_js_1.AccountSASServices.parse("b").toString(), + ...options + }, this.credential).stringToSign; } }; - exports2.BlobBatchClient = BlobBatchClient; + exports2.BlobServiceClient = BlobServiceClient; } }); -// node_modules/@azure/storage-blob/dist/commonjs/ContainerClient.js -var require_ContainerClient = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/ContainerClient.js"(exports2) { +// node_modules/@azure/storage-blob/dist/commonjs/BatchResponse.js +var require_BatchResponse = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/BatchResponse.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ContainerClient = void 0; + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/generatedModels.js +var require_generatedModels = 
__commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/generatedModels.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.KnownEncryptionAlgorithmType = void 0; + var KnownEncryptionAlgorithmType; + (function(KnownEncryptionAlgorithmType2) { + KnownEncryptionAlgorithmType2["AES256"] = "AES256"; + })(KnownEncryptionAlgorithmType || (exports2.KnownEncryptionAlgorithmType = KnownEncryptionAlgorithmType = {})); + } +}); + +// node_modules/@azure/storage-blob/dist/commonjs/index.js +var require_commonjs15 = __commonJS({ + "node_modules/@azure/storage-blob/dist/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.logger = exports2.RestError = exports2.BaseRequestPolicy = exports2.StorageOAuthScopes = exports2.newPipeline = exports2.isPipelineLike = exports2.Pipeline = exports2.getBlobServiceAccountAudience = exports2.StorageBlobAudience = exports2.PremiumPageBlobTier = exports2.BlockBlobTier = exports2.generateBlobSASQueryParameters = exports2.generateAccountSASQueryParameters = void 0; + var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); var core_rest_pipeline_1 = require_commonjs6(); - var core_util_1 = require_commonjs4(); - var core_auth_1 = require_commonjs7(); - var AnonymousCredential_js_1 = require_AnonymousCredential(); - var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); - var Pipeline_js_1 = require_Pipeline(); - var StorageClient_js_1 = require_StorageClient(); - var tracing_js_1 = require_tracing(); - var utils_common_js_1 = require_utils_common(); + Object.defineProperty(exports2, "RestError", { enumerable: true, get: function() { + return core_rest_pipeline_1.RestError; + } }); + tslib_1.__exportStar(require_BlobServiceClient(), exports2); + tslib_1.__exportStar(require_Clients(), exports2); + tslib_1.__exportStar(require_ContainerClient(), exports2); + tslib_1.__exportStar(require_BlobLeaseClient(), exports2); + tslib_1.__exportStar(require_AccountSASPermissions(), exports2); + tslib_1.__exportStar(require_AccountSASResourceTypes(), exports2); + tslib_1.__exportStar(require_AccountSASServices(), exports2); + var AccountSASSignatureValues_js_1 = require_AccountSASSignatureValues(); + Object.defineProperty(exports2, "generateAccountSASQueryParameters", { enumerable: true, get: function() { + return AccountSASSignatureValues_js_1.generateAccountSASQueryParameters; + } }); + tslib_1.__exportStar(require_BlobBatch(), exports2); + tslib_1.__exportStar(require_BlobBatchClient(), exports2); + tslib_1.__exportStar(require_BatchResponse(), exports2); + tslib_1.__exportStar(require_BlobSASPermissions(), exports2); var BlobSASSignatureValues_js_1 = require_BlobSASSignatureValues(); - var BlobLeaseClient_js_1 = require_BlobLeaseClient(); - var Clients_js_1 = require_Clients(); - var BlobBatchClient_js_1 = require_BlobBatchClient(); - var ContainerClient = class extends StorageClient_js_1.StorageClient { - /** - * containerContext provided by protocol layer. - */ - containerContext; - _containerName; - /** - * The name of the container. 
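The index.js hunk above re-wires the package's public surface; from a consumer's point of view, the same names import as usual. A representative subset, not the full export list:

```ts
import {
  BlobServiceClient,
  ContainerClient,
  StorageSharedKeyCredential,
  generateAccountSASQueryParameters,
  generateBlobSASQueryParameters,
  newPipeline,
  logger,
} from "@azure/storage-blob";
```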
- */ - get containerName() { - return this._containerName; - } - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, options) { - let pipeline; - let url; - options = options || {}; - if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipelineOrContainerName)) { - url = urlOrConnectionString; - pipeline = credentialOrPipelineOrContainerName; - } else if (core_util_1.isNodeLike && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipelineOrContainerName)) { - url = urlOrConnectionString; - pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipelineOrContainerName, options); - } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url = urlOrConnectionString; - pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); - } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string") { - const containerName = credentialOrPipelineOrContainerName; - const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(urlOrConnectionString); - if (extractedCreds.kind === "AccountConnString") { - if (core_util_1.isNodeLike) { - const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url = (0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)); - if (!options.proxyOptions) { - options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); - } - pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); - } else { - throw new Error("Account connection string is only supported in Node.js environment"); - } - } else if (extractedCreds.kind === "SASConnString") { - url = (0, utils_common_js_1.appendToURLPath)(extractedCreds.url, encodeURIComponent(containerName)) + "?" 
+ extractedCreds.accountSas; - pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); - } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); - } - } else { - throw new Error("Expecting non-empty strings for containerName parameter"); + Object.defineProperty(exports2, "generateBlobSASQueryParameters", { enumerable: true, get: function() { + return BlobSASSignatureValues_js_1.generateBlobSASQueryParameters; + } }); + tslib_1.__exportStar(require_StorageBrowserPolicyFactory2(), exports2); + tslib_1.__exportStar(require_ContainerSASPermissions(), exports2); + tslib_1.__exportStar(require_AnonymousCredential(), exports2); + tslib_1.__exportStar(require_Credential(), exports2); + tslib_1.__exportStar(require_StorageSharedKeyCredential(), exports2); + var models_js_1 = require_models2(); + Object.defineProperty(exports2, "BlockBlobTier", { enumerable: true, get: function() { + return models_js_1.BlockBlobTier; + } }); + Object.defineProperty(exports2, "PremiumPageBlobTier", { enumerable: true, get: function() { + return models_js_1.PremiumPageBlobTier; + } }); + Object.defineProperty(exports2, "StorageBlobAudience", { enumerable: true, get: function() { + return models_js_1.StorageBlobAudience; + } }); + Object.defineProperty(exports2, "getBlobServiceAccountAudience", { enumerable: true, get: function() { + return models_js_1.getBlobServiceAccountAudience; + } }); + var Pipeline_js_1 = require_Pipeline(); + Object.defineProperty(exports2, "Pipeline", { enumerable: true, get: function() { + return Pipeline_js_1.Pipeline; + } }); + Object.defineProperty(exports2, "isPipelineLike", { enumerable: true, get: function() { + return Pipeline_js_1.isPipelineLike; + } }); + Object.defineProperty(exports2, "newPipeline", { enumerable: true, get: function() { + return Pipeline_js_1.newPipeline; + } }); + Object.defineProperty(exports2, "StorageOAuthScopes", { enumerable: true, get: function() { + return Pipeline_js_1.StorageOAuthScopes; + } }); + tslib_1.__exportStar(require_StorageRetryPolicyFactory(), exports2); + var RequestPolicy_js_1 = require_RequestPolicy(); + Object.defineProperty(exports2, "BaseRequestPolicy", { enumerable: true, get: function() { + return RequestPolicy_js_1.BaseRequestPolicy; + } }); + tslib_1.__exportStar(require_AnonymousCredentialPolicy(), exports2); + tslib_1.__exportStar(require_CredentialPolicy(), exports2); + tslib_1.__exportStar(require_StorageRetryPolicyFactory(), exports2); + tslib_1.__exportStar(require_StorageSharedKeyCredentialPolicy(), exports2); + tslib_1.__exportStar(require_SASQueryParameters(), exports2); + tslib_1.__exportStar(require_generatedModels(), exports2); + var log_js_1 = require_log6(); + Object.defineProperty(exports2, "logger", { enumerable: true, get: function() { + return log_js_1.logger; + } }); + } +}); + +// node_modules/@actions/cache/lib/internal/shared/errors.js +var require_errors3 = __commonJS({ + "node_modules/@actions/cache/lib/internal/shared/errors.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.RateLimitError = exports2.UsageError = exports2.NetworkError = exports2.GHESNotSupportedError = exports2.CacheNotFoundError = exports2.InvalidResponseError = exports2.FilesNotFoundError = void 0; + var FilesNotFoundError = class extends Error { + constructor(files = []) { + let message = "No files were found to upload"; + if (files.length > 0) { + message += `: 
${files.join(", ")}`; } - super(url, pipeline); - this._containerName = this.getContainerNameFromUrl(); - this.containerContext = this.storageClientContext.container; + super(message); + this.files = files; + this.name = "FilesNotFoundError"; } - /** - * Creates a new container under the specified account. If the container with - * the same name already exists, the operation fails. - * @see https://learn.microsoft.com/rest/api/storageservices/create-container - * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata - * - * @param options - Options to Container Create operation. - * - * - * Example usage: - * - * ```ts snippet:ContainerClientCreate - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; - * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); - * - * const containerName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); - * const createContainerResponse = await containerClient.create(); - * console.log("Container was created successfully", createContainerResponse.requestId); - * ``` - */ - async create(options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-create", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.containerContext.create(updatedOptions)); + }; + exports2.FilesNotFoundError = FilesNotFoundError; + var InvalidResponseError = class extends Error { + constructor(message) { + super(message); + this.name = "InvalidResponseError"; + } + }; + exports2.InvalidResponseError = InvalidResponseError; + var CacheNotFoundError = class extends Error { + constructor(message = "Cache not found") { + super(message); + this.name = "CacheNotFoundError"; + } + }; + exports2.CacheNotFoundError = CacheNotFoundError; + var GHESNotSupportedError = class extends Error { + constructor(message = "@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.") { + super(message); + this.name = "GHESNotSupportedError"; + } + }; + exports2.GHESNotSupportedError = GHESNotSupportedError; + var NetworkError = class extends Error { + constructor(code) { + const message = `Unable to make request: ${code} +If you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`; + super(message); + this.code = code; + this.name = "NetworkError"; + } + }; + exports2.NetworkError = NetworkError; + NetworkError.isNetworkErrorCode = (code) => { + if (!code) + return false; + return [ + "ECONNRESET", + "ENOTFOUND", + "ETIMEDOUT", + "ECONNREFUSED", + "EHOSTUNREACH" + ].includes(code); + }; + var UsageError = class extends Error { + constructor() { + const message = `Cache storage quota has been hit. Unable to upload any new cache entries. Usage is recalculated every 6-12 hours. 
+More info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`; + super(message); + this.name = "UsageError"; + } + }; + exports2.UsageError = UsageError; + UsageError.isUsageErrorMessage = (msg) => { + if (!msg) + return false; + return msg.includes("insufficient usage"); + }; + var RateLimitError = class extends Error { + constructor(message) { + super(message); + this.name = "RateLimitError"; + } + }; + exports2.RateLimitError = RateLimitError; + } +}); + +// node_modules/@actions/cache/lib/internal/uploadUtils.js +var require_uploadUtils = __commonJS({ + "node_modules/@actions/cache/lib/internal/uploadUtils.js"(exports2) { + "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + } + __setModuleDefault2(result, mod); + return result; + }; + })(); + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve2) { + resolve2(value); }); } - /** - * Creates a new container under the specified account. If the container with - * the same name already exists, it is not changed. - * @see https://learn.microsoft.com/rest/api/storageservices/create-container - * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata - * - * @param options - - */ - async createIfNotExists(options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-createIfNotExists", options, async (updatedOptions) => { + return new (P || (P = Promise))(function(resolve2, reject) { + function fulfilled(value) { try { - const res = await this.create(updatedOptions); - return { - succeeded: true, - ...res, - _response: res._response - // _response is made non-enumerable - }; + step(generator.next(value)); } catch (e) { - if (e.details?.errorCode === "ContainerAlreadyExists") { - return { - succeeded: false, - ...e.response?.parsedHeaders, - _response: e.response - }; - } else { - throw e; - } + reject(e); } - }); - } - /** - * Returns true if the Azure container resource represented by this client exists; false otherwise. 
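All of these cache error classes follow one pattern: subclass `Error`, set `name` explicitly in the constructor so instances stay identifiable after transpilation, and hang static classifiers off the class. A condensed sketch, with `NetworkError`'s code list copied from the hunk above:

```ts
class NetworkError extends Error {
  constructor(public readonly code: string) {
    super(`Unable to make request: ${code}`);
    this.name = "NetworkError";
  }

  static isNetworkErrorCode(code?: string): boolean {
    if (!code) return false;
    return [
      "ECONNRESET",
      "ENOTFOUND",
      "ETIMEDOUT",
      "ECONNREFUSED",
      "EHOSTUNREACH",
    ].includes(code);
  }
}
```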
- * - * NOTE: use this function with care since an existing container might be deleted by other clients or - * applications. Vice versa new containers with the same name might be added by other clients or - * applications after this function completes. - * - * @param options - - */ - async exists(options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-exists", options, async (updatedOptions) => { + } + function rejected(value) { try { - await this.getProperties({ - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions - }); - return true; + step(generator["throw"](value)); } catch (e) { - if (e.statusCode === 404) { - return false; - } - throw e; + reject(e); } - }); - } - /** - * Creates a {@link BlobClient} - * - * @param blobName - A blob name - * @returns A new BlobClient object for the given blob name. - */ - getBlobClient(blobName) { - return new Clients_js_1.BlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); + } + function step(result) { + result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.UploadProgress = void 0; + exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; + var core12 = __importStar2(require_core()); + var storage_blob_1 = require_commonjs15(); + var errors_1 = require_errors3(); + var UploadProgress = class { + constructor(contentLength) { + this.contentLength = contentLength; + this.sentBytes = 0; + this.displayedComplete = false; + this.startTime = Date.now(); } /** - * Creates an {@link AppendBlobClient} + * Sets the number of bytes sent * - * @param blobName - An append blob name + * @param sentBytes the number of bytes sent */ - getAppendBlobClient(blobName) { - return new Clients_js_1.AppendBlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); + setSentBytes(sentBytes) { + this.sentBytes = sentBytes; } /** - * Creates a {@link BlockBlobClient} - * - * @param blobName - A block blob name - * - * - * Example usage: - * - * ```ts snippet:ClientsUpload - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; - * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); - * - * const containerName = ""; - * const blobName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); - * const blockBlobClient = containerClient.getBlockBlobClient(blobName); - * - * const content = "Hello world!"; - * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); - * ``` + * Returns the total number of bytes transferred. */ - getBlockBlobClient(blobName) { - return new Clients_js_1.BlockBlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); + getTransferredBytes() { + return this.sentBytes; } /** - * Creates a {@link PageBlobClient} - * - * @param blobName - A page blob name + * Returns true if the upload is complete. 
*/ - getPageBlobClient(blobName) { - return new Clients_js_1.PageBlobClient((0, utils_common_js_1.appendToURLPath)(this.url, (0, utils_common_js_1.EscapePath)(blobName)), this.pipeline); + isDone() { + return this.getTransferredBytes() === this.contentLength; } /** - * Returns all user-defined metadata and system properties for the specified - * container. The data returned does not include the container's list of blobs. - * @see https://learn.microsoft.com/rest/api/storageservices/get-container-properties - * - * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if - * they originally contained uppercase characters. This differs from the metadata keys returned by - * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which - * will retain their original casing. - * - * @param options - Options to Container Get Properties operation. + * Prints the current upload stats. Once the upload completes, this will print one + * last line and then stop. */ - async getProperties(options = {}) { - if (!options.conditions) { - options.conditions = {}; + display() { + if (this.displayedComplete) { + return; + } + const transferredBytes = this.sentBytes; + const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); + const elapsedTime = Date.now() - this.startTime; + const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); + core12.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + if (this.isDone()) { + this.displayedComplete = true; } - return tracing_js_1.tracingClient.withSpan("ContainerClient-getProperties", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.containerContext.getProperties({ - abortSignal: options.abortSignal, - ...options.conditions, - tracingOptions: updatedOptions.tracingOptions - })); - }); } /** - * Marks the specified container for deletion. The container and any blobs - * contained within it are later deleted during garbage collection. - * @see https://learn.microsoft.com/rest/api/storageservices/delete-container - * - * @param options - Options to Container Delete operation. + * Returns a function used to handle TransferProgressEvents. */ - async delete(options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - return tracing_js_1.tracingClient.withSpan("ContainerClient-delete", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.containerContext.delete({ - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: options.conditions, - tracingOptions: updatedOptions.tracingOptions - })); - }); + onProgress() { + return (progress) => { + this.setSentBytes(progress.loadedBytes); + }; } /** - * Marks the specified container for deletion if it exists. The container and any blobs - * contained within it are later deleted during garbage collection. - * @see https://learn.microsoft.com/rest/api/storageservices/delete-container + * Starts the timer that displays the stats. * - * @param options - Options to Container Delete operation. 
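A condensed sketch of how `UploadProgress` is meant to be consumed; the bundled `uploadCacheArchiveSDK` in the next hunk does exactly this through transpiled generators. `blockBlobClient`, `archivePath`, and `archiveSizeBytes` are assumed to be in scope.

```ts
const progress = new UploadProgress(archiveSizeBytes);
progress.startDisplayTimer(); // re-schedules itself every 1000 ms by default
try {
  await blockBlobClient.uploadFile(archivePath, {
    onProgress: progress.onProgress(), // feeds loadedBytes into setSentBytes
  });
} finally {
  progress.stopDisplayTimer(); // prints the final stats line exactly once
}
```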
+ * @param delayInMs the delay between each write */ - async deleteIfExists(options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-deleteIfExists", options, async (updatedOptions) => { - try { - const res = await this.delete(updatedOptions); - return { - succeeded: true, - ...res, - _response: res._response - }; - } catch (e) { - if (e.details?.errorCode === "ContainerNotFound") { - return { - succeeded: false, - ...e.response?.parsedHeaders, - _response: e.response - }; - } - throw e; + startDisplayTimer(delayInMs = 1e3) { + const displayCallback = () => { + this.display(); + if (!this.isDone()) { + this.timeoutHandle = setTimeout(displayCallback, delayInMs); } - }); + }; + this.timeoutHandle = setTimeout(displayCallback, delayInMs); } /** - * Sets one or more user-defined name-value pairs for the specified container. - * - * If no option provided, or no metadata defined in the parameter, the container - * metadata will be removed. - * - * @see https://learn.microsoft.com/rest/api/storageservices/set-container-metadata - * - * @param metadata - Replace existing metadata with this value. - * If no value provided the existing metadata will be removed. - * @param options - Options to Container Set Metadata operation. + * Stops the timer that displays the stats. As this typically indicates the upload + * is complete, this will display one last line, unless the last line has already + * been written. */ - async setMetadata(metadata, options = {}) { - if (!options.conditions) { - options.conditions = {}; + stopDisplayTimer() { + if (this.timeoutHandle) { + clearTimeout(this.timeoutHandle); + this.timeoutHandle = void 0; } - if (options.conditions.ifUnmodifiedSince) { - throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); + this.display(); + } + }; + exports2.UploadProgress = UploadProgress; + function uploadCacheArchiveSDK(signedUploadURL, archivePath, options) { + return __awaiter2(this, void 0, void 0, function* () { + var _a; + const blobClient = new storage_blob_1.BlobClient(signedUploadURL); + const blockBlobClient = blobClient.getBlockBlobClient(); + const uploadProgress = new UploadProgress((_a = options === null || options === void 0 ? void 0 : options.archiveSizeBytes) !== null && _a !== void 0 ? _a : 0); + const uploadOptions = { + blockSize: options === null || options === void 0 ? void 0 : options.uploadChunkSize, + concurrency: options === null || options === void 0 ? 
void 0 : options.uploadConcurrency, + // maximum number of parallel transfer workers + maxSingleShotSize: 128 * 1024 * 1024, + // 128 MiB initial transfer size + onProgress: uploadProgress.onProgress() + }; + try { + uploadProgress.startDisplayTimer(); + core12.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); + if (response._response.status >= 400) { + throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); + } + return response; + } catch (error3) { + core12.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); + throw error3; + } finally { + uploadProgress.stopDisplayTimer(); } - return tracing_js_1.tracingClient.withSpan("ContainerClient-setMetadata", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.containerContext.setMetadata({ - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - metadata, - modifiedAccessConditions: options.conditions, - tracingOptions: updatedOptions.tracingOptions - })); + }); + } + } +}); + +// node_modules/@actions/cache/lib/internal/requestUtils.js +var require_requestUtils = __commonJS({ + "node_modules/@actions/cache/lib/internal/requestUtils.js"(exports2) { + "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + } + __setModuleDefault2(result, mod); + return result; + }; + })(); + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve2) { + resolve2(value); }); } - /** - * Gets the permissions for the specified container. The permissions indicate - * whether container data may be accessed publicly. - * - * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. - * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". - * - * @see https://learn.microsoft.com/rest/api/storageservices/get-container-acl - * - * @param options - Options to Container Get Access Policy operation. 
- */ - async getAccessPolicy(options = {}) { - if (!options.conditions) { - options.conditions = {}; + return new (P || (P = Promise))(function(resolve2, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } } - return tracing_js_1.tracingClient.withSpan("ContainerClient-getAccessPolicy", options, async (updatedOptions) => { - const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.getAccessPolicy({ - abortSignal: options.abortSignal, - leaseAccessConditions: options.conditions, - tracingOptions: updatedOptions.tracingOptions - })); - const res = { - _response: response._response, - blobPublicAccess: response.blobPublicAccess, - date: response.date, - etag: response.etag, - errorCode: response.errorCode, - lastModified: response.lastModified, - requestId: response.requestId, - clientRequestId: response.clientRequestId, - signedIdentifiers: [], - version: response.version - }; - for (const identifier of response) { - let accessPolicy = void 0; - if (identifier.accessPolicy) { - accessPolicy = { - permissions: identifier.accessPolicy.permissions - }; - if (identifier.accessPolicy.expiresOn) { - accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); - } - if (identifier.accessPolicy.startsOn) { - accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); - } - } - res.signedIdentifiers.push({ - accessPolicy, - id: identifier.id - }); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); } - return res; - }); + } + function step(result) { + result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.isSuccessStatusCode = isSuccessStatusCode; + exports2.isServerErrorStatusCode = isServerErrorStatusCode; + exports2.isRetryableStatusCode = isRetryableStatusCode; + exports2.retry = retry2; + exports2.retryTypedResponse = retryTypedResponse; + exports2.retryHttpClientResponse = retryHttpClientResponse; + var core12 = __importStar2(require_core()); + var http_client_1 = require_lib(); + var constants_1 = require_constants12(); + function isSuccessStatusCode(statusCode) { + if (!statusCode) { + return false; } - /** - * Sets the permissions for the specified container. The permissions indicate - * whether blobs in a container may be accessed publicly. - * - * When you set permissions for a container, the existing permissions are replaced. - * If no access or containerAcl provided, the existing container ACL will be - * removed. - * - * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. - * During this interval, a shared access signature that is associated with the stored access policy will - * fail with status code 403 (Forbidden), until the access policy becomes active. - * @see https://learn.microsoft.com/rest/api/storageservices/set-container-acl - * - * @param access - The level of public access to data in the container. - * @param containerAcl - Array of elements each having a unique Id and details of the access policy. - * @param options - Options to Container Set Access Policy operation. 
- */ - async setAccessPolicy(access, containerAcl, options = {}) { - options.conditions = options.conditions || {}; - return tracing_js_1.tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => { - const acl = []; - for (const identifier of containerAcl || []) { - acl.push({ - accessPolicy: { - expiresOn: identifier.accessPolicy.expiresOn ? (0, utils_common_js_1.truncatedISO8061Date)(identifier.accessPolicy.expiresOn) : "", - permissions: identifier.accessPolicy.permissions, - startsOn: identifier.accessPolicy.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(identifier.accessPolicy.startsOn) : "" - }, - id: identifier.id - }); + return statusCode >= 200 && statusCode < 300; + } + function isServerErrorStatusCode(statusCode) { + if (!statusCode) { + return true; + } + return statusCode >= 500; + } + function isRetryableStatusCode(statusCode) { + if (!statusCode) { + return false; + } + const retryableStatusCodes = [ + http_client_1.HttpCodes.BadGateway, + http_client_1.HttpCodes.ServiceUnavailable, + http_client_1.HttpCodes.GatewayTimeout + ]; + return retryableStatusCodes.includes(statusCode); + } + function sleep(milliseconds) { + return __awaiter2(this, void 0, void 0, function* () { + return new Promise((resolve2) => setTimeout(resolve2, milliseconds)); + }); + } + function retry2(name_1, method_1, getStatusCode_1) { + return __awaiter2(this, arguments, void 0, function* (name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay2 = constants_1.DefaultRetryDelay, onError = void 0) { + let errorMessage = ""; + let attempt = 1; + while (attempt <= maxAttempts) { + let response = void 0; + let statusCode = void 0; + let isRetryable = false; + try { + response = yield method(); + } catch (error3) { + if (onError) { + response = onError(error3); + } + isRetryable = true; + errorMessage = error3.message; + } + if (response) { + statusCode = getStatusCode(response); + if (!isServerErrorStatusCode(statusCode)) { + return response; + } + } + if (statusCode) { + isRetryable = isRetryableStatusCode(statusCode); + errorMessage = `Cache service responded with ${statusCode}`; + } + core12.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + if (!isRetryable) { + core12.debug(`${name} - Error is not retryable`); + break; + } + yield sleep(delay2); + attempt++; + } + throw Error(`${name} failed: ${errorMessage}`); + }); + } + function retryTypedResponse(name_1, method_1) { + return __awaiter2(this, arguments, void 0, function* (name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay2 = constants_1.DefaultRetryDelay) { + return yield retry2( + name, + method, + (response) => response.statusCode, + maxAttempts, + delay2, + // If the error object contains the statusCode property, extract it and return + // an TypedResponse so it can be processed by the retry logic. 
+ (error3) => { + if (error3 instanceof http_client_1.HttpClientError) { + return { + statusCode: error3.statusCode, + result: null, + headers: {}, + error: error3 + }; + } else { + return void 0; + } } - return (0, utils_common_js_1.assertResponse)(await this.containerContext.setAccessPolicy({ - abortSignal: options.abortSignal, - access, - containerAcl: acl, - leaseAccessConditions: options.conditions, - modifiedAccessConditions: options.conditions, - tracingOptions: updatedOptions.tracingOptions - })); - }); + ); + }); + } + function retryHttpClientResponse(name_1, method_1) { + return __awaiter2(this, arguments, void 0, function* (name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay2 = constants_1.DefaultRetryDelay) { + return yield retry2(name, method, (response) => response.message.statusCode, maxAttempts, delay2); + }); + } + } +}); + +// node_modules/@azure/abort-controller/dist/index.js +var require_dist4 = __commonJS({ + "node_modules/@azure/abort-controller/dist/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + var listenersMap = /* @__PURE__ */ new WeakMap(); + var abortedMap = /* @__PURE__ */ new WeakMap(); + var AbortSignal2 = class _AbortSignal { + constructor() { + this.onabort = null; + listenersMap.set(this, []); + abortedMap.set(this, false); } /** - * Get a {@link BlobLeaseClient} that manages leases on the container. + * Status of whether aborted or not. * - * @param proposeLeaseId - Initial proposed lease Id. - * @returns A new BlobLeaseClient object for managing leases on the container. + * @readonly */ - getBlobLeaseClient(proposeLeaseId) { - return new BlobLeaseClient_js_1.BlobLeaseClient(this, proposeLeaseId); + get aborted() { + if (!abortedMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + return abortedMap.get(this); } /** - * Creates a new block blob, or updates the content of an existing block blob. - * - * Updating an existing block blob overwrites any existing metadata on the blob. - * Partial updates are not supported; the content of the existing blob is - * overwritten with the new content. To perform a partial update of a block blob's, - * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. - * - * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, - * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better - * performance with concurrency uploading. - * - * @see https://learn.microsoft.com/rest/api/storageservices/put-blob + * Creates a new AbortSignal instance that will never be aborted. * - * @param blobName - Name of the block blob to create or update. - * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function - * which returns a new Readable stream whose offset is from data source beginning. - * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a - * string including non non-Base64/Hex-encoded characters. - * @param options - Options to configure the Block Blob Upload operation. - * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. 
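The retry helpers above ship in `__awaiter`/generator form; a de-transpiled sketch of the core loop follows. It is simplified: thrown errors are always treated as retryable, only 502/503/504 count as retryable status codes, and the two default values are assumptions standing in for the bundle's `DefaultRetryAttempts`/`DefaultRetryDelay` constants.

```ts
async function retrySketch<T>(
  name: string,
  method: () => Promise<T>,
  getStatusCode: (response: T) => number | undefined,
  maxAttempts = 2, // assumed stand-in for DefaultRetryAttempts
  delayMs = 5000, // assumed stand-in for DefaultRetryDelay
): Promise<T> {
  let errorMessage = "";
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    let isRetryable = false;
    try {
      const response = await method();
      const statusCode = getStatusCode(response);
      if (statusCode === undefined || statusCode < 500) {
        return response; // success or client error: return as-is
      }
      isRetryable = [502, 503, 504].includes(statusCode);
      errorMessage = `Cache service responded with ${statusCode}`;
    } catch (error) {
      isRetryable = true; // network-level failure
      errorMessage = (error as Error).message;
    }
    if (!isRetryable) break;
    await new Promise((resolve) => setTimeout(resolve, delayMs));
  }
  throw new Error(`${name} failed: ${errorMessage}`);
}
```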
+ * @readonly */ - async uploadBlockBlob(blobName, body, contentLength, options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-uploadBlockBlob", options, async (updatedOptions) => { - const blockBlobClient = this.getBlockBlobClient(blobName); - const response = await blockBlobClient.upload(body, contentLength, updatedOptions); - return { - blockBlobClient, - response - }; - }); + static get none() { + return new _AbortSignal(); } /** - * Marks the specified blob or snapshot for deletion. The blob is later deleted - * during garbage collection. Note that in order to delete a blob, you must delete - * all of its snapshots. You can delete both at the same time with the Delete - * Blob operation. - * @see https://learn.microsoft.com/rest/api/storageservices/delete-blob + * Added new "abort" event listener, only support "abort" event. * - * @param blobName - - * @param options - Options to Blob Delete operation. - * @returns Block blob deletion response data. + * @param _type - Only support "abort" event + * @param listener - The listener to be added */ - async deleteBlob(blobName, options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-deleteBlob", options, async (updatedOptions) => { - let blobClient = this.getBlobClient(blobName); - if (options.versionId) { - blobClient = blobClient.withVersion(options.versionId); - } - return blobClient.delete(updatedOptions); - }); + addEventListener(_type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + const listeners = listenersMap.get(this); + listeners.push(listener); } /** - * listBlobFlatSegment returns a single segment of blobs starting from the - * specified Marker. Use an empty Marker to start enumeration from the beginning. - * After getting a segment, process it, and then call listBlobsFlatSegment again - * (passing the the previously-returned Marker) to get the next segment. - * @see https://learn.microsoft.com/rest/api/storageservices/list-blobs + * Remove "abort" event listener, only support "abort" event. * - * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. - * @param options - Options to Container List Blob Flat Segment operation. 
+ * @param _type - Only support "abort" event + * @param listener - The listener to be removed */ - async listBlobFlatSegment(marker, options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-listBlobFlatSegment", options, async (updatedOptions) => { - const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.listBlobFlatSegment({ - marker, - ...options, - tracingOptions: updatedOptions.tracingOptions - })); - const wrappedResponse = { - ...response, - _response: { - ...response._response, - parsedBody: (0, utils_common_js_1.ConvertInternalResponseOfListBlobFlat)(response._response.parsedBody) - }, - // _response is made non-enumerable - segment: { - ...response.segment, - blobItems: response.segment.blobItems.map((blobItemInternal) => { - const blobItem = { - ...blobItemInternal, - name: (0, utils_common_js_1.BlobNameToString)(blobItemInternal.name), - tags: (0, utils_common_js_1.toTags)(blobItemInternal.blobTags), - objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(blobItemInternal.objectReplicationMetadata) - }; - return blobItem; - }) - } - }; - return wrappedResponse; - }); + removeEventListener(_type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + const listeners = listenersMap.get(this); + const index = listeners.indexOf(listener); + if (index > -1) { + listeners.splice(index, 1); + } } /** - * listBlobHierarchySegment returns a single segment of blobs starting from - * the specified Marker. Use an empty Marker to start enumeration from the - * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment - * again (passing the the previously-returned Marker) to get the next segment. - * @see https://learn.microsoft.com/rest/api/storageservices/list-blobs - * - * @param delimiter - The character or string used to define the virtual hierarchy - * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. - * @param options - Options to Container List Blob Hierarchy Segment operation. + * Dispatches a synthetic event to the AbortSignal. 
*/ - async listBlobHierarchySegment(delimiter, marker, options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-listBlobHierarchySegment", options, async (updatedOptions) => { - const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.listBlobHierarchySegment(delimiter, { - marker, - ...options, - tracingOptions: updatedOptions.tracingOptions - })); - const wrappedResponse = { - ...response, - _response: { - ...response._response, - parsedBody: (0, utils_common_js_1.ConvertInternalResponseOfListBlobHierarchy)(response._response.parsedBody) - }, - // _response is made non-enumerable - segment: { - ...response.segment, - blobItems: response.segment.blobItems.map((blobItemInternal) => { - const blobItem = { - ...blobItemInternal, - name: (0, utils_common_js_1.BlobNameToString)(blobItemInternal.name), - tags: (0, utils_common_js_1.toTags)(blobItemInternal.blobTags), - objectReplicationSourceProperties: (0, utils_common_js_1.parseObjectReplicationRecord)(blobItemInternal.objectReplicationMetadata) - }; - return blobItem; - }), - blobPrefixes: response.segment.blobPrefixes?.map((blobPrefixInternal) => { - const blobPrefix = { - ...blobPrefixInternal, - name: (0, utils_common_js_1.BlobNameToString)(blobPrefixInternal.name) - }; - return blobPrefix; - }) - } - }; - return wrappedResponse; + dispatchEvent(_event) { + throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); + } + }; + function abortSignal(signal) { + if (signal.aborted) { + return; + } + if (signal.onabort) { + signal.onabort.call(signal); + } + const listeners = listenersMap.get(signal); + if (listeners) { + listeners.slice().forEach((listener) => { + listener.call(signal, { type: "abort" }); }); } + abortedMap.set(signal, true); + } + var AbortError = class extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } + }; + var AbortController2 = class { + // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types + constructor(parentSignals) { + this._signal = new AbortSignal2(); + if (!parentSignals) { + return; + } + if (!Array.isArray(parentSignals)) { + parentSignals = arguments; + } + for (const parentSignal of parentSignals) { + if (parentSignal.aborted) { + this.abort(); + } else { + parentSignal.addEventListener("abort", () => { + this.abort(); + }); + } + } + } /** - * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. * - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the ContinuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The ContinuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to list blobs operation. 
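The constructor above is what makes linked cancellation work: every parent signal gets an "abort" listener that aborts the child controller. A small sketch against this polyfill's API:

```ts
import { AbortController } from "@azure/abort-controller";

const parent = new AbortController();
const child = new AbortController(parent.signal); // linked to the parent

child.signal.addEventListener("abort", () => {
  console.log("child aborted because a parent aborted");
});
parent.abort(); // propagates: the child's listener fires
```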
+ * @readonly */ - async *listSegments(marker, options = {}) { - let listBlobsFlatSegmentResponse; - if (!!marker || marker === void 0) { - do { - listBlobsFlatSegmentResponse = await this.listBlobFlatSegment(marker, options); - marker = listBlobsFlatSegmentResponse.continuationToken; - yield await listBlobsFlatSegmentResponse; - } while (marker); - } + get signal() { + return this._signal; } /** - * Returns an AsyncIterableIterator of {@link BlobItem} objects - * - * @param options - Options to list blobs operation. + * Signal that any operations passed this controller's associated abort signal + * to cancel any remaining work and throw an `AbortError`. */ - async *listItems(options = {}) { - let marker; - for await (const listBlobsFlatSegmentResponse of this.listSegments(marker, options)) { - yield* listBlobsFlatSegmentResponse.segment.blobItems; - } + abort() { + abortSignal(this._signal); } /** - * Returns an async iterable iterator to list all the blobs - * under the specified account. - * - * .byPage() returns an async iterable iterator to list the blobs in pages. - * - * ```ts snippet:ReadmeSampleListBlobs_Multiple - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; - * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); - * - * const containerName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); - * - * // Example using `for await` syntax - * let i = 1; - * const blobs = containerClient.listBlobsFlat(); - * for await (const blob of blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * - * // Example using `iter.next()` syntax - * i = 1; - * const iter = containerClient.listBlobsFlat(); - * let { value, done } = await iter.next(); - * while (!done) { - * console.log(`Blob ${i++}: ${value.name}`); - * ({ value, done } = await iter.next()); - * } - * - * // Example using `byPage()` syntax - * i = 1; - * for await (const page of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { - * for (const blob of page.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * - * // Example using paging with a marker - * i = 1; - * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * // Prints 2 blob names - * if (response.segment.blobItems) { - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * // Gets next marker - * let marker = response.continuationToken; - * // Passing next marker as continuationToken - * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; - * // Prints 10 blob names - * if (response.segment.blobItems) { - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * ``` - * - * @param options - Options to list blobs. - * @returns An asyncIterableIterator that supports paging. + * Creates a new AbortSignal instance that will abort after the provided ms. + * @param ms - Elapsed time in milliseconds to trigger an abort. 
*/ - listBlobsFlat(options = {}) { - const include = []; - if (options.includeCopy) { - include.push("copy"); - } - if (options.includeDeleted) { - include.push("deleted"); - } - if (options.includeMetadata) { - include.push("metadata"); - } - if (options.includeSnapshots) { - include.push("snapshots"); - } - if (options.includeVersions) { - include.push("versions"); - } - if (options.includeUncommitedBlobs) { - include.push("uncommittedblobs"); - } - if (options.includeTags) { - include.push("tags"); + static timeout(ms) { + const signal = new AbortSignal2(); + const timer = setTimeout(abortSignal, ms, signal); + if (typeof timer.unref === "function") { + timer.unref(); } - if (options.includeDeletedWithVersions) { - include.push("deletedwithversions"); + return signal; + } + }; + exports2.AbortController = AbortController2; + exports2.AbortError = AbortError; + exports2.AbortSignal = AbortSignal2; + } +}); + +// node_modules/@actions/cache/lib/internal/downloadUtils.js +var require_downloadUtils = __commonJS({ + "node_modules/@actions/cache/lib/internal/downloadUtils.js"(exports2) { + "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); } - if (options.includeImmutabilityPolicy) { - include.push("immutabilitypolicy"); + __setModuleDefault2(result, mod); + return result; + }; + })(); + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve2) { + resolve2(value); + }); + } + return new (P || (P = Promise))(function(resolve2, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } } - if (options.includeLegalHold) { - include.push("legalhold"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } } - if (options.prefix === "") { - options.prefix = void 0; + function step(result) { + result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); } - const updatedOptions = { - ...options, - ...include.length > 0 ? 
{ include } : {} - }; - const iter = this.listItems(updatedOptions); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: (settings = {}) => { - return this.listSegments(settings.continuationToken, { - maxPageSize: settings.maxPageSize, - ...updatedOptions - }); - } - }; + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.DownloadProgress = void 0; + exports2.downloadCacheHttpClient = downloadCacheHttpClient; + exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; + exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; + var core12 = __importStar2(require_core()); + var http_client_1 = require_lib(); + var storage_blob_1 = require_commonjs15(); + var buffer = __importStar2(require("buffer")); + var fs2 = __importStar2(require("fs")); + var stream = __importStar2(require("stream")); + var util = __importStar2(require("util")); + var utils = __importStar2(require_cacheUtils()); + var constants_1 = require_constants12(); + var requestUtils_1 = require_requestUtils(); + var abort_controller_1 = require_dist4(); + function pipeResponseToStream(response, output) { + return __awaiter2(this, void 0, void 0, function* () { + const pipeline = util.promisify(stream.pipeline); + yield pipeline(response.message, output); + }); + } + var DownloadProgress = class { + constructor(contentLength) { + this.contentLength = contentLength; + this.segmentIndex = 0; + this.segmentSize = 0; + this.segmentOffset = 0; + this.receivedBytes = 0; + this.displayedComplete = false; + this.startTime = Date.now(); } /** - * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse + * Progress to the next segment. Only call this method when the previous segment + * is complete. * - * @param delimiter - The character or string used to define the virtual hierarchy - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the ContinuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The ContinuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to list blobs operation. 
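The flag-to-`include` mapping in the removed `listBlobsFlat` above corresponds to the public listing options; a hedged example, with `containerClient` assumed to exist:

```ts
for await (const blob of containerClient.listBlobsFlat({
  prefix: "logs/", // an empty string is normalized to undefined above
  includeMetadata: true, // -> "metadata"
  includeSnapshots: true, // -> "snapshots"
  includeTags: true, // -> "tags"
})) {
  console.log(blob.name);
}
```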
+ * @param segmentSize the length of the next segment */ - async *listHierarchySegments(delimiter, marker, options = {}) { - let listBlobsHierarchySegmentResponse; - if (!!marker || marker === void 0) { - do { - listBlobsHierarchySegmentResponse = await this.listBlobHierarchySegment(delimiter, marker, options); - marker = listBlobsHierarchySegmentResponse.continuationToken; - yield await listBlobsHierarchySegmentResponse; - } while (marker); - } + nextSegment(segmentSize) { + this.segmentOffset = this.segmentOffset + this.segmentSize; + this.segmentIndex = this.segmentIndex + 1; + this.segmentSize = segmentSize; + this.receivedBytes = 0; + core12.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** - * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. + * Sets the number of bytes received for the current segment. * - * @param delimiter - The character or string used to define the virtual hierarchy - * @param options - Options to list blobs operation. + * @param receivedBytes the number of bytes received */ - async *listItemsByHierarchy(delimiter, options = {}) { - let marker; - for await (const listBlobsHierarchySegmentResponse of this.listHierarchySegments(delimiter, marker, options)) { - const segment = listBlobsHierarchySegmentResponse.segment; - if (segment.blobPrefixes) { - for (const prefix of segment.blobPrefixes) { - yield { - kind: "prefix", - ...prefix - }; - } - } - for (const blob of segment.blobItems) { - yield { kind: "blob", ...blob }; - } - } + setReceivedBytes(receivedBytes) { + this.receivedBytes = receivedBytes; } /** - * Returns an async iterable iterator to list all the blobs by hierarchy. - * under the specified account. - * - * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. 
- * - * ```ts snippet:ReadmeSampleListBlobsByHierarchy - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; - * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); - * - * const containerName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); - * - * // Example using `for await` syntax - * let i = 1; - * const blobs = containerClient.listBlobsByHierarchy("/"); - * for await (const blob of blobs) { - * if (blob.kind === "prefix") { - * console.log(`\tBlobPrefix: ${blob.name}`); - * } else { - * console.log(`\tBlobItem: name - ${blob.name}`); - * } - * } - * - * // Example using `iter.next()` syntax - * i = 1; - * const iter = containerClient.listBlobsByHierarchy("/"); - * let { value, done } = await iter.next(); - * while (!done) { - * if (value.kind === "prefix") { - * console.log(`\tBlobPrefix: ${value.name}`); - * } else { - * console.log(`\tBlobItem: name - ${value.name}`); - * } - * ({ value, done } = await iter.next()); - * } - * - * // Example using `byPage()` syntax - * i = 1; - * for await (const page of containerClient.listBlobsByHierarchy("/").byPage({ maxPageSize: 20 })) { - * const segment = page.segment; - * if (segment.blobPrefixes) { - * for (const prefix of segment.blobPrefixes) { - * console.log(`\tBlobPrefix: ${prefix.name}`); - * } - * } - * for (const blob of page.segment.blobItems) { - * console.log(`\tBlobItem: name - ${blob.name}`); - * } - * } - * - * // Example using paging with a marker - * i = 1; - * let iterator = containerClient.listBlobsByHierarchy("/").byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * // Prints 2 blob names - * if (response.blobPrefixes) { - * for (const prefix of response.blobPrefixes) { - * console.log(`\tBlobPrefix: ${prefix.name}`); - * } - * } - * if (response.segment.blobItems) { - * for (const blob of response.segment.blobItems) { - * console.log(`\tBlobItem: name - ${blob.name}`); - * } - * } - * // Gets next marker - * let marker = response.continuationToken; - * // Passing next marker as continuationToken - * iterator = containerClient - * .listBlobsByHierarchy("/") - * .byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; - * // Prints 10 blob names - * if (response.blobPrefixes) { - * for (const prefix of response.blobPrefixes) { - * console.log(`\tBlobPrefix: ${prefix.name}`); - * } - * } - * if (response.segment.blobItems) { - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * ``` - * - * @param delimiter - The character or string used to define the virtual hierarchy - * @param options - Options to list blobs operation. + * Returns the total number of bytes transferred. 
*/ - listBlobsByHierarchy(delimiter, options = {}) { - if (delimiter === "") { - throw new RangeError("delimiter should contain one or more characters"); - } - const include = []; - if (options.includeCopy) { - include.push("copy"); - } - if (options.includeDeleted) { - include.push("deleted"); - } - if (options.includeMetadata) { - include.push("metadata"); - } - if (options.includeSnapshots) { - include.push("snapshots"); - } - if (options.includeVersions) { - include.push("versions"); - } - if (options.includeUncommitedBlobs) { - include.push("uncommittedblobs"); - } - if (options.includeTags) { - include.push("tags"); - } - if (options.includeDeletedWithVersions) { - include.push("deletedwithversions"); - } - if (options.includeImmutabilityPolicy) { - include.push("immutabilitypolicy"); - } - if (options.includeLegalHold) { - include.push("legalhold"); - } - if (options.prefix === "") { - options.prefix = void 0; - } - const updatedOptions = { - ...options, - ...include.length > 0 ? { include } : {} - }; - const iter = this.listItemsByHierarchy(delimiter, updatedOptions); - return { - /** - * The next method, part of the iteration protocol - */ - async next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: (settings = {}) => { - return this.listHierarchySegments(delimiter, settings.continuationToken, { - maxPageSize: settings.maxPageSize, - ...updatedOptions - }); - } - }; + getTransferredBytes() { + return this.segmentOffset + this.receivedBytes; } /** - * The Filter Blobs operation enables callers to list blobs in the container whose tags - * match a given search expression. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. + * Returns true if the download is complete. 
*/ - async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-findBlobsByTagsSegment", options, async (updatedOptions) => { - const response = (0, utils_common_js_1.assertResponse)(await this.containerContext.filterBlobs({ - abortSignal: options.abortSignal, - where: tagFilterSqlExpression, - marker, - maxPageSize: options.maxPageSize, - tracingOptions: updatedOptions.tracingOptions - })); - const wrappedResponse = { - ...response, - _response: response._response, - // _response is made non-enumerable - blobs: response.blobs.map((blob) => { - let tagValue = ""; - if (blob.tags?.blobTagSet.length === 1) { - tagValue = blob.tags.blobTagSet[0].value; - } - return { ...blob, tags: (0, utils_common_js_1.toTags)(blob.tags), tagValue }; - }) - }; - return wrappedResponse; - }); + isDone() { + return this.getTransferredBytes() === this.contentLength; } /** - * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. + * Prints the current download stats. Once the download completes, this will print one + * last line and then stop. */ - async *findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { - let response; - if (!!marker || marker === void 0) { - do { - response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options); - response.blobs = response.blobs || []; - marker = response.continuationToken; - yield response; - } while (marker); + display() { + if (this.displayedComplete) { + return; + } + const transferredBytes = this.segmentOffset + this.receivedBytes; + const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); + const elapsedTime = Date.now() - this.startTime; + const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); + core12.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + if (this.isDone()) { + this.displayedComplete = true; } } /** - * Returns an AsyncIterableIterator for blobs. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to findBlobsByTagsItems. 
+ * Returns a function used to handle TransferProgressEvents. */ - async *findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { - let marker; - for await (const segment of this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)) { - yield* segment.blobs; - } + onProgress() { + return (progress) => { + this.setReceivedBytes(progress.loadedBytes); + }; } /** - * Returns an async iterable iterator to find all blobs with specified tag - * under the specified container. - * - * .byPage() returns an async iterable iterator to list the blobs in pages. - * - * Example using `for await` syntax: - * - * ```ts snippet:ReadmeSampleFindBlobsByTags - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; - * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); - * - * const containerName = ""; - * const containerClient = blobServiceClient.getContainerClient(containerName); - * - * // Example using `for await` syntax - * let i = 1; - * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * - * // Example using `iter.next()` syntax - * i = 1; - * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); - * let { value, done } = await iter.next(); - * while (!done) { - * console.log(`Blob ${i++}: ${value.name}`); - * ({ value, done } = await iter.next()); - * } - * - * // Example using `byPage()` syntax - * i = 1; - * for await (const page of containerClient - * .findBlobsByTags("tagkey='tagvalue'") - * .byPage({ maxPageSize: 20 })) { - * for (const blob of page.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * - * // Example using paging with a marker - * i = 1; - * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * // Prints 2 blob names - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * // Gets next marker - * let marker = response.continuationToken; - * // Passing next marker as continuationToken - * iterator = containerClient - * .findBlobsByTags("tagkey='tagvalue'") - * .byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; - * // Prints 10 blob names - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * ``` + * Starts the timer that displays the stats. * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to find blobs by tags. 
+ * @param delayInMs the delay between each write */ - findBlobsByTags(tagFilterSqlExpression, options = {}) { - const listSegmentOptions = { - ...options + startDisplayTimer(delayInMs = 1e3) { + const displayCallback = () => { + this.display(); + if (!this.isDone()) { + this.timeoutHandle = setTimeout(displayCallback, delayInMs); + } }; - const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: (settings = {}) => { - return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, { - maxPageSize: settings.maxPageSize, - ...listSegmentOptions + this.timeoutHandle = setTimeout(displayCallback, delayInMs); + } + /** + * Stops the timer that displays the stats. As this typically indicates the download + * is complete, this will display one last line, unless the last line has already + * been written. + */ + stopDisplayTimer() { + if (this.timeoutHandle) { + clearTimeout(this.timeoutHandle); + this.timeoutHandle = void 0; + } + this.display(); + } + }; + exports2.DownloadProgress = DownloadProgress; + function downloadCacheHttpClient(archiveLocation, archivePath) { + return __awaiter2(this, void 0, void 0, function* () { + const writeStream = fs2.createWriteStream(archivePath); + const httpClient = new http_client_1.HttpClient("actions/cache"); + const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () { + return httpClient.get(archiveLocation); + })); + downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { + downloadResponse.message.destroy(); + core12.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + }); + yield pipeResponseToStream(downloadResponse, writeStream); + const contentLengthHeader = downloadResponse.message.headers["content-length"]; + if (contentLengthHeader) { + const expectedLength = parseInt(contentLengthHeader); + const actualLength = utils.getArchiveFileSizeInBytes(archivePath); + if (actualLength !== expectedLength) { + throw new Error(`Incomplete download. 
Expected file size: ${expectedLength}, actual file size: ${actualLength}`); + } + } else { + core12.debug("Unable to validate download, no Content-Length header"); + } + }); + } + function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { + return __awaiter2(this, void 0, void 0, function* () { + var _a; + const archiveDescriptor = yield fs2.promises.open(archivePath, "w"); + const httpClient = new http_client_1.HttpClient("actions/cache", void 0, { + socketTimeout: options.timeoutInMs, + keepAlive: true + }); + try { + const res = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCacheMetadata", () => __awaiter2(this, void 0, void 0, function* () { + return yield httpClient.request("HEAD", archiveLocation, null, {}); + })); + const lengthHeader = res.message.headers["content-length"]; + if (lengthHeader === void 0 || lengthHeader === null) { + throw new Error("Content-Length not found on blob response"); + } + const length = parseInt(lengthHeader); + if (Number.isNaN(length)) { + throw new Error(`Could not interpret Content-Length: ${length}`); + } + const downloads = []; + const blockSize = 4 * 1024 * 1024; + for (let offset = 0; offset < length; offset += blockSize) { + const count = Math.min(blockSize, length - offset); + downloads.push({ + offset, + promiseGetter: () => __awaiter2(this, void 0, void 0, function* () { + return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count); + }) }); } - }; - } - /** - * The Get Account Information operation returns the sku name and account kind - * for the specified account. - * The Get Account Information operation is available on service versions beginning - * with version 2018-03-28. - * @see https://learn.microsoft.com/rest/api/storageservices/get-account-information - * - * @param options - Options to the Service Get Account Info operation. - * @returns Response data for the Service Get Account Info operation. - */ - async getAccountInfo(options = {}) { - return tracing_js_1.tracingClient.withSpan("ContainerClient-getAccountInfo", options, async (updatedOptions) => { - return (0, utils_common_js_1.assertResponse)(await this.containerContext.getAccountInfo({ - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions - })); - }); - } - getContainerNameFromUrl() { - let containerName; - try { - const parsedUrl = new URL(this.url); - if (parsedUrl.hostname.split(".")[1] === "blob") { - containerName = parsedUrl.pathname.split("/")[1]; - } else if ((0, utils_common_js_1.isIpEndpointStyle)(parsedUrl)) { - containerName = parsedUrl.pathname.split("/")[2]; - } else { - containerName = parsedUrl.pathname.split("/")[1]; + downloads.reverse(); + let actives = 0; + let bytesDownloaded = 0; + const progress = new DownloadProgress(length); + progress.startDisplayTimer(); + const progressFn = progress.onProgress(); + const activeDownloads = []; + let nextDownload; + const waitAndWrite = () => __awaiter2(this, void 0, void 0, function* () { + const segment = yield Promise.race(Object.values(activeDownloads)); + yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); + actives--; + delete activeDownloads[segment.offset]; + bytesDownloaded += segment.count; + progressFn({ loadedBytes: bytesDownloaded }); + }); + while (nextDownload = downloads.pop()) { + activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); + actives++; + if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? 
_a : 10)) { + yield waitAndWrite(); + } + } + while (actives > 0) { + yield waitAndWrite(); } - containerName = decodeURIComponent(containerName); - if (!containerName) { - throw new Error("Provided containerName is invalid."); + } finally { + httpClient.dispose(); + yield archiveDescriptor.close(); + } + }); + } + function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { + return __awaiter2(this, void 0, void 0, function* () { + const retries = 5; + let failures = 0; + while (true) { + try { + const timeout = 3e4; + const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); + if (typeof result === "string") { + throw new Error("downloadSegmentRetry failed due to timeout"); + } + return result; + } catch (err) { + if (failures >= retries) { + throw err; + } + failures++; } - return containerName; - } catch (error3) { - throw new Error("Unable to extract containerName with provided information."); } - } - /** - * Only available for ContainerClient constructed with a shared key credential. - * - * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the shared key credential of the client. - * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas - * - * @param options - Optional parameters. - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. - */ - generateSasUrl(options) { - return new Promise((resolve2) => { - if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { - throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + }); + } + function downloadSegment(httpClient, archiveLocation, offset, count) { + return __awaiter2(this, void 0, void 0, function* () { + const partRes = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCachePart", () => __awaiter2(this, void 0, void 0, function* () { + return yield httpClient.get(archiveLocation, { + Range: `bytes=${offset}-${offset + count - 1}` + }); + })); + if (!partRes.readBodyBuffer) { + throw new Error("Expected HttpClientResponse to implement readBodyBuffer"); + } + return { + offset, + count, + buffer: yield partRes.readBodyBuffer() + }; + }); + } + function downloadCacheStorageSDK(archiveLocation, archivePath, options) { + return __awaiter2(this, void 0, void 0, function* () { + var _a; + const client = new storage_blob_1.BlockBlobClient(archiveLocation, void 0, { + retryOptions: { + // Override the timeout used when downloading each 4 MB chunk + // The default is 2 min / MB, which is way too slow + tryTimeoutInMs: options.timeoutInMs } - const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ - containerName: this._containerName, - ...options - }, this.credential).toString(); - resolve2((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); }); + const properties = yield client.getProperties(); + const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; + if (contentLength < 0) { + core12.debug("Unable to determine content length, downloading file with http-client..."); + yield downloadCacheHttpClient(archiveLocation, archivePath); + } else { + const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); + const downloadProgress = new DownloadProgress(contentLength); + const fd = fs2.openSync(archivePath, "w"); + try { + downloadProgress.startDisplayTimer(); + const controller = new abort_controller_1.AbortController(); + const abortSignal = controller.signal; + while (!downloadProgress.isDone()) { + const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; + const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); + downloadProgress.nextSegment(segmentSize); + const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 36e5, client.downloadToBuffer(segmentStart, segmentSize, { + abortSignal, + concurrency: options.downloadConcurrency, + onProgress: downloadProgress.onProgress() + })); + if (result === "timeout") { + controller.abort(); + throw new Error("Aborting cache download as the download time exceeded the timeout."); + } else if (Buffer.isBuffer(result)) { + fs2.writeFileSync(fd, result); + } + } + } finally { + downloadProgress.stopDisplayTimer(); + fs2.closeSync(fd); + } + } + }); + } + var promiseWithTimeout = (timeoutMs, promise) => __awaiter2(void 0, void 0, void 0, function* () { + let timeoutHandle; + const timeoutPromise = new Promise((resolve2) => { + timeoutHandle = setTimeout(() => resolve2("timeout"), timeoutMs); + }); + return Promise.race([promise, timeoutPromise]).then((result) => { + clearTimeout(timeoutHandle); + return result; + }); + }); + } +}); + +// node_modules/@actions/cache/lib/options.js +var require_options = __commonJS({ + "node_modules/@actions/cache/lib/options.js"(exports2) { + "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; } - /** - * Only available for ContainerClient constructed with a shared key credential. - * - * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI - * based on the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. - * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas - * - * @param options - Optional parameters. - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. - */ - /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ - generateSasStringToSign(options) { - if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { - throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + } + __setModuleDefault2(result, mod); + return result; + }; + })(); + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getUploadOptions = getUploadOptions; + exports2.getDownloadOptions = getDownloadOptions; + var core12 = __importStar2(require_core()); + function getUploadOptions(copy) { + const result = { + useAzureSdk: false, + uploadConcurrency: 4, + uploadChunkSize: 32 * 1024 * 1024 + }; + if (copy) { + if (typeof copy.useAzureSdk === "boolean") { + result.useAzureSdk = copy.useAzureSdk; + } + if (typeof copy.uploadConcurrency === "number") { + result.uploadConcurrency = copy.uploadConcurrency; + } + if (typeof copy.uploadChunkSize === "number") { + result.uploadChunkSize = copy.uploadChunkSize; } - return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ - containerName: this._containerName, - ...options - }, this.credential).stringToSign; } - /** - * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the input user delegation key. - * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas - * - * @param options - Optional parameters. - * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. - */ - generateUserDelegationSasUrl(options, userDelegationKey) { - return new Promise((resolve2) => { - const sas = (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParameters)({ - containerName: this._containerName, - ...options - }, userDelegationKey, this.accountName).toString(); - resolve2((0, utils_common_js_1.appendToURLQuery)(this.url, sas)); - }); + result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; + result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? 
Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; + core12.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core12.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core12.debug(`Upload chunk size: ${result.uploadChunkSize}`); + return result; + } + function getDownloadOptions(copy) { + const result = { + useAzureSdk: false, + concurrentBlobDownloads: true, + downloadConcurrency: 8, + timeoutInMs: 3e4, + segmentTimeoutInMs: 6e5, + lookupOnly: false + }; + if (copy) { + if (typeof copy.useAzureSdk === "boolean") { + result.useAzureSdk = copy.useAzureSdk; + } + if (typeof copy.concurrentBlobDownloads === "boolean") { + result.concurrentBlobDownloads = copy.concurrentBlobDownloads; + } + if (typeof copy.downloadConcurrency === "number") { + result.downloadConcurrency = copy.downloadConcurrency; + } + if (typeof copy.timeoutInMs === "number") { + result.timeoutInMs = copy.timeoutInMs; + } + if (typeof copy.segmentTimeoutInMs === "number") { + result.segmentTimeoutInMs = copy.segmentTimeoutInMs; + } + if (typeof copy.lookupOnly === "boolean") { + result.lookupOnly = copy.lookupOnly; + } } - /** - * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI - * based on the client properties and parameters passed in. The SAS is signed by the input user delegation key. - * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-a-service-sas - * - * @param options - Optional parameters. - * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. - */ - generateUserDelegationSasStringToSign(options, userDelegationKey) { - return (0, BlobSASSignatureValues_js_1.generateBlobSASQueryParametersInternal)({ - containerName: this._containerName, - ...options - }, userDelegationKey, this.accountName).stringToSign; + const segmentDownloadTimeoutMins = process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]; + if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { + result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - /** - * Creates a BlobBatchClient object to conduct batch operations. - * - * @see https://learn.microsoft.com/rest/api/storageservices/blob-batch - * - * @returns A new BlobBatchClient object for this container. 
- */ - getBlobBatchClient() { - return new BlobBatchClient_js_1.BlobBatchClient(this.url, this.pipeline); + core12.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core12.debug(`Download concurrency: ${result.downloadConcurrency}`); + core12.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core12.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core12.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core12.debug(`Lookup only: ${result.lookupOnly}`); + return result; + } + } +}); + +// node_modules/@actions/cache/lib/internal/config.js +var require_config = __commonJS({ + "node_modules/@actions/cache/lib/internal/config.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.isGhes = isGhes; + exports2.getCacheServiceVersion = getCacheServiceVersion; + exports2.getCacheServiceURL = getCacheServiceURL; + function isGhes() { + const ghUrl = new URL(process.env["GITHUB_SERVER_URL"] || "https://github.com"); + const hostname = ghUrl.hostname.trimEnd().toUpperCase(); + const isGitHubHost = hostname === "GITHUB.COM"; + const isGheHost = hostname.endsWith(".GHE.COM"); + const isLocalHost = hostname.endsWith(".LOCALHOST"); + return !isGitHubHost && !isGheHost && !isLocalHost; + } + function getCacheServiceVersion() { + if (isGhes()) + return "v1"; + return process.env["ACTIONS_CACHE_SERVICE_V2"] ? "v2" : "v1"; + } + function getCacheServiceURL() { + const version = getCacheServiceVersion(); + switch (version) { + case "v1": + return process.env["ACTIONS_CACHE_URL"] || process.env["ACTIONS_RESULTS_URL"] || ""; + case "v2": + return process.env["ACTIONS_RESULTS_URL"] || ""; + default: + throw new Error(`Unsupported cache service version: ${version}`); + } + } + } +}); + +// node_modules/@actions/cache/package.json +var require_package2 = __commonJS({ + "node_modules/@actions/cache/package.json"(exports2, module2) { + module2.exports = { + name: "@actions/cache", + version: "5.0.5", + preview: true, + description: "Actions cache lib", + keywords: [ + "github", + "actions", + "cache" + ], + homepage: "https://github.com/actions/toolkit/tree/main/packages/cache", + license: "MIT", + main: "lib/cache.js", + types: "lib/cache.d.ts", + directories: { + lib: "lib", + test: "__tests__" + }, + files: [ + "lib", + "!.DS_Store" + ], + publishConfig: { + access: "public" + }, + repository: { + type: "git", + url: "git+https://github.com/actions/toolkit.git", + directory: "packages/cache" + }, + scripts: { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + test: 'echo "Error: run tests from root" && exit 1', + tsc: "tsc" + }, + bugs: { + url: "https://github.com/actions/toolkit/issues" + }, + dependencies: { + "@actions/core": "^2.0.0", + "@actions/exec": "^2.0.0", + "@actions/glob": "^0.5.1", + "@protobuf-ts/runtime-rpc": "^2.11.1", + "@actions/http-client": "^3.0.2", + "@actions/io": "^2.0.0", + "@azure/abort-controller": "^1.1.0", + "@azure/core-rest-pipeline": "^1.22.0", + "@azure/storage-blob": "^12.29.1", + semver: "^6.3.1" + }, + devDependencies: { + "@types/node": "^24.1.0", + "@types/semver": "^6.0.0", + "@protobuf-ts/plugin": "^2.9.4", + typescript: "^5.2.2" + }, + overrides: { + "uri-js": "npm:uri-js-replace@^1.0.1", + "node-fetch": "^3.3.2" } }; - exports2.ContainerClient = ContainerClient; } }); -// node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASPermissions.js -var require_AccountSASPermissions = __commonJS({ 
- "node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASPermissions.js"(exports2) { +// node_modules/@actions/cache/lib/internal/shared/user-agent.js +var require_user_agent = __commonJS({ + "node_modules/@actions/cache/lib/internal/shared/user-agent.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AccountSASPermissions = void 0; - var AccountSASPermissions = class _AccountSASPermissions { - /** - * Parse initializes the AccountSASPermissions fields from a string. - * - * @param permissions - - */ - static parse(permissions) { - const accountSASPermissions = new _AccountSASPermissions(); - for (const c of permissions) { - switch (c) { - case "r": - accountSASPermissions.read = true; - break; - case "w": - accountSASPermissions.write = true; - break; - case "d": - accountSASPermissions.delete = true; - break; - case "x": - accountSASPermissions.deleteVersion = true; - break; - case "l": - accountSASPermissions.list = true; - break; - case "a": - accountSASPermissions.add = true; - break; - case "c": - accountSASPermissions.create = true; - break; - case "u": - accountSASPermissions.update = true; - break; - case "p": - accountSASPermissions.process = true; - break; - case "t": - accountSASPermissions.tag = true; - break; - case "f": - accountSASPermissions.filter = true; - break; - case "i": - accountSASPermissions.setImmutabilityPolicy = true; - break; - case "y": - accountSASPermissions.permanentDelete = true; - break; - default: - throw new RangeError(`Invalid permission character: ${c}`); - } + exports2.getUserAgentString = getUserAgentString; + var packageJson = require_package2(); + function getUserAgentString() { + return `@actions/cache-${packageJson.version}`; + } + } +}); + +// node_modules/@actions/cache/lib/internal/cacheHttpClient.js +var require_cacheHttpClient = __commonJS({ + "node_modules/@actions/cache/lib/internal/cacheHttpClient.js"(exports2) { + "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); } - return accountSASPermissions; + __setModuleDefault2(result, mod); + return result; + }; + })(); + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? 
value : new P(function(resolve2) { + resolve2(value); + }); } - /** - * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it - * and boolean values for them. - * - * @param permissionLike - - */ - static from(permissionLike) { - const accountSASPermissions = new _AccountSASPermissions(); - if (permissionLike.read) { - accountSASPermissions.read = true; - } - if (permissionLike.write) { - accountSASPermissions.write = true; - } - if (permissionLike.delete) { - accountSASPermissions.delete = true; - } - if (permissionLike.deleteVersion) { - accountSASPermissions.deleteVersion = true; - } - if (permissionLike.filter) { - accountSASPermissions.filter = true; - } - if (permissionLike.tag) { - accountSASPermissions.tag = true; - } - if (permissionLike.list) { - accountSASPermissions.list = true; - } - if (permissionLike.add) { - accountSASPermissions.add = true; - } - if (permissionLike.create) { - accountSASPermissions.create = true; - } - if (permissionLike.update) { - accountSASPermissions.update = true; - } - if (permissionLike.process) { - accountSASPermissions.process = true; + return new (P || (P = Promise))(function(resolve2, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } } - if (permissionLike.setImmutabilityPolicy) { - accountSASPermissions.setImmutabilityPolicy = true; + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } } - if (permissionLike.permanentDelete) { - accountSASPermissions.permanentDelete = true; + function step(result) { + result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); } - return accountSASPermissions; + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getCacheEntry = getCacheEntry; + exports2.downloadCache = downloadCache; + exports2.reserveCache = reserveCache; + exports2.saveCache = saveCache3; + var core12 = __importStar2(require_core()); + var http_client_1 = require_lib(); + var auth_1 = require_auth(); + var fs2 = __importStar2(require("fs")); + var url_1 = require("url"); + var utils = __importStar2(require_cacheUtils()); + var uploadUtils_1 = require_uploadUtils(); + var downloadUtils_1 = require_downloadUtils(); + var options_1 = require_options(); + var requestUtils_1 = require_requestUtils(); + var config_1 = require_config(); + var user_agent_1 = require_user_agent(); + function getCacheApiUrl(resource) { + const baseUrl = (0, config_1.getCacheServiceURL)(); + if (!baseUrl) { + throw new Error("Cache Service Url not found, unable to restore cache."); } - /** - * Permission to read resources and list queues and tables granted. - */ - read = false; - /** - * Permission to write resources granted. - */ - write = false; - /** - * Permission to delete blobs and files granted. - */ - delete = false; - /** - * Permission to delete versions granted. - */ - deleteVersion = false; - /** - * Permission to list blob containers, blobs, shares, directories, and files granted. - */ - list = false; - /** - * Permission to add messages, table entities, and append to blobs granted. - */ - add = false; - /** - * Permission to create blobs and files granted. - */ - create = false; - /** - * Permissions to update messages and table entities granted. - */ - update = false; - /** - * Permission to get and delete messages granted. 
- */ - process = false; - /** - * Specfies Tag access granted. - */ - tag = false; - /** - * Permission to filter blobs. - */ - filter = false; - /** - * Permission to set immutability policy. - */ - setImmutabilityPolicy = false; - /** - * Specifies that Permanent Delete is permitted. - */ - permanentDelete = false; - /** - * Produces the SAS permissions string for an Azure Storage account. - * Call this method to set AccountSASSignatureValues Permissions field. - * - * Using this method will guarantee the resource types are in - * an order accepted by the service. - * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-an-account-sas - * - */ - toString() { - const permissions = []; - if (this.read) { - permissions.push("r"); - } - if (this.write) { - permissions.push("w"); - } - if (this.delete) { - permissions.push("d"); - } - if (this.deleteVersion) { - permissions.push("x"); - } - if (this.filter) { - permissions.push("f"); + const url = `${baseUrl}_apis/artifactcache/${resource}`; + core12.debug(`Resource Url: ${url}`); + return url; + } + function createAcceptHeader(type2, apiVersion) { + return `${type2};api-version=${apiVersion}`; + } + function getRequestOptions() { + const requestOptions = { + headers: { + Accept: createAcceptHeader("application/json", "6.0-preview.1") } - if (this.tag) { - permissions.push("t"); + }; + return requestOptions; + } + function createHttpClient() { + const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; + const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); + return new http_client_1.HttpClient((0, user_agent_1.getUserAgentString)(), [bearerCredentialHandler], getRequestOptions()); + } + function getCacheEntry(keys, paths, options) { + return __awaiter2(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); + const resource = `cache?keys=${encodeURIComponent(keys.join(","))}&version=${version}`; + const response = yield (0, requestUtils_1.retryTypedResponse)("getCacheEntry", () => __awaiter2(this, void 0, void 0, function* () { + return httpClient.getJson(getCacheApiUrl(resource)); + })); + if (response.statusCode === 204) { + if (core12.isDebug()) { + yield printCachesListForDiagnostics(keys[0], httpClient, version); + } + return null; } - if (this.list) { - permissions.push("l"); + if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) { + throw new Error(`Cache service responded with ${response.statusCode}`); } - if (this.add) { - permissions.push("a"); + const cacheResult = response.result; + const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? 
void 0 : cacheResult.archiveLocation; + if (!cacheDownloadUrl) { + throw new Error("Cache not found."); } - if (this.create) { - permissions.push("c"); + core12.setSecret(cacheDownloadUrl); + core12.debug(`Cache Result:`); + core12.debug(JSON.stringify(cacheResult)); + return cacheResult; + }); + } + function printCachesListForDiagnostics(key, httpClient, version) { + return __awaiter2(this, void 0, void 0, function* () { + const resource = `caches?key=${encodeURIComponent(key)}`; + const response = yield (0, requestUtils_1.retryTypedResponse)("listCache", () => __awaiter2(this, void 0, void 0, function* () { + return httpClient.getJson(getCacheApiUrl(resource)); + })); + if (response.statusCode === 200) { + const cacheListResult = response.result; + const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; + if (totalCount && totalCount > 0) { + core12.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key +Other caches with similar key:`); + for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { + core12.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); + } + } } - if (this.update) { - permissions.push("u"); + }); + } + function downloadCache(archiveLocation, archivePath, options) { + return __awaiter2(this, void 0, void 0, function* () { + const archiveUrl = new url_1.URL(archiveLocation); + const downloadOptions = (0, options_1.getDownloadOptions)(options); + if (archiveUrl.hostname.endsWith(".blob.core.windows.net")) { + if (downloadOptions.useAzureSdk) { + yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); + } else if (downloadOptions.concurrentBlobDownloads) { + yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); + } else { + yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); + } + } else { + yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); } - if (this.process) { - permissions.push("p"); + }); + } + function reserveCache(key, paths, options) { + return __awaiter2(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); + const reserveCacheRequest = { + key, + version, + cacheSize: options === null || options === void 0 ? 
void 0 : options.cacheSize + }; + const response = yield (0, requestUtils_1.retryTypedResponse)("reserveCache", () => __awaiter2(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest); + })); + return response; + }); + } + function getContentRange(start, end) { + return `bytes ${start}-${end}/*`; + } + function uploadChunk(httpClient, resourceUrl, openStream, start, end) { + return __awaiter2(this, void 0, void 0, function* () { + core12.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + const additionalHeaders = { + "Content-Type": "application/octet-stream", + "Content-Range": getContentRange(start, end) + }; + const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter2(this, void 0, void 0, function* () { + return httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders); + })); + if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) { + throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); } - if (this.setImmutabilityPolicy) { - permissions.push("i"); + }); + } + function uploadFile(httpClient, cacheId, archivePath, options) { + return __awaiter2(this, void 0, void 0, function* () { + const fileSize = utils.getArchiveFileSizeInBytes(archivePath); + const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); + const fd = fs2.openSync(archivePath, "r"); + const uploadOptions = (0, options_1.getUploadOptions)(options); + const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); + const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); + const parallelUploads = [...new Array(concurrency).keys()]; + core12.debug("Awaiting all uploads"); + let offset = 0; + try { + yield Promise.all(parallelUploads.map(() => __awaiter2(this, void 0, void 0, function* () { + while (offset < fileSize) { + const chunkSize = Math.min(fileSize - offset, maxChunkSize); + const start = offset; + const end = offset + chunkSize - 1; + offset += maxChunkSize; + yield uploadChunk(httpClient, resourceUrl, () => fs2.createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }).on("error", (error3) => { + throw new Error(`Cache upload failed because file read failed with ${error3.message}`); + }), start, end); + } + }))); + } finally { + fs2.closeSync(fd); } - if (this.permanentDelete) { - permissions.push("y"); + return; + }); + } + function commitCache(httpClient, cacheId, filesize) { + return __awaiter2(this, void 0, void 0, function* () { + const commitCacheRequest = { size: filesize }; + return yield (0, requestUtils_1.retryTypedResponse)("commitCache", () => __awaiter2(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); + })); + }); + } + function saveCache3(cacheId, archivePath, signedUploadURL, options) { + return __awaiter2(this, void 0, void 0, function* () { + const uploadOptions = (0, options_1.getUploadOptions)(options); + if (uploadOptions.useAzureSdk) { + if (!signedUploadURL) { + throw new Error("Azure Storage SDK can only be used when a signed URL is provided."); + } + yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); + } else { + const httpClient = createHttpClient(); + 
core12.debug("Upload cache"); + yield uploadFile(httpClient, cacheId, archivePath, options); + core12.debug("Commiting cache"); + const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); + core12.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); + if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { + throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); + } + core12.info("Cache saved successfully"); } - return permissions.join(""); + }); + } + } +}); + +// node_modules/@protobuf-ts/runtime/build/commonjs/json-typings.js +var require_json_typings = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/json-typings.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.isJsonObject = exports2.typeofJsonValue = void 0; + function typeofJsonValue(value) { + let t = typeof value; + if (t == "object") { + if (Array.isArray(value)) + return "array"; + if (value === null) + return "null"; } - }; - exports2.AccountSASPermissions = AccountSASPermissions; + return t; + } + exports2.typeofJsonValue = typeofJsonValue; + function isJsonObject(value) { + return value !== null && typeof value == "object" && !Array.isArray(value); + } + exports2.isJsonObject = isJsonObject; } }); -// node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASResourceTypes.js -var require_AccountSASResourceTypes = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASResourceTypes.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/base64.js +var require_base642 = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/base64.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AccountSASResourceTypes = void 0; - var AccountSASResourceTypes = class _AccountSASResourceTypes { - /** - * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an - * Error if it encounters a character that does not correspond to a valid resource type. 
- * - * @param resourceTypes - - */ - static parse(resourceTypes) { - const accountSASResourceTypes = new _AccountSASResourceTypes(); - for (const c of resourceTypes) { - switch (c) { - case "s": - accountSASResourceTypes.service = true; - break; - case "c": - accountSASResourceTypes.container = true; - break; - case "o": - accountSASResourceTypes.object = true; - break; + exports2.base64encode = exports2.base64decode = void 0; + var encTable = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split(""); + var decTable = []; + for (let i = 0; i < encTable.length; i++) + decTable[encTable[i].charCodeAt(0)] = i; + decTable["-".charCodeAt(0)] = encTable.indexOf("+"); + decTable["_".charCodeAt(0)] = encTable.indexOf("/"); + function base64decode(base64Str) { + let es = base64Str.length * 3 / 4; + if (base64Str[base64Str.length - 2] == "=") + es -= 2; + else if (base64Str[base64Str.length - 1] == "=") + es -= 1; + let bytes = new Uint8Array(es), bytePos = 0, groupPos = 0, b, p = 0; + for (let i = 0; i < base64Str.length; i++) { + b = decTable[base64Str.charCodeAt(i)]; + if (b === void 0) { + switch (base64Str[i]) { + case "=": + groupPos = 0; + // reset state when padding found + case "\n": + case "\r": + case " ": + case " ": + continue; + // skip white-space, and padding default: - throw new RangeError(`Invalid resource type: ${c}`); + throw Error(`invalid base64 string.`); } } - return accountSASResourceTypes; - } - /** - * Permission to access service level APIs granted. - */ - service = false; - /** - * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. - */ - container = false; - /** - * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. - */ - object = false; - /** - * Converts the given resource types to a string. 
- * - * @see https://learn.microsoft.com/rest/api/storageservices/constructing-an-account-sas - * - */ - toString() { - const resourceTypes = []; - if (this.service) { - resourceTypes.push("s"); - } - if (this.container) { - resourceTypes.push("c"); + switch (groupPos) { + case 0: + p = b; + groupPos = 1; + break; + case 1: + bytes[bytePos++] = p << 2 | (b & 48) >> 4; + p = b; + groupPos = 2; + break; + case 2: + bytes[bytePos++] = (p & 15) << 4 | (b & 60) >> 2; + p = b; + groupPos = 3; + break; + case 3: + bytes[bytePos++] = (p & 3) << 6 | b; + groupPos = 0; + break; } - if (this.object) { - resourceTypes.push("o"); + } + if (groupPos == 1) + throw Error(`invalid base64 string.`); + return bytes.subarray(0, bytePos); + } + exports2.base64decode = base64decode; + function base64encode(bytes) { + let base64 = "", groupPos = 0, b, p = 0; + for (let i = 0; i < bytes.length; i++) { + b = bytes[i]; + switch (groupPos) { + case 0: + base64 += encTable[b >> 2]; + p = (b & 3) << 4; + groupPos = 1; + break; + case 1: + base64 += encTable[p | b >> 4]; + p = (b & 15) << 2; + groupPos = 2; + break; + case 2: + base64 += encTable[p | b >> 6]; + base64 += encTable[b & 63]; + groupPos = 0; + break; } - return resourceTypes.join(""); } - }; - exports2.AccountSASResourceTypes = AccountSASResourceTypes; + if (groupPos) { + base64 += encTable[p]; + base64 += "="; + if (groupPos == 1) + base64 += "="; + } + return base64; + } + exports2.base64encode = base64encode; } }); -// node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASServices.js -var require_AccountSASServices = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASServices.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/protobufjs-utf8.js +var require_protobufjs_utf8 = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/protobufjs-utf8.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.AccountSASServices = void 0; - var AccountSASServices = class _AccountSASServices { - /** - * Creates an {@link AccountSASServices} from the specified services string. This method will throw an - * Error if it encounters a character that does not correspond to a valid service. 
- * - * @param services - - */ - static parse(services) { - const accountSASServices = new _AccountSASServices(); - for (const c of services) { - switch (c) { - case "b": - accountSASServices.blob = true; - break; - case "f": - accountSASServices.file = true; - break; - case "q": - accountSASServices.queue = true; - break; - case "t": - accountSASServices.table = true; - break; - default: - throw new RangeError(`Invalid service character: ${c}`); - } + exports2.utf8read = void 0; + var fromCharCodes = (chunk) => String.fromCharCode.apply(String, chunk); + function utf8read(bytes) { + if (bytes.length < 1) + return ""; + let pos = 0, parts = [], chunk = [], i = 0, t; + let len = bytes.length; + while (pos < len) { + t = bytes[pos++]; + if (t < 128) + chunk[i++] = t; + else if (t > 191 && t < 224) + chunk[i++] = (t & 31) << 6 | bytes[pos++] & 63; + else if (t > 239 && t < 365) { + t = ((t & 7) << 18 | (bytes[pos++] & 63) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63) - 65536; + chunk[i++] = 55296 + (t >> 10); + chunk[i++] = 56320 + (t & 1023); + } else + chunk[i++] = (t & 15) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63; + if (i > 8191) { + parts.push(fromCharCodes(chunk)); + i = 0; } - return accountSASServices; } - /** - * Permission to access blob resources granted. - */ - blob = false; - /** - * Permission to access file resources granted. - */ - file = false; - /** - * Permission to access queue resources granted. - */ - queue = false; - /** - * Permission to access table resources granted. - */ - table = false; - /** - * Converts the given services to a string. - * - */ - toString() { - const services = []; - if (this.blob) { - services.push("b"); - } - if (this.table) { - services.push("t"); - } - if (this.queue) { - services.push("q"); - } - if (this.file) { - services.push("f"); - } - return services.join(""); + if (parts.length) { + if (i) + parts.push(fromCharCodes(chunk.slice(0, i))); + return parts.join(""); } - }; - exports2.AccountSASServices = AccountSASServices; + return fromCharCodes(chunk.slice(0, i)); + } + exports2.utf8read = utf8read; } }); -// node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASSignatureValues.js -var require_AccountSASSignatureValues = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/sas/AccountSASSignatureValues.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/binary-format-contract.js +var require_binary_format_contract = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/binary-format-contract.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.generateAccountSASQueryParameters = generateAccountSASQueryParameters; - exports2.generateAccountSASQueryParametersInternal = generateAccountSASQueryParametersInternal; - var AccountSASPermissions_js_1 = require_AccountSASPermissions(); - var AccountSASResourceTypes_js_1 = require_AccountSASResourceTypes(); - var AccountSASServices_js_1 = require_AccountSASServices(); - var SasIPRange_js_1 = require_SasIPRange(); - var SASQueryParameters_js_1 = require_SASQueryParameters(); - var constants_js_1 = require_constants15(); - var utils_common_js_1 = require_utils_common(); - function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { - return generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential).sasQueryParameters; - } - function generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential) { - 
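Note: utf8read above is protobufjs's allocation-light UTF-8 decoder. It decodes 1- to 4-byte sequences by hand, emits code points above U+FFFF as UTF-16 surrogate pairs, and flushes through String.fromCharCode.apply in chunks of at most 8191 code units, since very large argument lists can overflow an engine's apply limit. A quick sanity check of what it produces (sketch; assumes a runtime with TextDecoder):

```js
// "h€😀" exercises 1-, 3- and 4-byte UTF-8 sequences. utf8read returns the
// same string TextDecoder does, with U+1F600 emitted as the surrogate
// pair 0xD83D 0xDE00.
const utf8 = new Uint8Array([0x68, 0xe2, 0x82, 0xac, 0xf0, 0x9f, 0x98, 0x80]);
console.log(new TextDecoder().decode(utf8)); // "h€😀"
```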
const version = accountSASSignatureValues.version ? accountSASSignatureValues.version : constants_js_1.SERVICE_VERSION; - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.setImmutabilityPolicy && version < "2020-08-04") { - throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); - } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.deleteVersion && version < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); - } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.permanentDelete && version < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); - } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.tag && version < "2019-12-12") { - throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); - } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.filter && version < "2019-12-12") { - throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); - } - if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { - throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); - } - const parsedPermissions = AccountSASPermissions_js_1.AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); - const parsedServices = AccountSASServices_js_1.AccountSASServices.parse(accountSASSignatureValues.services).toString(); - const parsedResourceTypes = AccountSASResourceTypes_js_1.AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); - let stringToSign; - if (version >= "2020-12-06") { - stringToSign = [ - sharedKeyCredential.accountName, - parsedPermissions, - parsedServices, - parsedResourceTypes, - accountSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.startsOn, false) : "", - (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.expiresOn, false), - accountSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(accountSASSignatureValues.ipRange) : "", - accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", - version, - accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", - "" - // Account SAS requires an additional newline character - ].join("\n"); - } else { - stringToSign = [ - sharedKeyCredential.accountName, - parsedPermissions, - parsedServices, - parsedResourceTypes, - accountSASSignatureValues.startsOn ? (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.startsOn, false) : "", - (0, utils_common_js_1.truncatedISO8061Date)(accountSASSignatureValues.expiresOn, false), - accountSASSignatureValues.ipRange ? (0, SasIPRange_js_1.ipRangeToString)(accountSASSignatureValues.ipRange) : "", - accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : "", - version, - "" - // Account SAS requires an additional newline character - ].join("\n"); - } - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return { - sasQueryParameters: new SASQueryParameters_js_1.SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, accountSASSignatureValues.encryptionScope), - stringToSign + exports2.WireType = exports2.mergeBinaryOptions = exports2.UnknownFieldHandler = void 0; + var UnknownFieldHandler; + (function(UnknownFieldHandler2) { + UnknownFieldHandler2.symbol = /* @__PURE__ */ Symbol.for("protobuf-ts/unknown"); + UnknownFieldHandler2.onRead = (typeName, message, fieldNo, wireType, data) => { + let container = is(message) ? message[UnknownFieldHandler2.symbol] : message[UnknownFieldHandler2.symbol] = []; + container.push({ no: fieldNo, wireType, data }); + }; + UnknownFieldHandler2.onWrite = (typeName, message, writer) => { + for (let { no, wireType, data } of UnknownFieldHandler2.list(message)) + writer.tag(no, wireType).raw(data); + }; + UnknownFieldHandler2.list = (message, fieldNo) => { + if (is(message)) { + let all = message[UnknownFieldHandler2.symbol]; + return fieldNo ? all.filter((uf) => uf.no == fieldNo) : all; + } + return []; }; + UnknownFieldHandler2.last = (message, fieldNo) => UnknownFieldHandler2.list(message, fieldNo).slice(-1)[0]; + const is = (message) => message && Array.isArray(message[UnknownFieldHandler2.symbol]); + })(UnknownFieldHandler = exports2.UnknownFieldHandler || (exports2.UnknownFieldHandler = {})); + function mergeBinaryOptions(a, b) { + return Object.assign(Object.assign({}, a), b); } + exports2.mergeBinaryOptions = mergeBinaryOptions; + var WireType; + (function(WireType2) { + WireType2[WireType2["Varint"] = 0] = "Varint"; + WireType2[WireType2["Bit64"] = 1] = "Bit64"; + WireType2[WireType2["LengthDelimited"] = 2] = "LengthDelimited"; + WireType2[WireType2["StartGroup"] = 3] = "StartGroup"; + WireType2[WireType2["EndGroup"] = 4] = "EndGroup"; + WireType2[WireType2["Bit32"] = 5] = "Bit32"; + })(WireType = exports2.WireType || (exports2.WireType = {})); } }); -// node_modules/@azure/storage-blob/dist/commonjs/BlobServiceClient.js -var require_BlobServiceClient = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/BlobServiceClient.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/goog-varint.js +var require_goog_varint = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/goog-varint.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BlobServiceClient = void 0; - var core_auth_1 = require_commonjs7(); - var core_rest_pipeline_1 = require_commonjs6(); - var core_util_1 = require_commonjs4(); - var Pipeline_js_1 = require_Pipeline(); - var ContainerClient_js_1 = require_ContainerClient(); - var utils_common_js_1 = require_utils_common(); - var StorageSharedKeyCredential_js_1 = require_StorageSharedKeyCredential(); - var AnonymousCredential_js_1 = require_AnonymousCredential(); - var utils_common_js_2 = require_utils_common(); - var tracing_js_1 = require_tracing(); - var BlobBatchClient_js_1 = require_BlobBatchClient(); - var StorageClient_js_1 = require_StorageClient(); - var 
AccountSASPermissions_js_1 = require_AccountSASPermissions(); - var AccountSASSignatureValues_js_1 = require_AccountSASSignatureValues(); - var AccountSASServices_js_1 = require_AccountSASServices(); - var BlobServiceClient = class _BlobServiceClient extends StorageClient_js_1.StorageClient { - /** - * serviceContext provided by protocol layer. - */ - serviceContext; - /** - * - * Creates an instance of BlobServiceClient from connection string. - * - * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. - * [ Note - Account connection string can only be used in NODE.JS runtime. ] - * Account connection string example - - * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` - * SAS connection string example - - * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` - * @param options - Optional. Options to configure the HTTP pipeline. - */ - static fromConnectionString(connectionString, options) { - options = options || {}; - const extractedCreds = (0, utils_common_js_1.extractConnectionStringParts)(connectionString); - if (extractedCreds.kind === "AccountConnString") { - if (core_util_1.isNodeLike) { - const sharedKeyCredential = new StorageSharedKeyCredential_js_1.StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - if (!options.proxyOptions) { - options.proxyOptions = (0, core_rest_pipeline_1.getDefaultProxySettings)(extractedCreds.proxyUri); - } - const pipeline = (0, Pipeline_js_1.newPipeline)(sharedKeyCredential, options); - return new _BlobServiceClient(extractedCreds.url, pipeline); - } else { - throw new Error("Account connection string is only supported in Node.js environment"); - } - } else if (extractedCreds.kind === "SASConnString") { - const pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); - return new _BlobServiceClient(extractedCreds.url + "?" 
+ extractedCreds.accountSas, pipeline); - } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); - } - } - constructor(url, credentialOrPipeline, options) { - let pipeline; - if ((0, Pipeline_js_1.isPipelineLike)(credentialOrPipeline)) { - pipeline = credentialOrPipeline; - } else if (core_util_1.isNodeLike && credentialOrPipeline instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential || credentialOrPipeline instanceof AnonymousCredential_js_1.AnonymousCredential || (0, core_auth_1.isTokenCredential)(credentialOrPipeline)) { - pipeline = (0, Pipeline_js_1.newPipeline)(credentialOrPipeline, options); - } else { - pipeline = (0, Pipeline_js_1.newPipeline)(new AnonymousCredential_js_1.AnonymousCredential(), options); + exports2.varint32read = exports2.varint32write = exports2.int64toString = exports2.int64fromString = exports2.varint64write = exports2.varint64read = void 0; + function varint64read() { + let lowBits = 0; + let highBits = 0; + for (let shift = 0; shift < 28; shift += 7) { + let b = this.buf[this.pos++]; + lowBits |= (b & 127) << shift; + if ((b & 128) == 0) { + this.assertBounds(); + return [lowBits, highBits]; } - super(url, pipeline); - this.serviceContext = this.storageClientContext.service; - } - /** - * Creates a {@link ContainerClient} object - * - * @param containerName - A container name - * @returns A new ContainerClient object for the given container name. - * - * Example usage: - * - * ```ts snippet:BlobServiceClientGetContainerClient - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; - * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); - * - * const containerClient = blobServiceClient.getContainerClient(""); - * ``` - */ - getContainerClient(containerName) { - return new ContainerClient_js_1.ContainerClient((0, utils_common_js_1.appendToURLPath)(this.url, encodeURIComponent(containerName)), this.pipeline); } - /** - * Create a Blob container. @see https://learn.microsoft.com/rest/api/storageservices/create-container - * - * @param containerName - Name of the container to create. - * @param options - Options to configure Container Create operation. - * @returns Container creation response and the corresponding container client. - */ - async createContainer(containerName, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-createContainer", options, async (updatedOptions) => { - const containerClient = this.getContainerClient(containerName); - const containerCreateResponse = await containerClient.create(updatedOptions); - return { - containerClient, - containerCreateResponse - }; - }); + let middleByte = this.buf[this.pos++]; + lowBits |= (middleByte & 15) << 28; + highBits = (middleByte & 112) >> 4; + if ((middleByte & 128) == 0) { + this.assertBounds(); + return [lowBits, highBits]; } - /** - * Deletes a Blob container. - * - * @param containerName - Name of the container to delete. - * @param options - Options to configure Container Delete operation. - * @returns Container deletion response. 
- */ - async deleteContainer(containerName, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-deleteContainer", options, async (updatedOptions) => { - const containerClient = this.getContainerClient(containerName); - return containerClient.delete(updatedOptions); - }); + for (let shift = 3; shift <= 31; shift += 7) { + let b = this.buf[this.pos++]; + highBits |= (b & 127) << shift; + if ((b & 128) == 0) { + this.assertBounds(); + return [lowBits, highBits]; + } } - /** - * Restore a previously deleted Blob container. - * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. - * - * @param deletedContainerName - Name of the previously deleted container. - * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. - * @param options - Options to configure Container Restore operation. - * @returns Container deletion response. - */ - async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-undeleteContainer", options, async (updatedOptions) => { - const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); - const containerContext = containerClient["storageClientContext"].container; - const containerUndeleteResponse = (0, utils_common_js_2.assertResponse)(await containerContext.restore({ - deletedContainerName, - deletedContainerVersion, - tracingOptions: updatedOptions.tracingOptions - })); - return { containerClient, containerUndeleteResponse }; - }); + throw new Error("invalid varint"); + } + exports2.varint64read = varint64read; + function varint64write(lo, hi, bytes) { + for (let i = 0; i < 28; i = i + 7) { + const shift = lo >>> i; + const hasNext = !(shift >>> 7 == 0 && hi == 0); + const byte = (hasNext ? shift | 128 : shift) & 255; + bytes.push(byte); + if (!hasNext) { + return; + } } - /** - * Gets the properties of a storage account’s Blob service, including properties - * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. - * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-properties - * - * @param options - Options to the Service Get Properties operation. - * @returns Response data for the Service Get Properties operation. - */ - async getProperties(options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getProperties", options, async (updatedOptions) => { - return (0, utils_common_js_2.assertResponse)(await this.serviceContext.getProperties({ - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions - })); - }); + const splitBits = lo >>> 28 & 15 | (hi & 7) << 4; + const hasMoreBits = !(hi >> 3 == 0); + bytes.push((hasMoreBits ? splitBits | 128 : splitBits) & 255); + if (!hasMoreBits) { + return; } - /** - * Sets properties for a storage account’s Blob service endpoint, including properties - * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. - * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-service-properties - * - * @param properties - - * @param options - Options to the Service Set Properties operation. - * @returns Response data for the Service Set Properties operation. 
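Note: varint64read in the hunk above is a standard protobuf varint (base-128) decoder: each byte carries 7 payload bits, and a set high bit means another byte follows, up to 10 bytes for 64 bits. Because JavaScript bitwise operators truncate to 32 bits, the function accumulates the result as a [lowBits, highBits] pair, which the PbLong/PbULong wrappers defined later in this hunk consume. The wire format itself, sketched for values that fit a safe integer (writeVarint is an illustrative name, not part of the bundle):

```js
// Encode a non-negative safe integer as a varint: low 7 bits first,
// continuation bit (0x80) on every byte except the last.
function writeVarint(n) {
  const out = [];
  while (n > 127) {
    out.push((n % 128) | 128); // % instead of & so values past 2^31 stay exact
    n = Math.floor(n / 128);
  }
  out.push(n);
  return out;
}
console.log(writeVarint(300)); // [172, 2]: 300 = 0b10_0101100, low group first
```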
- */ - async setProperties(properties, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-setProperties", options, async (updatedOptions) => { - return (0, utils_common_js_2.assertResponse)(await this.serviceContext.setProperties(properties, { - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions - })); - }); + for (let i = 3; i < 31; i = i + 7) { + const shift = hi >>> i; + const hasNext = !(shift >>> 7 == 0); + const byte = (hasNext ? shift | 128 : shift) & 255; + bytes.push(byte); + if (!hasNext) { + return; + } } - /** - * Retrieves statistics related to replication for the Blob service. It is only - * available on the secondary location endpoint when read-access geo-redundant - * replication is enabled for the storage account. - * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-stats - * - * @param options - Options to the Service Get Statistics operation. - * @returns Response data for the Service Get Statistics operation. - */ - async getStatistics(options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getStatistics", options, async (updatedOptions) => { - return (0, utils_common_js_2.assertResponse)(await this.serviceContext.getStatistics({ - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions - })); - }); + bytes.push(hi >>> 31 & 1); + } + exports2.varint64write = varint64write; + var TWO_PWR_32_DBL = (1 << 16) * (1 << 16); + function int64fromString(dec) { + let minus = dec[0] == "-"; + if (minus) + dec = dec.slice(1); + const base = 1e6; + let lowBits = 0; + let highBits = 0; + function add1e6digit(begin, end) { + const digit1e6 = Number(dec.slice(begin, end)); + highBits *= base; + lowBits = lowBits * base + digit1e6; + if (lowBits >= TWO_PWR_32_DBL) { + highBits = highBits + (lowBits / TWO_PWR_32_DBL | 0); + lowBits = lowBits % TWO_PWR_32_DBL; + } } - /** - * The Get Account Information operation returns the sku name and account kind - * for the specified account. - * The Get Account Information operation is available on service versions beginning - * with version 2018-03-28. - * @see https://learn.microsoft.com/rest/api/storageservices/get-account-information - * - * @param options - Options to the Service Get Account Info operation. - * @returns Response data for the Service Get Account Info operation. - */ - async getAccountInfo(options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getAccountInfo", options, async (updatedOptions) => { - return (0, utils_common_js_2.assertResponse)(await this.serviceContext.getAccountInfo({ - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions - })); - }); + add1e6digit(-24, -18); + add1e6digit(-18, -12); + add1e6digit(-12, -6); + add1e6digit(-6); + return [minus, lowBits, highBits]; + } + exports2.int64fromString = int64fromString; + function int64toString(bitsLow, bitsHigh) { + if (bitsHigh >>> 0 <= 2097151) { + return "" + (TWO_PWR_32_DBL * bitsHigh + (bitsLow >>> 0)); } - /** - * Returns a list of the containers under the specified account. - * @see https://learn.microsoft.com/rest/api/storageservices/list-containers2 - * - * @param marker - A string value that identifies the portion of - * the list of containers to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all containers remaining to be listed - * with the current page. 
The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to the Service List Container Segment operation. - * @returns Response data for the Service List Container Segment operation. - */ - async listContainersSegment(marker, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-listContainersSegment", options, async (updatedOptions) => { - return (0, utils_common_js_2.assertResponse)(await this.serviceContext.listContainersSegment({ - abortSignal: options.abortSignal, - marker, - ...options, - include: typeof options.include === "string" ? [options.include] : options.include, - tracingOptions: updatedOptions.tracingOptions - })); - }); + let low = bitsLow & 16777215; + let mid = (bitsLow >>> 24 | bitsHigh << 8) >>> 0 & 16777215; + let high = bitsHigh >> 16 & 65535; + let digitA = low + mid * 6777216 + high * 6710656; + let digitB = mid + high * 8147497; + let digitC = high * 2; + let base = 1e7; + if (digitA >= base) { + digitB += Math.floor(digitA / base); + digitA %= base; } - /** - * The Filter Blobs operation enables callers to list blobs across all containers whose tags - * match a given search expression. Filter blobs searches across all containers within a - * storage account but can be scoped within the expression to a single container. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. - */ - async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-findBlobsByTagsSegment", options, async (updatedOptions) => { - const response = (0, utils_common_js_2.assertResponse)(await this.serviceContext.filterBlobs({ - abortSignal: options.abortSignal, - where: tagFilterSqlExpression, - marker, - maxPageSize: options.maxPageSize, - tracingOptions: updatedOptions.tracingOptions - })); - const wrappedResponse = { - ...response, - _response: response._response, - // _response is made non-enumerable - blobs: response.blobs.map((blob) => { - let tagValue = ""; - if (blob.tags?.blobTagSet.length === 1) { - tagValue = blob.tags.blobTagSet[0].value; - } - return { ...blob, tags: (0, utils_common_js_1.toTags)(blob.tags), tagValue }; - }) - }; - return wrappedResponse; - }); + if (digitB >= base) { + digitC += Math.floor(digitB / base); + digitB %= base; } - /** - * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. 
- * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. - */ - async *findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { - let response; - if (!!marker || marker === void 0) { - do { - response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options); - response.blobs = response.blobs || []; - marker = response.continuationToken; - yield response; - } while (marker); + function decimalFrom1e7(digit1e7, needLeadingZeros) { + let partial = digit1e7 ? String(digit1e7) : ""; + if (needLeadingZeros) { + return "0000000".slice(partial.length) + partial; } + return partial; } - /** - * Returns an AsyncIterableIterator for blobs. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to findBlobsByTagsItems. - */ - async *findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { - let marker; - for await (const segment of this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)) { - yield* segment.blobs; + return decimalFrom1e7( + digitC, + /*needLeadingZeros=*/ + 0 + ) + decimalFrom1e7( + digitB, + /*needLeadingZeros=*/ + digitC + ) + // If the final 1e7 digit didn't need leading zeros, we would have + // returned via the trivial code path at the top. 
+ decimalFrom1e7( + digitA, + /*needLeadingZeros=*/ + 1 + ); + } + exports2.int64toString = int64toString; + function varint32write(value, bytes) { + if (value >= 0) { + while (value > 127) { + bytes.push(value & 127 | 128); + value = value >>> 7; + } + bytes.push(value); + } else { + for (let i = 0; i < 9; i++) { + bytes.push(value & 127 | 128); + value = value >> 7; } + bytes.push(1); } + } + exports2.varint32write = varint32write; + function varint32read() { + let b = this.buf[this.pos++]; + let result = b & 127; + if ((b & 128) == 0) { + this.assertBounds(); + return result; + } + b = this.buf[this.pos++]; + result |= (b & 127) << 7; + if ((b & 128) == 0) { + this.assertBounds(); + return result; + } + b = this.buf[this.pos++]; + result |= (b & 127) << 14; + if ((b & 128) == 0) { + this.assertBounds(); + return result; + } + b = this.buf[this.pos++]; + result |= (b & 127) << 21; + if ((b & 128) == 0) { + this.assertBounds(); + return result; + } + b = this.buf[this.pos++]; + result |= (b & 15) << 28; + for (let readBytes = 5; (b & 128) !== 0 && readBytes < 10; readBytes++) + b = this.buf[this.pos++]; + if ((b & 128) != 0) + throw new Error("invalid varint"); + this.assertBounds(); + return result >>> 0; + } + exports2.varint32read = varint32read; + } +}); + +// node_modules/@protobuf-ts/runtime/build/commonjs/pb-long.js +var require_pb_long = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/pb-long.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.PbLong = exports2.PbULong = exports2.detectBi = void 0; + var goog_varint_1 = require_goog_varint(); + var BI; + function detectBi() { + const dv = new DataView(new ArrayBuffer(8)); + const ok = globalThis.BigInt !== void 0 && typeof dv.getBigInt64 === "function" && typeof dv.getBigUint64 === "function" && typeof dv.setBigInt64 === "function" && typeof dv.setBigUint64 === "function"; + BI = ok ? { + MIN: BigInt("-9223372036854775808"), + MAX: BigInt("9223372036854775807"), + UMIN: BigInt("0"), + UMAX: BigInt("18446744073709551615"), + C: BigInt, + V: dv + } : void 0; + } + exports2.detectBi = detectBi; + detectBi(); + function assertBi(bi) { + if (!bi) + throw new Error("BigInt unavailable, see https://github.com/timostamm/protobuf-ts/blob/v1.0.8/MANUAL.md#bigint-support"); + } + var RE_DECIMAL_STR = /^-?[0-9]+$/; + var TWO_PWR_32_DBL = 4294967296; + var HALF_2_PWR_32 = 2147483648; + var SharedPbLong = class { /** - * Returns an async iterable iterator to find all blobs with specified tag - * under the specified account. - * - * .byPage() returns an async iterable iterator to list the blobs in pages. 
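Note: the densest addition in this hunk is int64toString, which renders a 64-bit lo/hi pair in decimal without BigInt. It re-expresses the value in base 1e7: the 64 bits are split 24+24+16 into low/mid/high limbs, each limb is weighted by the base-1e7 digits of its place value (2^0, 2^24, 2^48), carries are normalized, and the three resulting digits are printed with leading zeros. The magic constants are exactly those digits, which is easy to verify:

```js
// Base-1e7 digits behind int64toString's multipliers:
console.log(2 ** 24); // 16777216        -> digits [1, 6777216]: mid adds 1 to digitB and 6777216 * mid to digitA
console.log(2 ** 48); // 281474976710656 -> digits [2, 8147497, 6710656]: high feeds digitC, digitB and digitA
```

int64fromString above is the same trick in reverse, folding the decimal string into the lo/hi pair six digits at a time.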
- * - * @see https://learn.microsoft.com/rest/api/storageservices/get-blob-service-properties - * - * ```ts snippet:BlobServiceClientFindBlobsByTags - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; - * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); - * - * // Use for await to iterate the blobs - * let i = 1; - * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * - * // Use iter.next() to iterate the blobs - * i = 1; - * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); - * let { value, done } = await iter.next(); - * while (!done) { - * console.log(`Blob ${i++}: ${value.name}`); - * ({ value, done } = await iter.next()); - * } - * - * // Use byPage() to iterate the blobs - * i = 1; - * for await (const page of blobServiceClient - * .findBlobsByTags("tagkey='tagvalue'") - * .byPage({ maxPageSize: 20 })) { - * for (const blob of page.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * - * // Use paging with a marker - * i = 1; - * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * // Prints 2 blob names - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * // Gets next marker - * let marker = response.continuationToken; - * // Passing next marker as continuationToken - * iterator = blobServiceClient - * .findBlobsByTags("tagkey='tagvalue'") - * .byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; - * - * // Prints blob names - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * ``` - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to find blobs by tags. + * Create a new instance with the given bits. */ - findBlobsByTags(tagFilterSqlExpression, options = {}) { - const listSegmentOptions = { - ...options - }; - const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: (settings = {}) => { - return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, { - maxPageSize: settings.maxPageSize, - ...listSegmentOptions - }); - } - }; + constructor(lo, hi) { + this.lo = lo | 0; + this.hi = hi | 0; } /** - * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses - * - * @param marker - A string value that identifies the portion of - * the list of containers to be returned with the next listing operation. 
The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all containers remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to list containers operation. + * Is this instance equal to 0? */ - async *listSegments(marker, options = {}) { - let listContainersSegmentResponse; - if (!!marker || marker === void 0) { - do { - listContainersSegmentResponse = await this.listContainersSegment(marker, options); - listContainersSegmentResponse.containerItems = listContainersSegmentResponse.containerItems || []; - marker = listContainersSegmentResponse.continuationToken; - yield await listContainersSegmentResponse; - } while (marker); - } + isZero() { + return this.lo == 0 && this.hi == 0; } /** - * Returns an AsyncIterableIterator for Container Items - * - * @param options - Options to list containers operation. + * Convert to a native number. */ - async *listItems(options = {}) { - let marker; - for await (const segment of this.listSegments(marker, options)) { - yield* segment.containerItems; - } + toNumber() { + let result = this.hi * TWO_PWR_32_DBL + (this.lo >>> 0); + if (!Number.isSafeInteger(result)) + throw new Error("cannot convert to safe number"); + return result; } + }; + var PbULong = class _PbULong extends SharedPbLong { /** - * Returns an async iterable iterator to list all the containers - * under the specified account. - * - * .byPage() returns an async iterable iterator to list the containers in pages. - * - * ```ts snippet:BlobServiceClientListContainers - * import { BlobServiceClient } from "@azure/storage-blob"; - * import { DefaultAzureCredential } from "@azure/identity"; - * - * const account = ""; - * const blobServiceClient = new BlobServiceClient( - * `https://${account}.blob.core.windows.net`, - * new DefaultAzureCredential(), - * ); - * - * // Use for await to iterate the containers - * let i = 1; - * for await (const container of blobServiceClient.listContainers()) { - * console.log(`Container ${i++}: ${container.name}`); - * } - * - * // Use iter.next() to iterate the containers - * i = 1; - * const iter = blobServiceClient.listContainers(); - * let { value, done } = await iter.next(); - * while (!done) { - * console.log(`Container ${i++}: ${value.name}`); - * ({ value, done } = await iter.next()); - * } - * - * // Use byPage() to iterate the containers - * i = 1; - * for await (const page of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { - * for (const container of page.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); - * } - * } - * - * // Use paging with a marker - * i = 1; - * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * - * // Prints 2 container names - * if (response.containerItems) { - * for (const container of response.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); - * } - * } - * - * // Gets next marker - * let marker = response.continuationToken; - * // Passing next marker as continuationToken - * iterator = blobServiceClient - * .listContainers() - * .byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; - * - * // Prints 10 container names - * if (response.containerItems) { - * for 
(const container of response.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); - * } - * } - * ``` - * - * @param options - Options to list containers. - * @returns An asyncIterableIterator that supports paging. + * Create instance from a `string`, `number` or `bigint`. */ - listContainers(options = {}) { - if (options.prefix === "") { - options.prefix = void 0; - } - const include = []; - if (options.includeDeleted) { - include.push("deleted"); - } - if (options.includeMetadata) { - include.push("metadata"); - } - if (options.includeSystem) { - include.push("system"); - } - const listSegmentOptions = { - ...options, - ...include.length > 0 ? { include } : {} - }; - const iter = this.listItems(listSegmentOptions); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: (settings = {}) => { - return this.listSegments(settings.continuationToken, { - maxPageSize: settings.maxPageSize, - ...listSegmentOptions - }); + static from(value) { + if (BI) + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + if (value == "") + throw new Error("string is no integer"); + value = BI.C(value); + case "number": + if (value === 0) + return this.ZERO; + value = BI.C(value); + case "bigint": + if (!value) + return this.ZERO; + if (value < BI.UMIN) + throw new Error("signed value for ulong"); + if (value > BI.UMAX) + throw new Error("ulong too large"); + BI.V.setBigUint64(0, value, true); + return new _PbULong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); + } + else + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + value = value.trim(); + if (!RE_DECIMAL_STR.test(value)) + throw new Error("string is no integer"); + let [minus, lo, hi] = goog_varint_1.int64fromString(value); + if (minus) + throw new Error("signed value for ulong"); + return new _PbULong(lo, hi); + case "number": + if (value == 0) + return this.ZERO; + if (!Number.isSafeInteger(value)) + throw new Error("number is no integer"); + if (value < 0) + throw new Error("signed value for ulong"); + return new _PbULong(value, value / TWO_PWR_32_DBL); } - }; + throw new Error("unknown value " + typeof value); } /** - * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). - * - * Retrieves a user delegation key for the Blob service. This is only a valid operation when using - * bearer token authentication. - * - * @see https://learn.microsoft.com/rest/api/storageservices/get-user-delegation-key - * - * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time - * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time + * Convert to decimal string. 
*/ - async getUserDelegationKey(startsOn, expiresOn, options = {}) { - return tracing_js_1.tracingClient.withSpan("BlobServiceClient-getUserDelegationKey", options, async (updatedOptions) => { - const response = (0, utils_common_js_2.assertResponse)(await this.serviceContext.getUserDelegationKey({ - startsOn: (0, utils_common_js_2.truncatedISO8061Date)(startsOn, false), - expiresOn: (0, utils_common_js_2.truncatedISO8061Date)(expiresOn, false) - }, { - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions - })); - const userDelegationKey = { - signedObjectId: response.signedObjectId, - signedTenantId: response.signedTenantId, - signedStartsOn: new Date(response.signedStartsOn), - signedExpiresOn: new Date(response.signedExpiresOn), - signedService: response.signedService, - signedVersion: response.signedVersion, - value: response.value - }; - const res = { - _response: response._response, - requestId: response.requestId, - clientRequestId: response.clientRequestId, - version: response.version, - date: response.date, - errorCode: response.errorCode, - ...userDelegationKey - }; - return res; - }); + toString() { + return BI ? this.toBigInt().toString() : goog_varint_1.int64toString(this.lo, this.hi); } /** - * Creates a BlobBatchClient object to conduct batch operations. - * - * @see https://learn.microsoft.com/rest/api/storageservices/blob-batch - * - * @returns A new BlobBatchClient object for this service. + * Convert to native bigint. */ - getBlobBatchClient() { - return new BlobBatchClient_js_1.BlobBatchClient(this.url, this.pipeline); + toBigInt() { + assertBi(BI); + BI.V.setInt32(0, this.lo, true); + BI.V.setInt32(4, this.hi, true); + return BI.V.getBigUint64(0, true); } + }; + exports2.PbULong = PbULong; + PbULong.ZERO = new PbULong(0, 0); + var PbLong = class _PbLong extends SharedPbLong { /** - * Only available for BlobServiceClient constructed with a shared key credential. - * - * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the shared key credential of the client. - * - * @see https://learn.microsoft.com/rest/api/storageservices/create-account-sas - * - * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. - * @param permissions - Specifies the list of permissions to be associated with the SAS. - * @param resourceTypes - Specifies the resource types associated with the shared access signature. - * @param options - Optional parameters. - * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + * Create instance from a `string`, `number` or `bigint`. 
*/ - generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions_js_1.AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { - if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { - throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); - } - if (expiresOn === void 0) { - const now = /* @__PURE__ */ new Date(); - expiresOn = new Date(now.getTime() + 3600 * 1e3); - } - const sas = (0, AccountSASSignatureValues_js_1.generateAccountSASQueryParameters)({ - permissions, - expiresOn, - resourceTypes, - services: AccountSASServices_js_1.AccountSASServices.parse("b").toString(), - ...options - }, this.credential).toString(); - return (0, utils_common_js_1.appendToURLQuery)(this.url, sas); + static from(value) { + if (BI) + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + if (value == "") + throw new Error("string is no integer"); + value = BI.C(value); + case "number": + if (value === 0) + return this.ZERO; + value = BI.C(value); + case "bigint": + if (!value) + return this.ZERO; + if (value < BI.MIN) + throw new Error("signed long too small"); + if (value > BI.MAX) + throw new Error("signed long too large"); + BI.V.setBigInt64(0, value, true); + return new _PbLong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); + } + else + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + value = value.trim(); + if (!RE_DECIMAL_STR.test(value)) + throw new Error("string is no integer"); + let [minus, lo, hi] = goog_varint_1.int64fromString(value); + if (minus) { + if (hi > HALF_2_PWR_32 || hi == HALF_2_PWR_32 && lo != 0) + throw new Error("signed long too small"); + } else if (hi >= HALF_2_PWR_32) + throw new Error("signed long too large"); + let pbl = new _PbLong(lo, hi); + return minus ? pbl.negate() : pbl; + case "number": + if (value == 0) + return this.ZERO; + if (!Number.isSafeInteger(value)) + throw new Error("number is no integer"); + return value > 0 ? new _PbLong(value, value / TWO_PWR_32_DBL) : new _PbLong(-value, -value / TWO_PWR_32_DBL).negate(); + } + throw new Error("unknown value " + typeof value); } /** - * Only available for BlobServiceClient constructed with a shared key credential. - * - * Generates string to sign for a Blob account Shared Access Signature (SAS) URI based on - * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. - * - * @see https://learn.microsoft.com/rest/api/storageservices/create-account-sas - * - * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. - * @param permissions - Specifies the list of permissions to be associated with the SAS. - * @param resourceTypes - Specifies the resource types associated with the shared access signature. - * @param options - Optional parameters. - * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + * Do we have a minus sign? 
*/ - generateSasStringToSign(expiresOn, permissions = AccountSASPermissions_js_1.AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { - if (!(this.credential instanceof StorageSharedKeyCredential_js_1.StorageSharedKeyCredential)) { - throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); - } - if (expiresOn === void 0) { - const now = /* @__PURE__ */ new Date(); - expiresOn = new Date(now.getTime() + 3600 * 1e3); - } - return (0, AccountSASSignatureValues_js_1.generateAccountSASQueryParametersInternal)({ - permissions, - expiresOn, - resourceTypes, - services: AccountSASServices_js_1.AccountSASServices.parse("b").toString(), - ...options - }, this.credential).stringToSign; - } - }; - exports2.BlobServiceClient = BlobServiceClient; - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/BatchResponse.js -var require_BatchResponse = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/BatchResponse.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/generatedModels.js -var require_generatedModels = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/generatedModels.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.KnownEncryptionAlgorithmType = void 0; - var KnownEncryptionAlgorithmType; - (function(KnownEncryptionAlgorithmType2) { - KnownEncryptionAlgorithmType2["AES256"] = "AES256"; - })(KnownEncryptionAlgorithmType || (exports2.KnownEncryptionAlgorithmType = KnownEncryptionAlgorithmType = {})); - } -}); - -// node_modules/@azure/storage-blob/dist/commonjs/index.js -var require_commonjs15 = __commonJS({ - "node_modules/@azure/storage-blob/dist/commonjs/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.logger = exports2.RestError = exports2.BaseRequestPolicy = exports2.StorageOAuthScopes = exports2.newPipeline = exports2.isPipelineLike = exports2.Pipeline = exports2.getBlobServiceAccountAudience = exports2.StorageBlobAudience = exports2.PremiumPageBlobTier = exports2.BlockBlobTier = exports2.generateBlobSASQueryParameters = exports2.generateAccountSASQueryParameters = void 0; - var tslib_1 = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); - var core_rest_pipeline_1 = require_commonjs6(); - Object.defineProperty(exports2, "RestError", { enumerable: true, get: function() { - return core_rest_pipeline_1.RestError; - } }); - tslib_1.__exportStar(require_BlobServiceClient(), exports2); - tslib_1.__exportStar(require_Clients(), exports2); - tslib_1.__exportStar(require_ContainerClient(), exports2); - tslib_1.__exportStar(require_BlobLeaseClient(), exports2); - tslib_1.__exportStar(require_AccountSASPermissions(), exports2); - tslib_1.__exportStar(require_AccountSASResourceTypes(), exports2); - tslib_1.__exportStar(require_AccountSASServices(), exports2); - var AccountSASSignatureValues_js_1 = require_AccountSASSignatureValues(); - Object.defineProperty(exports2, "generateAccountSASQueryParameters", { enumerable: true, get: function() { - return AccountSASSignatureValues_js_1.generateAccountSASQueryParameters; - } }); - tslib_1.__exportStar(require_BlobBatch(), exports2); - tslib_1.__exportStar(require_BlobBatchClient(), exports2); - tslib_1.__exportStar(require_BatchResponse(), exports2); - tslib_1.__exportStar(require_BlobSASPermissions(), exports2); - var 
BlobSASSignatureValues_js_1 = require_BlobSASSignatureValues(); - Object.defineProperty(exports2, "generateBlobSASQueryParameters", { enumerable: true, get: function() { - return BlobSASSignatureValues_js_1.generateBlobSASQueryParameters; - } }); - tslib_1.__exportStar(require_StorageBrowserPolicyFactory2(), exports2); - tslib_1.__exportStar(require_ContainerSASPermissions(), exports2); - tslib_1.__exportStar(require_AnonymousCredential(), exports2); - tslib_1.__exportStar(require_Credential(), exports2); - tslib_1.__exportStar(require_StorageSharedKeyCredential(), exports2); - var models_js_1 = require_models2(); - Object.defineProperty(exports2, "BlockBlobTier", { enumerable: true, get: function() { - return models_js_1.BlockBlobTier; - } }); - Object.defineProperty(exports2, "PremiumPageBlobTier", { enumerable: true, get: function() { - return models_js_1.PremiumPageBlobTier; - } }); - Object.defineProperty(exports2, "StorageBlobAudience", { enumerable: true, get: function() { - return models_js_1.StorageBlobAudience; - } }); - Object.defineProperty(exports2, "getBlobServiceAccountAudience", { enumerable: true, get: function() { - return models_js_1.getBlobServiceAccountAudience; - } }); - var Pipeline_js_1 = require_Pipeline(); - Object.defineProperty(exports2, "Pipeline", { enumerable: true, get: function() { - return Pipeline_js_1.Pipeline; - } }); - Object.defineProperty(exports2, "isPipelineLike", { enumerable: true, get: function() { - return Pipeline_js_1.isPipelineLike; - } }); - Object.defineProperty(exports2, "newPipeline", { enumerable: true, get: function() { - return Pipeline_js_1.newPipeline; - } }); - Object.defineProperty(exports2, "StorageOAuthScopes", { enumerable: true, get: function() { - return Pipeline_js_1.StorageOAuthScopes; - } }); - tslib_1.__exportStar(require_StorageRetryPolicyFactory(), exports2); - var RequestPolicy_js_1 = require_RequestPolicy(); - Object.defineProperty(exports2, "BaseRequestPolicy", { enumerable: true, get: function() { - return RequestPolicy_js_1.BaseRequestPolicy; - } }); - tslib_1.__exportStar(require_AnonymousCredentialPolicy(), exports2); - tslib_1.__exportStar(require_CredentialPolicy(), exports2); - tslib_1.__exportStar(require_StorageRetryPolicyFactory(), exports2); - tslib_1.__exportStar(require_StorageSharedKeyCredentialPolicy(), exports2); - tslib_1.__exportStar(require_SASQueryParameters(), exports2); - tslib_1.__exportStar(require_generatedModels(), exports2); - var log_js_1 = require_log6(); - Object.defineProperty(exports2, "logger", { enumerable: true, get: function() { - return log_js_1.logger; - } }); - } -}); - -// node_modules/@actions/cache/lib/internal/shared/errors.js -var require_errors3 = __commonJS({ - "node_modules/@actions/cache/lib/internal/shared/errors.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.RateLimitError = exports2.UsageError = exports2.NetworkError = exports2.GHESNotSupportedError = exports2.CacheNotFoundError = exports2.InvalidResponseError = exports2.FilesNotFoundError = void 0; - var FilesNotFoundError = class extends Error { - constructor(files = []) { - let message = "No files were found to upload"; - if (files.length > 0) { - message += `: ${files.join(", ")}`; - } - super(message); - this.files = files; - this.name = "FilesNotFoundError"; - } - }; - exports2.FilesNotFoundError = FilesNotFoundError; - var InvalidResponseError = class extends Error { - constructor(message) { - super(message); - this.name = "InvalidResponseError"; - 
} - }; - exports2.InvalidResponseError = InvalidResponseError; - var CacheNotFoundError = class extends Error { - constructor(message = "Cache not found") { - super(message); - this.name = "CacheNotFoundError"; - } - }; - exports2.CacheNotFoundError = CacheNotFoundError; - var GHESNotSupportedError = class extends Error { - constructor(message = "@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.") { - super(message); - this.name = "GHESNotSupportedError"; + isNegative() { + return (this.hi & HALF_2_PWR_32) !== 0; } - }; - exports2.GHESNotSupportedError = GHESNotSupportedError; - var NetworkError = class extends Error { - constructor(code) { - const message = `Unable to make request: ${code} -If you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`; - super(message); - this.code = code; - this.name = "NetworkError"; + /** + * Negate two's complement. + * Invert all the bits and add one to the result. + */ + negate() { + let hi = ~this.hi, lo = this.lo; + if (lo) + lo = ~lo + 1; + else + hi += 1; + return new _PbLong(lo, hi); } - }; - exports2.NetworkError = NetworkError; - NetworkError.isNetworkErrorCode = (code) => { - if (!code) - return false; - return [ - "ECONNRESET", - "ENOTFOUND", - "ETIMEDOUT", - "ECONNREFUSED", - "EHOSTUNREACH" - ].includes(code); - }; - var UsageError = class extends Error { - constructor() { - const message = `Cache storage quota has been hit. Unable to upload any new cache entries. Usage is recalculated every 6-12 hours. -More info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`; - super(message); - this.name = "UsageError"; + /** + * Convert to decimal string. + */ + toString() { + if (BI) + return this.toBigInt().toString(); + if (this.isNegative()) { + let n = this.negate(); + return "-" + goog_varint_1.int64toString(n.lo, n.hi); + } + return goog_varint_1.int64toString(this.lo, this.hi); } - }; - exports2.UsageError = UsageError; - UsageError.isUsageErrorMessage = (msg) => { - if (!msg) - return false; - return msg.includes("insufficient usage"); - }; - var RateLimitError = class extends Error { - constructor(message) { - super(message); - this.name = "RateLimitError"; + /** + * Convert to native bigint. + */ + toBigInt() { + assertBi(BI); + BI.V.setInt32(0, this.lo, true); + BI.V.setInt32(4, this.hi, true); + return BI.V.getBigInt64(0, true); } }; - exports2.RateLimitError = RateLimitError; + exports2.PbLong = PbLong; + PbLong.ZERO = new PbLong(0, 0); } }); -// node_modules/@actions/cache/lib/internal/uploadUtils.js -var require_uploadUtils = __commonJS({ - "node_modules/@actions/cache/lib/internal/uploadUtils.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/binary-reader.js +var require_binary_reader = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/binary-reader.js"(exports2) { "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); - } - __setModuleDefault2(result, mod); - return result; - }; - })(); - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve2) { - resolve2(value); + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BinaryReader = exports2.binaryReadOptions = void 0; + var binary_format_contract_1 = require_binary_format_contract(); + var pb_long_1 = require_pb_long(); + var goog_varint_1 = require_goog_varint(); + var defaultsRead = { + readUnknownField: true, + readerFactory: (bytes) => new BinaryReader(bytes) + }; + function binaryReadOptions(options) { + return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; + } + exports2.binaryReadOptions = binaryReadOptions; + var BinaryReader = class { + constructor(buf, textDecoder) { + this.varint64 = goog_varint_1.varint64read; + this.uint32 = goog_varint_1.varint32read; + this.buf = buf; + this.len = buf.length; + this.pos = 0; + this.view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + this.textDecoder = textDecoder !== null && textDecoder !== void 0 ? textDecoder : new TextDecoder("utf-8", { + fatal: true, + ignoreBOM: true }); } - return new (P || (P = Promise))(function(resolve2, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + /** + * Reads a tag - field number and wire type. + */ + tag() { + let tag = this.uint32(), fieldNo = tag >>> 3, wireType = tag & 7; + if (fieldNo <= 0 || wireType < 0 || wireType > 5) + throw new Error("illegal tag: field no " + fieldNo + " wire type " + wireType); + return [fieldNo, wireType]; + } + /** + * Skip one element on the wire and return the skipped data. + * Supports WireType.StartGroup since v2.0.0-alpha.23. 
+ */ + skip(wireType) { + let start = this.pos; + switch (wireType) { + case binary_format_contract_1.WireType.Varint: + while (this.buf[this.pos++] & 128) { + } + break; + case binary_format_contract_1.WireType.Bit64: + this.pos += 4; + case binary_format_contract_1.WireType.Bit32: + this.pos += 4; + break; + case binary_format_contract_1.WireType.LengthDelimited: + let len = this.uint32(); + this.pos += len; + break; + case binary_format_contract_1.WireType.StartGroup: + let t; + while ((t = this.tag()[1]) !== binary_format_contract_1.WireType.EndGroup) { + this.skip(t); + } + break; + default: + throw new Error("can't skip wire type " + wireType); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.UploadProgress = void 0; - exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; - var core12 = __importStar2(require_core()); - var storage_blob_1 = require_commonjs15(); - var errors_1 = require_errors3(); - var UploadProgress = class { - constructor(contentLength) { - this.contentLength = contentLength; - this.sentBytes = 0; - this.displayedComplete = false; - this.startTime = Date.now(); + this.assertBounds(); + return this.buf.subarray(start, this.pos); } /** - * Sets the number of bytes sent - * - * @param sentBytes the number of bytes sent + * Throws error if position in byte array is out of range. */ - setSentBytes(sentBytes) { - this.sentBytes = sentBytes; + assertBounds() { + if (this.pos > this.len) + throw new RangeError("premature EOF"); } /** - * Returns the total number of bytes transferred. + * Read an `int32` field, a signed 32-bit varint. */ - getTransferredBytes() { - return this.sentBytes; + int32() { + return this.uint32() | 0; } /** - * Returns true if the upload is complete. + * Read a `sint32` field, a signed, zigzag-encoded 32-bit varint. */ - isDone() { - return this.getTransferredBytes() === this.contentLength; + sint32() { + let zze = this.uint32(); + return zze >>> 1 ^ -(zze & 1); } /** - * Prints the current upload stats. Once the upload completes, this will print one - * last line and then stop. + * Read an `int64` field, a signed 64-bit varint. */ - display() { - if (this.displayedComplete) { - return; - } - const transferredBytes = this.sentBytes; - const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); - const elapsedTime = Date.now() - this.startTime; - const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core12.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); - if (this.isDone()) { - this.displayedComplete = true; - } + int64() { + return new pb_long_1.PbLong(...this.varint64()); } /** - * Returns a function used to handle TransferProgressEvents. + * Read a `uint64` field, an unsigned 64-bit varint. */ - onProgress() { - return (progress) => { - this.setSentBytes(progress.loadedBytes); - }; + uint64() { + return new pb_long_1.PbULong(...this.varint64()); } /** - * Starts the timer that displays the stats. - * - * @param delayInMs the delay between each write + * Read a `sint64` field, a signed, zig-zag-encoded 64-bit varint.
*/ - startDisplayTimer(delayInMs = 1e3) { - const displayCallback = () => { - this.display(); - if (!this.isDone()) { - this.timeoutHandle = setTimeout(displayCallback, delayInMs); - } - }; - this.timeoutHandle = setTimeout(displayCallback, delayInMs); + sint64() { + let [lo, hi] = this.varint64(); + let s = -(lo & 1); + lo = (lo >>> 1 | (hi & 1) << 31) ^ s; + hi = hi >>> 1 ^ s; + return new pb_long_1.PbLong(lo, hi); } /** - * Stops the timer that displays the stats. As this typically indicates the upload - * is complete, this will display one last line, unless the last line has already - * been written. + * Read a `bool` field, a varint. */ - stopDisplayTimer() { - if (this.timeoutHandle) { - clearTimeout(this.timeoutHandle); - this.timeoutHandle = void 0; - } - this.display(); - } - }; - exports2.UploadProgress = UploadProgress; - function uploadCacheArchiveSDK(signedUploadURL, archivePath, options) { - return __awaiter2(this, void 0, void 0, function* () { - var _a; - const blobClient = new storage_blob_1.BlobClient(signedUploadURL); - const blockBlobClient = blobClient.getBlockBlobClient(); - const uploadProgress = new UploadProgress((_a = options === null || options === void 0 ? void 0 : options.archiveSizeBytes) !== null && _a !== void 0 ? _a : 0); - const uploadOptions = { - blockSize: options === null || options === void 0 ? void 0 : options.uploadChunkSize, - concurrency: options === null || options === void 0 ? void 0 : options.uploadConcurrency, - // maximum number of parallel transfer workers - maxSingleShotSize: 128 * 1024 * 1024, - // 128 MiB initial transfer size - onProgress: uploadProgress.onProgress() - }; - try { - uploadProgress.startDisplayTimer(); - core12.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); - const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); - if (response._response.status >= 400) { - throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); - } - return response; - } catch (error3) { - core12.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error3.message}`); - throw error3; - } finally { - uploadProgress.stopDisplayTimer(); - } - }); - } - } }); -// node_modules/@actions/cache/lib/internal/requestUtils.js -var require_requestUtils = __commonJS({ - "node_modules/@actions/cache/lib/internal/requestUtils.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ?
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); - } - __setModuleDefault2(result, mod); - return result; - }; - })(); - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve2) { - resolve2(value); - }); - } - return new (P || (P = Promise))(function(resolve2, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); - } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isSuccessStatusCode = isSuccessStatusCode; - exports2.isServerErrorStatusCode = isServerErrorStatusCode; - exports2.isRetryableStatusCode = isRetryableStatusCode; - exports2.retry = retry2; - exports2.retryTypedResponse = retryTypedResponse; - exports2.retryHttpClientResponse = retryHttpClientResponse; - var core12 = __importStar2(require_core()); - var http_client_1 = require_lib(); - var constants_1 = require_constants12(); - function isSuccessStatusCode(statusCode) { - if (!statusCode) { - return false; + bool() { + let [lo, hi] = this.varint64(); + return lo !== 0 || hi !== 0; } - return statusCode >= 200 && statusCode < 300; - } - function isServerErrorStatusCode(statusCode) { - if (!statusCode) { - return true; + /** + * Read a `fixed32` field, an unsigned, fixed-length 32-bit integer. + */ + fixed32() { + return this.view.getUint32((this.pos += 4) - 4, true); } - return statusCode >= 500; - } - function isRetryableStatusCode(statusCode) { - if (!statusCode) { - return false; + /** + * Read a `sfixed32` field, a signed, fixed-length 32-bit integer. 
+ */ + sfixed32() { + return this.view.getInt32((this.pos += 4) - 4, true); } - const retryableStatusCodes = [ - http_client_1.HttpCodes.BadGateway, - http_client_1.HttpCodes.ServiceUnavailable, - http_client_1.HttpCodes.GatewayTimeout - ]; - return retryableStatusCodes.includes(statusCode); - } - function sleep(milliseconds) { - return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve2) => setTimeout(resolve2, milliseconds)); - }); - } - function retry2(name_1, method_1, getStatusCode_1) { - return __awaiter2(this, arguments, void 0, function* (name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay2 = constants_1.DefaultRetryDelay, onError = void 0) { - let errorMessage = ""; - let attempt = 1; - while (attempt <= maxAttempts) { - let response = void 0; - let statusCode = void 0; - let isRetryable = false; - try { - response = yield method(); - } catch (error3) { - if (onError) { - response = onError(error3); - } - isRetryable = true; - errorMessage = error3.message; - } - if (response) { - statusCode = getStatusCode(response); - if (!isServerErrorStatusCode(statusCode)) { - return response; - } - } - if (statusCode) { - isRetryable = isRetryableStatusCode(statusCode); - errorMessage = `Cache service responded with ${statusCode}`; - } - core12.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); - if (!isRetryable) { - core12.debug(`${name} - Error is not retryable`); - break; - } - yield sleep(delay2); - attempt++; - } - throw Error(`${name} failed: ${errorMessage}`); - }); - } - function retryTypedResponse(name_1, method_1) { - return __awaiter2(this, arguments, void 0, function* (name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay2 = constants_1.DefaultRetryDelay) { - return yield retry2( - name, - method, - (response) => response.statusCode, - maxAttempts, - delay2, - // If the error object contains the statusCode property, extract it and return - // a TypedResponse so it can be processed by the retry logic. - (error3) => { - if (error3 instanceof http_client_1.HttpClientError) { - return { - statusCode: error3.statusCode, - result: null, - headers: {}, - error: error3 - }; - } else { - return void 0; - } - } - ); - }); - } - function retryHttpClientResponse(name_1, method_1) { - return __awaiter2(this, arguments, void 0, function* (name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay2 = constants_1.DefaultRetryDelay) { - return yield retry2(name, method, (response) => response.message.statusCode, maxAttempts, delay2); - }); - } - } }); -// node_modules/@azure/abort-controller/dist/index.js -var require_dist4 = __commonJS({ - "node_modules/@azure/abort-controller/dist/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - var listenersMap = /* @__PURE__ */ new WeakMap(); - var abortedMap = /* @__PURE__ */ new WeakMap(); - var AbortSignal2 = class _AbortSignal { - constructor() { - this.onabort = null; - listenersMap.set(this, []); - abortedMap.set(this, false); + /** + * Read a `fixed64` field, an unsigned, fixed-length 64-bit integer. + */ + fixed64() { + return new pb_long_1.PbULong(this.sfixed32(), this.sfixed32()); } /** - * Status of whether aborted or not. - * - * @readonly + * Read a `sfixed64` field, a signed, fixed-length 64-bit integer.
*/ - get aborted() { - if (!abortedMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); - } - return abortedMap.get(this); + sfixed64() { + return new pb_long_1.PbLong(this.sfixed32(), this.sfixed32()); } /** - * Creates a new AbortSignal instance that will never be aborted. - * - * @readonly + * Read a `float` field, 32-bit floating point number. */ - static get none() { - return new _AbortSignal(); + float() { + return this.view.getFloat32((this.pos += 4) - 4, true); } /** - * Added new "abort" event listener, only support "abort" event. - * - * @param _type - Only support "abort" event - * @param listener - The listener to be added + * Read a `double` field, a 64-bit floating point number. */ - addEventListener(_type, listener) { - if (!listenersMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); - } - const listeners = listenersMap.get(this); - listeners.push(listener); + double() { + return this.view.getFloat64((this.pos += 8) - 8, true); } /** - * Remove "abort" event listener, only support "abort" event. - * - * @param _type - Only support "abort" event - * @param listener - The listener to be removed + * Read a `bytes` field, length-delimited arbitrary data. */ - removeEventListener(_type, listener) { - if (!listenersMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); - } - const listeners = listenersMap.get(this); - const index = listeners.indexOf(listener); - if (index > -1) { - listeners.splice(index, 1); - } + bytes() { + let len = this.uint32(); + let start = this.pos; + this.pos += len; + this.assertBounds(); + return this.buf.subarray(start, start + len); } /** - * Dispatches a synthetic event to the AbortSignal. + * Read a `string` field, length-delimited data converted to UTF-8 text. */ - dispatchEvent(_event) { - throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); + string() { + return this.textDecoder.decode(this.bytes()); } }; - function abortSignal(signal) { - if (signal.aborted) { - return; - } - if (signal.onabort) { - signal.onabort.call(signal); - } - const listeners = listenersMap.get(signal); - if (listeners) { - listeners.slice().forEach((listener) => { - listener.call(signal, { type: "abort" }); - }); + exports2.BinaryReader = BinaryReader; + } +}); + +// node_modules/@protobuf-ts/runtime/build/commonjs/assert.js +var require_assert = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/assert.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.assertFloat32 = exports2.assertUInt32 = exports2.assertInt32 = exports2.assertNever = exports2.assert = void 0; + function assert(condition, msg) { + if (!condition) { + throw new Error(msg); } - abortedMap.set(signal, true); } - var AbortError = class extends Error { - constructor(message) { - super(message); - this.name = "AbortError"; - } + exports2.assert = assert; + function assertNever2(value, msg) { + throw new Error(msg !== null && msg !== void 0 ? 
msg : "Unexpected object: " + value); + } + exports2.assertNever = assertNever2; + var FLOAT32_MAX = 34028234663852886e22; + var FLOAT32_MIN = -34028234663852886e22; + var UINT32_MAX = 4294967295; + var INT32_MAX = 2147483647; + var INT32_MIN = -2147483648; + function assertInt32(arg) { + if (typeof arg !== "number") + throw new Error("invalid int 32: " + typeof arg); + if (!Number.isInteger(arg) || arg > INT32_MAX || arg < INT32_MIN) + throw new Error("invalid int 32: " + arg); + } + exports2.assertInt32 = assertInt32; + function assertUInt32(arg) { + if (typeof arg !== "number") + throw new Error("invalid uint 32: " + typeof arg); + if (!Number.isInteger(arg) || arg > UINT32_MAX || arg < 0) + throw new Error("invalid uint 32: " + arg); + } + exports2.assertUInt32 = assertUInt32; + function assertFloat32(arg) { + if (typeof arg !== "number") + throw new Error("invalid float 32: " + typeof arg); + if (!Number.isFinite(arg)) + return; + if (arg > FLOAT32_MAX || arg < FLOAT32_MIN) + throw new Error("invalid float 32: " + arg); + } + exports2.assertFloat32 = assertFloat32; + } +}); + +// node_modules/@protobuf-ts/runtime/build/commonjs/binary-writer.js +var require_binary_writer = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/binary-writer.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BinaryWriter = exports2.binaryWriteOptions = void 0; + var pb_long_1 = require_pb_long(); + var goog_varint_1 = require_goog_varint(); + var assert_1 = require_assert(); + var defaultsWrite = { + writeUnknownFields: true, + writerFactory: () => new BinaryWriter() }; - var AbortController2 = class { - // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types - constructor(parentSignals) { - this._signal = new AbortSignal2(); - if (!parentSignals) { - return; - } - if (!Array.isArray(parentSignals)) { - parentSignals = arguments; - } - for (const parentSignal of parentSignals) { - if (parentSignal.aborted) { - this.abort(); - } else { - parentSignal.addEventListener("abort", () => { - this.abort(); - }); - } + function binaryWriteOptions(options) { + return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; + } + exports2.binaryWriteOptions = binaryWriteOptions; + var BinaryWriter = class { + constructor(textEncoder) { + this.stack = []; + this.textEncoder = textEncoder !== null && textEncoder !== void 0 ? textEncoder : new TextEncoder(); + this.chunks = []; + this.buf = []; + } + /** + * Return all bytes written and reset this writer. + */ + finish() { + this.chunks.push(new Uint8Array(this.buf)); + let len = 0; + for (let i = 0; i < this.chunks.length; i++) + len += this.chunks[i].length; + let bytes = new Uint8Array(len); + let offset = 0; + for (let i = 0; i < this.chunks.length; i++) { + bytes.set(this.chunks[i], offset); + offset += this.chunks[i].length; } + this.chunks = []; + return bytes; } /** - * The AbortSignal associated with this controller that will signal aborted - * when the abort method is called on this controller. + * Start a new fork for length-delimited data like a message + * or a packed repeated field. * - * @readonly + * Must be joined later with `join()`. 
*/ - get signal() { - return this._signal; + fork() { + this.stack.push({ chunks: this.chunks, buf: this.buf }); + this.chunks = []; + this.buf = []; + return this; } /** - * Signal that any operations passed this controller's associated abort signal - * to cancel any remaining work and throw an `AbortError`. + * Join the last fork. Write its length and bytes, then + * return to the previous state. */ - abort() { - abortSignal(this._signal); + join() { + let chunk = this.finish(); + let prev = this.stack.pop(); + if (!prev) + throw new Error("invalid state, fork stack empty"); + this.chunks = prev.chunks; + this.buf = prev.buf; + this.uint32(chunk.byteLength); + return this.raw(chunk); } /** - * Creates a new AbortSignal instance that will abort after the provided ms. - * @param ms - Elapsed time in milliseconds to trigger an abort. + * Writes a tag (field number and wire type). + * + * Equivalent to `uint32( (fieldNo << 3 | type) >>> 0 )`. + * + * Generated code should compute the tag ahead of time and call `uint32()`. */ - static timeout(ms) { - const signal = new AbortSignal2(); - const timer = setTimeout(abortSignal, ms, signal); - if (typeof timer.unref === "function") { - timer.unref(); - } - return signal; - } - }; - exports2.AbortController = AbortController2; - exports2.AbortError = AbortError; - exports2.AbortSignal = AbortSignal2; - } -}); - -// node_modules/@actions/cache/lib/internal/downloadUtils.js -var require_downloadUtils = __commonJS({ - "node_modules/@actions/cache/lib/internal/downloadUtils.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; + tag(fieldNo, type2) { + return this.uint32((fieldNo << 3 | type2) >>> 0); } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + /** + * Write a chunk of raw bytes. + */ + raw(chunk) { + if (this.buf.length) { + this.chunks.push(new Uint8Array(this.buf)); + this.buf = []; } - __setModuleDefault2(result, mod); - return result; - }; - })(); - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? 
value : new P(function(resolve2) { - resolve2(value); - }); + this.chunks.push(chunk); + return this; } - return new (P || (P = Promise))(function(resolve2, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + /** + * Write a `uint32` value, an unsigned 32-bit varint. + */ + uint32(value) { + assert_1.assertUInt32(value); + while (value > 127) { + this.buf.push(value & 127 | 128); + value = value >>> 7; } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.DownloadProgress = void 0; - exports2.downloadCacheHttpClient = downloadCacheHttpClient; - exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; - exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; - var core12 = __importStar2(require_core()); - var http_client_1 = require_lib(); - var storage_blob_1 = require_commonjs15(); - var buffer = __importStar2(require("buffer")); - var fs = __importStar2(require("fs")); - var stream = __importStar2(require("stream")); - var util = __importStar2(require("util")); - var utils = __importStar2(require_cacheUtils()); - var constants_1 = require_constants12(); - var requestUtils_1 = require_requestUtils(); - var abort_controller_1 = require_dist4(); - function pipeResponseToStream(response, output) { - return __awaiter2(this, void 0, void 0, function* () { - const pipeline = util.promisify(stream.pipeline); - yield pipeline(response.message, output); - }); - } - var DownloadProgress = class { - constructor(contentLength) { - this.contentLength = contentLength; - this.segmentIndex = 0; - this.segmentSize = 0; - this.segmentOffset = 0; - this.receivedBytes = 0; - this.displayedComplete = false; - this.startTime = Date.now(); + this.buf.push(value); + return this; } /** - * Progress to the next segment. Only call this method when the previous segment - * is complete. - * - * @param segmentSize the length of the next segment + * Write an `int32` value, a signed 32-bit varint. */ - nextSegment(segmentSize) { - this.segmentOffset = this.segmentOffset + this.segmentSize; - this.segmentIndex = this.segmentIndex + 1; - this.segmentSize = segmentSize; - this.receivedBytes = 0; - core12.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + int32(value) { + assert_1.assertInt32(value); + goog_varint_1.varint32write(value, this.buf); + return this; } /** - * Sets the number of bytes received for the current segment. - * - * @param receivedBytes the number of bytes received + * Write a `bool` value, a varint. */ - setReceivedBytes(receivedBytes) { - this.receivedBytes = receivedBytes; + bool(value) { + this.buf.push(value ? 1 : 0); + return this; } /** - * Returns the total number of bytes transferred. + * Write a `bytes` value, length-delimited arbitrary data. */ - getTransferredBytes() { - return this.segmentOffset + this.receivedBytes; + bytes(value) { + this.uint32(value.byteLength); + return this.raw(value); } /** - * Returns true if the download is complete. + * Write a `string` value, length-delimited data converted to UTF-8 text.
*/ - isDone() { - return this.getTransferredBytes() === this.contentLength; + string(value) { + let chunk = this.textEncoder.encode(value); + this.uint32(chunk.byteLength); + return this.raw(chunk); } /** - * Prints the current download stats. Once the download completes, this will print one - * last line and then stop. + * Write a `float` value, 32-bit floating point number. */ - display() { - if (this.displayedComplete) { - return; - } - const transferredBytes = this.segmentOffset + this.receivedBytes; - const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); - const elapsedTime = Date.now() - this.startTime; - const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core12.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); - if (this.isDone()) { - this.displayedComplete = true; - } + float(value) { + assert_1.assertFloat32(value); + let chunk = new Uint8Array(4); + new DataView(chunk.buffer).setFloat32(0, value, true); + return this.raw(chunk); } /** - * Returns a function used to handle TransferProgressEvents. + * Write a `double` value, a 64-bit floating point number. */ - onProgress() { - return (progress) => { - this.setReceivedBytes(progress.loadedBytes); - }; + double(value) { + let chunk = new Uint8Array(8); + new DataView(chunk.buffer).setFloat64(0, value, true); + return this.raw(chunk); } /** - * Starts the timer that displays the stats. - * - * @param delayInMs the delay between each write + * Write a `fixed32` value, an unsigned, fixed-length 32-bit integer. */ - startDisplayTimer(delayInMs = 1e3) { - const displayCallback = () => { - this.display(); - if (!this.isDone()) { - this.timeoutHandle = setTimeout(displayCallback, delayInMs); - } - }; - this.timeoutHandle = setTimeout(displayCallback, delayInMs); + fixed32(value) { + assert_1.assertUInt32(value); + let chunk = new Uint8Array(4); + new DataView(chunk.buffer).setUint32(0, value, true); + return this.raw(chunk); } /** - * Stops the timer that displays the stats. As this typically indicates the download - * is complete, this will display one last line, unless the last line has already - * been written. + * Write a `sfixed32` value, a signed, fixed-length 32-bit integer. */ - stopDisplayTimer() { - if (this.timeoutHandle) { - clearTimeout(this.timeoutHandle); - this.timeoutHandle = void 0; - } - this.display(); - } - }; - exports2.DownloadProgress = DownloadProgress; - function downloadCacheHttpClient(archiveLocation, archivePath) { - return __awaiter2(this, void 0, void 0, function* () { - const writeStream = fs.createWriteStream(archivePath); - const httpClient = new http_client_1.HttpClient("actions/cache"); - const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter2(this, void 0, void 0, function* () { - return httpClient.get(archiveLocation); - })); - downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { - downloadResponse.message.destroy(); - core12.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); - }); - yield pipeResponseToStream(downloadResponse, writeStream); - const contentLengthHeader = downloadResponse.message.headers["content-length"]; - if (contentLengthHeader) { - const expectedLength = parseInt(contentLengthHeader); - const actualLength = utils.getArchiveFileSizeInBytes(archivePath); - if (actualLength !== expectedLength) { - throw new Error(`Incomplete download. 
Expected file size: ${expectedLength}, actual file size: ${actualLength}`); - } - } else { - core12.debug("Unable to validate download, no Content-Length header"); - } - }); - } - function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { - return __awaiter2(this, void 0, void 0, function* () { - var _a; - const archiveDescriptor = yield fs.promises.open(archivePath, "w"); - const httpClient = new http_client_1.HttpClient("actions/cache", void 0, { - socketTimeout: options.timeoutInMs, - keepAlive: true - }); - try { - const res = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCacheMetadata", () => __awaiter2(this, void 0, void 0, function* () { - return yield httpClient.request("HEAD", archiveLocation, null, {}); - })); - const lengthHeader = res.message.headers["content-length"]; - if (lengthHeader === void 0 || lengthHeader === null) { - throw new Error("Content-Length not found on blob response"); - } - const length = parseInt(lengthHeader); - if (Number.isNaN(length)) { - throw new Error(`Could not interpret Content-Length: ${length}`); - } - const downloads = []; - const blockSize = 4 * 1024 * 1024; - for (let offset = 0; offset < length; offset += blockSize) { - const count = Math.min(blockSize, length - offset); - downloads.push({ - offset, - promiseGetter: () => __awaiter2(this, void 0, void 0, function* () { - return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count); - }) - }); - } - downloads.reverse(); - let actives = 0; - let bytesDownloaded = 0; - const progress = new DownloadProgress(length); - progress.startDisplayTimer(); - const progressFn = progress.onProgress(); - const activeDownloads = []; - let nextDownload; - const waitAndWrite = () => __awaiter2(this, void 0, void 0, function* () { - const segment = yield Promise.race(Object.values(activeDownloads)); - yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); - actives--; - delete activeDownloads[segment.offset]; - bytesDownloaded += segment.count; - progressFn({ loadedBytes: bytesDownloaded }); - }); - while (nextDownload = downloads.pop()) { - activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); - actives++; - if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? 
_a : 10)) { - yield waitAndWrite(); - } - } - while (actives > 0) { - yield waitAndWrite(); - } - } finally { - httpClient.dispose(); - yield archiveDescriptor.close(); - } - }); - } - function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { - return __awaiter2(this, void 0, void 0, function* () { - const retries = 5; - let failures = 0; - while (true) { - try { - const timeout = 3e4; - const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); - if (typeof result === "string") { - throw new Error("downloadSegmentRetry failed due to timeout"); - } - return result; - } catch (err) { - if (failures >= retries) { - throw err; - } - failures++; - } - } - }); - } - function downloadSegment(httpClient, archiveLocation, offset, count) { - return __awaiter2(this, void 0, void 0, function* () { - const partRes = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCachePart", () => __awaiter2(this, void 0, void 0, function* () { - return yield httpClient.get(archiveLocation, { - Range: `bytes=${offset}-${offset + count - 1}` - }); - })); - if (!partRes.readBodyBuffer) { - throw new Error("Expected HttpClientResponse to implement readBodyBuffer"); - } - return { - offset, - count, - buffer: yield partRes.readBodyBuffer() - }; - }); - } - function downloadCacheStorageSDK(archiveLocation, archivePath, options) { - return __awaiter2(this, void 0, void 0, function* () { - var _a; - const client = new storage_blob_1.BlockBlobClient(archiveLocation, void 0, { - retryOptions: { - // Override the timeout used when downloading each 4 MB chunk - // The default is 2 min / MB, which is way too slow - tryTimeoutInMs: options.timeoutInMs - } - }); - const properties = yield client.getProperties(); - const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; - if (contentLength < 0) { - core12.debug("Unable to determine content length, downloading file with http-client..."); - yield downloadCacheHttpClient(archiveLocation, archivePath); - } else { - const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); - const downloadProgress = new DownloadProgress(contentLength); - const fd = fs.openSync(archivePath, "w"); - try { - downloadProgress.startDisplayTimer(); - const controller = new abort_controller_1.AbortController(); - const abortSignal = controller.signal; - while (!downloadProgress.isDone()) { - const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; - const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); - downloadProgress.nextSegment(segmentSize); - const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 36e5, client.downloadToBuffer(segmentStart, segmentSize, { - abortSignal, - concurrency: options.downloadConcurrency, - onProgress: downloadProgress.onProgress() - })); - if (result === "timeout") { - controller.abort(); - throw new Error("Aborting cache download as the download time exceeded the timeout."); - } else if (Buffer.isBuffer(result)) { - fs.writeFileSync(fd, result); - } - } - } finally { - downloadProgress.stopDisplayTimer(); - fs.closeSync(fd); - } - } - }); - } - var promiseWithTimeout = (timeoutMs, promise) => __awaiter2(void 0, void 0, void 0, function* () { - let timeoutHandle; - const timeoutPromise = new Promise((resolve2) => { - timeoutHandle = setTimeout(() => resolve2("timeout"), timeoutMs); - }); - return Promise.race([promise, timeoutPromise]).then((result) => { - clearTimeout(timeoutHandle); - return result; - }); - }); - } -}); - -// node_modules/@actions/cache/lib/options.js -var require_options = __commonJS({ - "node_modules/@actions/cache/lib/options.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); - } - __setModuleDefault2(result, mod); - return result; - }; - })(); - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getUploadOptions = getUploadOptions; - exports2.getDownloadOptions = getDownloadOptions; - var core12 = __importStar2(require_core()); - function getUploadOptions(copy) { - const result = { - useAzureSdk: false, - uploadConcurrency: 4, - uploadChunkSize: 32 * 1024 * 1024 - }; - if (copy) { - if (typeof copy.useAzureSdk === "boolean") { - result.useAzureSdk = copy.useAzureSdk; - } - if (typeof copy.uploadConcurrency === "number") { - result.uploadConcurrency = copy.uploadConcurrency; - } - if (typeof copy.uploadChunkSize === "number") { - result.uploadChunkSize = copy.uploadChunkSize; - } + sfixed32(value) { + assert_1.assertInt32(value); + let chunk = new Uint8Array(4); + new DataView(chunk.buffer).setInt32(0, value, true); + return this.raw(chunk); } - result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; - result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; - core12.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core12.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core12.debug(`Upload chunk size: ${result.uploadChunkSize}`); - return result; - } - function getDownloadOptions(copy) { - const result = { - useAzureSdk: false, - concurrentBlobDownloads: true, - downloadConcurrency: 8, - timeoutInMs: 3e4, - segmentTimeoutInMs: 6e5, - lookupOnly: false - }; - if (copy) { - if (typeof copy.useAzureSdk === "boolean") { - result.useAzureSdk = copy.useAzureSdk; - } - if (typeof copy.concurrentBlobDownloads === "boolean") { - result.concurrentBlobDownloads = copy.concurrentBlobDownloads; - } - if (typeof copy.downloadConcurrency === "number") { - result.downloadConcurrency = copy.downloadConcurrency; - } - if (typeof copy.timeoutInMs === "number") { - result.timeoutInMs = copy.timeoutInMs; - } - if (typeof copy.segmentTimeoutInMs === "number") { - result.segmentTimeoutInMs = copy.segmentTimeoutInMs; - } - if (typeof copy.lookupOnly === "boolean") { - result.lookupOnly = copy.lookupOnly; - } + /** + * Write a `sint32` value, a signed, zigzag-encoded 32-bit varint. 
+ */ + sint32(value) { + assert_1.assertInt32(value); + value = (value << 1 ^ value >> 31) >>> 0; + goog_varint_1.varint32write(value, this.buf); + return this; } - const segmentDownloadTimeoutMins = process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]; - if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { - result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; + /** + * Write a `sfixed64` value, a signed, fixed-length 64-bit integer. + */ + sfixed64(value) { + let chunk = new Uint8Array(8); + let view = new DataView(chunk.buffer); + let long = pb_long_1.PbLong.from(value); + view.setInt32(0, long.lo, true); + view.setInt32(4, long.hi, true); + return this.raw(chunk); } - core12.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core12.debug(`Download concurrency: ${result.downloadConcurrency}`); - core12.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core12.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core12.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core12.debug(`Lookup only: ${result.lookupOnly}`); - return result; - } + /** + * Write a `fixed64` value, an unsigned, fixed-length 64-bit integer. + */ + fixed64(value) { + let chunk = new Uint8Array(8); + let view = new DataView(chunk.buffer); + let long = pb_long_1.PbULong.from(value); + view.setInt32(0, long.lo, true); + view.setInt32(4, long.hi, true); + return this.raw(chunk); } + /** + * Write an `int64` value, a signed 64-bit varint. + */ + int64(value) { + let long = pb_long_1.PbLong.from(value); + goog_varint_1.varint64write(long.lo, long.hi, this.buf); + return this; } + /** + * Write a `sint64` value, a signed, zig-zag-encoded 64-bit varint. + */ + sint64(value) { + let long = pb_long_1.PbLong.from(value), sign = long.hi >> 31, lo = long.lo << 1 ^ sign, hi = (long.hi << 1 | long.lo >>> 31) ^ sign; + goog_varint_1.varint64write(lo, hi, this.buf); + return this; } + /** + * Write a `uint64` value, an unsigned 64-bit varint.
+ */ + uint64(value) { + let long = pb_long_1.PbULong.from(value); + goog_varint_1.varint64write(long.lo, long.hi, this.buf); + return this; + } + }; + exports2.BinaryWriter = BinaryWriter; } }); -// node_modules/@actions/cache/lib/internal/config.js -var require_config = __commonJS({ - "node_modules/@actions/cache/lib/internal/config.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/json-format-contract.js +var require_json_format_contract = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/json-format-contract.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isGhes = isGhes; - exports2.getCacheServiceVersion = getCacheServiceVersion; - exports2.getCacheServiceURL = getCacheServiceURL; - function isGhes() { - const ghUrl = new URL(process.env["GITHUB_SERVER_URL"] || "https://github.com"); - const hostname = ghUrl.hostname.trimEnd().toUpperCase(); - const isGitHubHost = hostname === "GITHUB.COM"; - const isGheHost = hostname.endsWith(".GHE.COM"); - const isLocalHost = hostname.endsWith(".LOCALHOST"); - return !isGitHubHost && !isGheHost && !isLocalHost; + exports2.mergeJsonOptions = exports2.jsonWriteOptions = exports2.jsonReadOptions = void 0; + var defaultsWrite = { + emitDefaultValues: false, + enumAsInteger: false, + useProtoFieldName: false, + prettySpaces: 0 + }; + var defaultsRead = { + ignoreUnknownFields: false + }; + function jsonReadOptions(options) { + return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; } - function getCacheServiceVersion() { - if (isGhes()) - return "v1"; - return process.env["ACTIONS_CACHE_SERVICE_V2"] ? "v2" : "v1"; + exports2.jsonReadOptions = jsonReadOptions; + function jsonWriteOptions(options) { + return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; } - function getCacheServiceURL() { - const version = getCacheServiceVersion(); - switch (version) { - case "v1": - return process.env["ACTIONS_CACHE_URL"] || process.env["ACTIONS_RESULTS_URL"] || ""; - case "v2": - return process.env["ACTIONS_RESULTS_URL"] || ""; - default: - throw new Error(`Unsupported cache service version: ${version}`); - } + exports2.jsonWriteOptions = jsonWriteOptions; + function mergeJsonOptions(a, b) { + var _a, _b; + let c = Object.assign(Object.assign({}, a), b); + c.typeRegistry = [...(_a = a === null || a === void 0 ? void 0 : a.typeRegistry) !== null && _a !== void 0 ? _a : [], ...(_b = b === null || b === void 0 ? void 0 : b.typeRegistry) !== null && _b !== void 0 ? 
_b : []]; + return c; } + exports2.mergeJsonOptions = mergeJsonOptions; } }); -// node_modules/@actions/cache/package.json -var require_package2 = __commonJS({ - "node_modules/@actions/cache/package.json"(exports2, module2) { - module2.exports = { - name: "@actions/cache", - version: "5.0.5", - preview: true, - description: "Actions cache lib", - keywords: [ - "github", - "actions", - "cache" - ], - homepage: "https://github.com/actions/toolkit/tree/main/packages/cache", - license: "MIT", - main: "lib/cache.js", - types: "lib/cache.d.ts", - directories: { - lib: "lib", - test: "__tests__" - }, - files: [ - "lib", - "!.DS_Store" - ], - publishConfig: { - access: "public" - }, - repository: { - type: "git", - url: "git+https://github.com/actions/toolkit.git", - directory: "packages/cache" - }, - scripts: { - "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", - test: 'echo "Error: run tests from root" && exit 1', - tsc: "tsc" - }, - bugs: { - url: "https://github.com/actions/toolkit/issues" - }, - dependencies: { - "@actions/core": "^2.0.0", - "@actions/exec": "^2.0.0", - "@actions/glob": "^0.5.1", - "@protobuf-ts/runtime-rpc": "^2.11.1", - "@actions/http-client": "^3.0.2", - "@actions/io": "^2.0.0", - "@azure/abort-controller": "^1.1.0", - "@azure/core-rest-pipeline": "^1.22.0", - "@azure/storage-blob": "^12.29.1", - semver: "^6.3.1" - }, - devDependencies: { - "@types/node": "^24.1.0", - "@types/semver": "^6.0.0", - "@protobuf-ts/plugin": "^2.9.4", - typescript: "^5.2.2" - }, - overrides: { - "uri-js": "npm:uri-js-replace@^1.0.1", - "node-fetch": "^3.3.2" - } - }; - } -}); - -// node_modules/@actions/cache/lib/internal/shared/user-agent.js -var require_user_agent = __commonJS({ - "node_modules/@actions/cache/lib/internal/shared/user-agent.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/message-type-contract.js +var require_message_type_contract = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/message-type-contract.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getUserAgentString = getUserAgentString; - var packageJson = require_package2(); - function getUserAgentString() { - return `@actions/cache-${packageJson.version}`; - } + exports2.MESSAGE_TYPE = void 0; + exports2.MESSAGE_TYPE = /* @__PURE__ */ Symbol.for("protobuf-ts/message-type"); } }); -// node_modules/@actions/cache/lib/internal/cacheHttpClient.js -var require_cacheHttpClient = __commonJS({ - "node_modules/@actions/cache/lib/internal/cacheHttpClient.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/lower-camel-case.js +var require_lower_camel_case = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/lower-camel-case.js"(exports2) { "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); - } - __setModuleDefault2(result, mod); - return result; - }; - })(); - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve2) { - resolve2(value); - }); - } - return new (P || (P = Promise))(function(resolve2, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); - } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getCacheEntry = getCacheEntry; - exports2.downloadCache = downloadCache; - exports2.reserveCache = reserveCache; - exports2.saveCache = saveCache3; - var core12 = __importStar2(require_core()); - var http_client_1 = require_lib(); - var auth_1 = require_auth(); - var fs = __importStar2(require("fs")); - var url_1 = require("url"); - var utils = __importStar2(require_cacheUtils()); - var uploadUtils_1 = require_uploadUtils(); - var downloadUtils_1 = require_downloadUtils(); - var options_1 = require_options(); - var requestUtils_1 = require_requestUtils(); - var config_1 = require_config(); - var user_agent_1 = require_user_agent(); - function getCacheApiUrl(resource) { - const baseUrl = (0, config_1.getCacheServiceURL)(); - if (!baseUrl) { - throw new Error("Cache Service Url not found, unable to restore cache."); - } - const url = `${baseUrl}_apis/artifactcache/${resource}`; - core12.debug(`Resource Url: ${url}`); - return url; - } - function createAcceptHeader(type2, apiVersion) { - return `${type2};api-version=${apiVersion}`; - } - function getRequestOptions() { - const requestOptions = { - headers: { - Accept: createAcceptHeader("application/json", "6.0-preview.1") - } - }; - return requestOptions; - } - function createHttpClient() { - const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; - const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); - return new http_client_1.HttpClient((0, user_agent_1.getUserAgentString)(), [bearerCredentialHandler], getRequestOptions()); - } - function getCacheEntry(keys, paths, options) { - return __awaiter2(this, void 0, void 0, function* () { - const httpClient = createHttpClient(); - const version = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? 
void 0 : options.enableCrossOsArchive); - const resource = `cache?keys=${encodeURIComponent(keys.join(","))}&version=${version}`; - const response = yield (0, requestUtils_1.retryTypedResponse)("getCacheEntry", () => __awaiter2(this, void 0, void 0, function* () { - return httpClient.getJson(getCacheApiUrl(resource)); - })); - if (response.statusCode === 204) { - if (core12.isDebug()) { - yield printCachesListForDiagnostics(keys[0], httpClient, version); - } - return null; - } - if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) { - throw new Error(`Cache service responded with ${response.statusCode}`); - } - const cacheResult = response.result; - const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; - if (!cacheDownloadUrl) { - throw new Error("Cache not found."); - } - core12.setSecret(cacheDownloadUrl); - core12.debug(`Cache Result:`); - core12.debug(JSON.stringify(cacheResult)); - return cacheResult; - }); - } - function printCachesListForDiagnostics(key, httpClient, version) { - return __awaiter2(this, void 0, void 0, function* () { - const resource = `caches?key=${encodeURIComponent(key)}`; - const response = yield (0, requestUtils_1.retryTypedResponse)("listCache", () => __awaiter2(this, void 0, void 0, function* () { - return httpClient.getJson(getCacheApiUrl(resource)); - })); - if (response.statusCode === 200) { - const cacheListResult = response.result; - const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; - if (totalCount && totalCount > 0) { - core12.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key -Other caches with similar key:`); - for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core12.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.creationTime}`); - } - } - } - }); - } - function downloadCache(archiveLocation, archivePath, options) { - return __awaiter2(this, void 0, void 0, function* () { - const archiveUrl = new url_1.URL(archiveLocation); - const downloadOptions = (0, options_1.getDownloadOptions)(options); - if (archiveUrl.hostname.endsWith(".blob.core.windows.net")) { - if (downloadOptions.useAzureSdk) { - yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); - } else if (downloadOptions.concurrentBlobDownloads) { - yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); - } else { - yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); - } - } else { - yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); - } - }); - } - function reserveCache(key, paths, options) { - return __awaiter2(this, void 0, void 0, function* () { - const httpClient = createHttpClient(); - const version = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); - const reserveCacheRequest = { - key, - version, - cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize - }; - const response = yield (0, requestUtils_1.retryTypedResponse)("reserveCache", () => __awaiter2(this, void 0, void 0, function* () { - return httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest); - })); - return response; - }); - } - function getContentRange(start, end) { - return `bytes ${start}-${end}/*`; - } - function uploadChunk(httpClient, resourceUrl, openStream, start, end) { - return __awaiter2(this, void 0, void 0, function* () { - core12.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); - const additionalHeaders = { - "Content-Type": "application/octet-stream", - "Content-Range": getContentRange(start, end) - }; - const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter2(this, void 0, void 0, function* () { - return httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders); - })); - if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) { - throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); - } - }); - } - function uploadFile(httpClient, cacheId, archivePath, options) { - return __awaiter2(this, void 0, void 0, function* () { - const fileSize = utils.getArchiveFileSizeInBytes(archivePath); - const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs.openSync(archivePath, "r"); - const uploadOptions = (0, options_1.getUploadOptions)(options); - const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); - const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); - const parallelUploads = [...new Array(concurrency).keys()]; - core12.debug("Awaiting all uploads"); - let offset = 0; - try { - yield Promise.all(parallelUploads.map(() => __awaiter2(this, void 0, void 0, function* () { - while (offset < fileSize) { - const chunkSize = Math.min(fileSize - offset, maxChunkSize); - const start = offset; - const end = offset + chunkSize - 1; - offset += maxChunkSize; - 
yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, { - fd, - start, - end, - autoClose: false - }).on("error", (error3) => { - throw new Error(`Cache upload failed because file read failed with ${error3.message}`); - }), start, end); - } - }))); - } finally { - fs.closeSync(fd); - } - return; - }); - } - function commitCache(httpClient, cacheId, filesize) { - return __awaiter2(this, void 0, void 0, function* () { - const commitCacheRequest = { size: filesize }; - return yield (0, requestUtils_1.retryTypedResponse)("commitCache", () => __awaiter2(this, void 0, void 0, function* () { - return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); - })); - }); - } - function saveCache3(cacheId, archivePath, signedUploadURL, options) { - return __awaiter2(this, void 0, void 0, function* () { - const uploadOptions = (0, options_1.getUploadOptions)(options); - if (uploadOptions.useAzureSdk) { - if (!signedUploadURL) { - throw new Error("Azure Storage SDK can only be used when a signed URL is provided."); - } - yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); - } else { - const httpClient = createHttpClient(); - core12.debug("Upload cache"); - yield uploadFile(httpClient, cacheId, archivePath, options); - core12.debug("Commiting cache"); - const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); - const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); - if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { - throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); - } - core12.info("Cache saved successfully"); + exports2.lowerCamelCase = void 0; + function lowerCamelCase(snakeCase) { + let capNext = false; + const sb = []; + for (let i = 0; i < snakeCase.length; i++) { + let next = snakeCase.charAt(i); + if (next == "_") { + capNext = true; + } else if (/\d/.test(next)) { + sb.push(next); + capNext = true; + } else if (capNext) { + sb.push(next.toUpperCase()); + capNext = false; + } else if (i == 0) { + sb.push(next.toLowerCase()); + } else { + sb.push(next); } - }); + } + return sb.join(""); } + exports2.lowerCamelCase = lowerCamelCase; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/json-typings.js -var require_json_typings = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/json-typings.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-info.js +var require_reflection_info = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-info.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isJsonObject = exports2.typeofJsonValue = void 0; - function typeofJsonValue(value) { - let t = typeof value; - if (t == "object") { - if (Array.isArray(value)) - return "array"; - if (value === null) - return "null"; + exports2.readMessageOption = exports2.readFieldOption = exports2.readFieldOptions = exports2.normalizeFieldInfo = exports2.RepeatType = exports2.LongType = exports2.ScalarType = void 0; + var lower_camel_case_1 = require_lower_camel_case(); + var ScalarType; + (function(ScalarType2) { + ScalarType2[ScalarType2["DOUBLE"] = 1] = "DOUBLE"; + ScalarType2[ScalarType2["FLOAT"] = 2] = "FLOAT"; + ScalarType2[ScalarType2["INT64"] = 3] = "INT64"; + ScalarType2[ScalarType2["UINT64"] = 4] 
= "UINT64"; + ScalarType2[ScalarType2["INT32"] = 5] = "INT32"; + ScalarType2[ScalarType2["FIXED64"] = 6] = "FIXED64"; + ScalarType2[ScalarType2["FIXED32"] = 7] = "FIXED32"; + ScalarType2[ScalarType2["BOOL"] = 8] = "BOOL"; + ScalarType2[ScalarType2["STRING"] = 9] = "STRING"; + ScalarType2[ScalarType2["BYTES"] = 12] = "BYTES"; + ScalarType2[ScalarType2["UINT32"] = 13] = "UINT32"; + ScalarType2[ScalarType2["SFIXED32"] = 15] = "SFIXED32"; + ScalarType2[ScalarType2["SFIXED64"] = 16] = "SFIXED64"; + ScalarType2[ScalarType2["SINT32"] = 17] = "SINT32"; + ScalarType2[ScalarType2["SINT64"] = 18] = "SINT64"; + })(ScalarType = exports2.ScalarType || (exports2.ScalarType = {})); + var LongType; + (function(LongType2) { + LongType2[LongType2["BIGINT"] = 0] = "BIGINT"; + LongType2[LongType2["STRING"] = 1] = "STRING"; + LongType2[LongType2["NUMBER"] = 2] = "NUMBER"; + })(LongType = exports2.LongType || (exports2.LongType = {})); + var RepeatType; + (function(RepeatType2) { + RepeatType2[RepeatType2["NO"] = 0] = "NO"; + RepeatType2[RepeatType2["PACKED"] = 1] = "PACKED"; + RepeatType2[RepeatType2["UNPACKED"] = 2] = "UNPACKED"; + })(RepeatType = exports2.RepeatType || (exports2.RepeatType = {})); + function normalizeFieldInfo(field) { + var _a, _b, _c, _d; + field.localName = (_a = field.localName) !== null && _a !== void 0 ? _a : lower_camel_case_1.lowerCamelCase(field.name); + field.jsonName = (_b = field.jsonName) !== null && _b !== void 0 ? _b : lower_camel_case_1.lowerCamelCase(field.name); + field.repeat = (_c = field.repeat) !== null && _c !== void 0 ? _c : RepeatType.NO; + field.opt = (_d = field.opt) !== null && _d !== void 0 ? _d : field.repeat ? false : field.oneof ? false : field.kind == "message"; + return field; + } + exports2.normalizeFieldInfo = normalizeFieldInfo; + function readFieldOptions(messageType, fieldName, extensionName, extensionType) { + var _a; + const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; + return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : void 0; + } + exports2.readFieldOptions = readFieldOptions; + function readFieldOption(messageType, fieldName, extensionName, extensionType) { + var _a; + const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; + if (!options) { + return void 0; } - return t; + const optionVal = options[extensionName]; + if (optionVal === void 0) { + return optionVal; + } + return extensionType ? extensionType.fromJson(optionVal) : optionVal; } - exports2.typeofJsonValue = typeofJsonValue; - function isJsonObject(value) { - return value !== null && typeof value == "object" && !Array.isArray(value); + exports2.readFieldOption = readFieldOption; + function readMessageOption(messageType, extensionName, extensionType) { + const options = messageType.options; + const optionVal = options[extensionName]; + if (optionVal === void 0) { + return optionVal; + } + return extensionType ? 
extensionType.fromJson(optionVal) : optionVal; } - exports2.isJsonObject = isJsonObject; + exports2.readMessageOption = readMessageOption; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/base64.js -var require_base642 = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/base64.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/oneof.js +var require_oneof = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/oneof.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.base64encode = exports2.base64decode = void 0; - var encTable = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split(""); - var decTable = []; - for (let i = 0; i < encTable.length; i++) - decTable[encTable[i].charCodeAt(0)] = i; - decTable["-".charCodeAt(0)] = encTable.indexOf("+"); - decTable["_".charCodeAt(0)] = encTable.indexOf("/"); - function base64decode(base64Str) { - let es = base64Str.length * 3 / 4; - if (base64Str[base64Str.length - 2] == "=") - es -= 2; - else if (base64Str[base64Str.length - 1] == "=") - es -= 1; - let bytes = new Uint8Array(es), bytePos = 0, groupPos = 0, b, p = 0; - for (let i = 0; i < base64Str.length; i++) { - b = decTable[base64Str.charCodeAt(i)]; - if (b === void 0) { - switch (base64Str[i]) { - case "=": - groupPos = 0; - // reset state when padding found - case "\n": - case "\r": - case " ": - case " ": - continue; - // skip white-space, and padding - default: - throw Error(`invalid base64 string.`); - } - } - switch (groupPos) { - case 0: - p = b; - groupPos = 1; - break; - case 1: - bytes[bytePos++] = p << 2 | (b & 48) >> 4; - p = b; - groupPos = 2; - break; - case 2: - bytes[bytePos++] = (p & 15) << 4 | (b & 60) >> 2; - p = b; - groupPos = 3; - break; - case 3: - bytes[bytePos++] = (p & 3) << 6 | b; - groupPos = 0; - break; - } + exports2.getSelectedOneofValue = exports2.clearOneofValue = exports2.setUnknownOneofValue = exports2.setOneofValue = exports2.getOneofValue = exports2.isOneofGroup = void 0; + function isOneofGroup(any) { + if (typeof any != "object" || any === null || !any.hasOwnProperty("oneofKind")) { + return false; + } + switch (typeof any.oneofKind) { + case "string": + if (any[any.oneofKind] === void 0) + return false; + return Object.keys(any).length == 2; + case "undefined": + return Object.keys(any).length == 1; + default: + return false; } - if (groupPos == 1) - throw Error(`invalid base64 string.`); - return bytes.subarray(0, bytePos); } - exports2.base64decode = base64decode; - function base64encode(bytes) { - let base64 = "", groupPos = 0, b, p = 0; - for (let i = 0; i < bytes.length; i++) { - b = bytes[i]; - switch (groupPos) { - case 0: - base64 += encTable[b >> 2]; - p = (b & 3) << 4; - groupPos = 1; - break; - case 1: - base64 += encTable[p | b >> 4]; - p = (b & 15) << 2; - groupPos = 2; - break; - case 2: - base64 += encTable[p | b >> 6]; - base64 += encTable[b & 63]; - groupPos = 0; - break; - } + exports2.isOneofGroup = isOneofGroup; + function getOneofValue(oneof, kind) { + return oneof[kind]; + } + exports2.getOneofValue = getOneofValue; + function setOneofValue(oneof, kind, value) { + if (oneof.oneofKind !== void 0) { + delete oneof[oneof.oneofKind]; } - if (groupPos) { - base64 += encTable[p]; - base64 += "="; - if (groupPos == 1) - base64 += "="; + oneof.oneofKind = kind; + if (value !== void 0) { + oneof[kind] = value; } - return base64; } - exports2.base64encode = base64encode; - } -}); - -// 
node_modules/@protobuf-ts/runtime/build/commonjs/protobufjs-utf8.js -var require_protobufjs_utf8 = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/protobufjs-utf8.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.utf8read = void 0; - var fromCharCodes = (chunk) => String.fromCharCode.apply(String, chunk); - function utf8read(bytes) { - if (bytes.length < 1) - return ""; - let pos = 0, parts = [], chunk = [], i = 0, t; - let len = bytes.length; - while (pos < len) { - t = bytes[pos++]; - if (t < 128) - chunk[i++] = t; - else if (t > 191 && t < 224) - chunk[i++] = (t & 31) << 6 | bytes[pos++] & 63; - else if (t > 239 && t < 365) { - t = ((t & 7) << 18 | (bytes[pos++] & 63) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63) - 65536; - chunk[i++] = 55296 + (t >> 10); - chunk[i++] = 56320 + (t & 1023); - } else - chunk[i++] = (t & 15) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63; - if (i > 8191) { - parts.push(fromCharCodes(chunk)); - i = 0; - } + exports2.setOneofValue = setOneofValue; + function setUnknownOneofValue(oneof, kind, value) { + if (oneof.oneofKind !== void 0) { + delete oneof[oneof.oneofKind]; } - if (parts.length) { - if (i) - parts.push(fromCharCodes(chunk.slice(0, i))); - return parts.join(""); + oneof.oneofKind = kind; + if (value !== void 0 && kind !== void 0) { + oneof[kind] = value; } - return fromCharCodes(chunk.slice(0, i)); } - exports2.utf8read = utf8read; + exports2.setUnknownOneofValue = setUnknownOneofValue; + function clearOneofValue(oneof) { + if (oneof.oneofKind !== void 0) { + delete oneof[oneof.oneofKind]; + } + oneof.oneofKind = void 0; + } + exports2.clearOneofValue = clearOneofValue; + function getSelectedOneofValue(oneof) { + if (oneof.oneofKind === void 0) { + return void 0; + } + return oneof[oneof.oneofKind]; + } + exports2.getSelectedOneofValue = getSelectedOneofValue; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/binary-format-contract.js -var require_binary_format_contract = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/binary-format-contract.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-type-check.js +var require_reflection_type_check = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-type-check.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.WireType = exports2.mergeBinaryOptions = exports2.UnknownFieldHandler = void 0; - var UnknownFieldHandler; - (function(UnknownFieldHandler2) { - UnknownFieldHandler2.symbol = /* @__PURE__ */ Symbol.for("protobuf-ts/unknown"); - UnknownFieldHandler2.onRead = (typeName, message, fieldNo, wireType, data) => { - let container = is(message) ? message[UnknownFieldHandler2.symbol] : message[UnknownFieldHandler2.symbol] = []; - container.push({ no: fieldNo, wireType, data }); - }; - UnknownFieldHandler2.onWrite = (typeName, message, writer) => { - for (let { no, wireType, data } of UnknownFieldHandler2.list(message)) - writer.tag(no, wireType).raw(data); - }; - UnknownFieldHandler2.list = (message, fieldNo) => { - if (is(message)) { - let all = message[UnknownFieldHandler2.symbol]; - return fieldNo ? 
all.filter((uf) => uf.no == fieldNo) : all; + exports2.ReflectionTypeCheck = void 0; + var reflection_info_1 = require_reflection_info(); + var oneof_1 = require_oneof(); + var ReflectionTypeCheck = class { + constructor(info6) { + var _a; + this.fields = (_a = info6.fields) !== null && _a !== void 0 ? _a : []; + } + prepare() { + if (this.data) + return; + const req = [], known = [], oneofs = []; + for (let field of this.fields) { + if (field.oneof) { + if (!oneofs.includes(field.oneof)) { + oneofs.push(field.oneof); + req.push(field.oneof); + known.push(field.oneof); + } + } else { + known.push(field.localName); + switch (field.kind) { + case "scalar": + case "enum": + if (!field.opt || field.repeat) + req.push(field.localName); + break; + case "message": + if (field.repeat) + req.push(field.localName); + break; + case "map": + req.push(field.localName); + break; + } + } } - return []; - }; - UnknownFieldHandler2.last = (message, fieldNo) => UnknownFieldHandler2.list(message, fieldNo).slice(-1)[0]; - const is = (message) => message && Array.isArray(message[UnknownFieldHandler2.symbol]); - })(UnknownFieldHandler = exports2.UnknownFieldHandler || (exports2.UnknownFieldHandler = {})); - function mergeBinaryOptions(a, b) { - return Object.assign(Object.assign({}, a), b); - } - exports2.mergeBinaryOptions = mergeBinaryOptions; - var WireType; - (function(WireType2) { - WireType2[WireType2["Varint"] = 0] = "Varint"; - WireType2[WireType2["Bit64"] = 1] = "Bit64"; - WireType2[WireType2["LengthDelimited"] = 2] = "LengthDelimited"; - WireType2[WireType2["StartGroup"] = 3] = "StartGroup"; - WireType2[WireType2["EndGroup"] = 4] = "EndGroup"; - WireType2[WireType2["Bit32"] = 5] = "Bit32"; - })(WireType = exports2.WireType || (exports2.WireType = {})); + this.data = { req, known, oneofs: Object.values(oneofs) }; + } + /** + * Is the argument a valid message as specified by the + * reflection information? + * + * Checks all field types recursively. The `depth` + * specifies how deep into the structure the check will be. + * + * With a depth of 0, only the presence of fields + * is checked. + * + * With a depth of 1 or more, the field types are checked. + * + * With a depth of 2 or more, the members of map, repeated + * and message fields are checked. + * + * Message fields will be checked recursively with depth - 1. + * + * The number of map entries / repeated values being checked + * is < depth. 
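+ * + * For example, is(msg, 0) only checks which of the expected fields are present, is(msg, 1) also checks their types, and a larger depth such as is(msg, 16) recursively checks the members of map, repeated and message fields as described above.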
+ */ + is(message, depth, allowExcessProperties = false) { + if (depth < 0) + return true; + if (message === null || message === void 0 || typeof message != "object") + return false; + this.prepare(); + let keys = Object.keys(message), data = this.data; + if (keys.length < data.req.length || data.req.some((n) => !keys.includes(n))) + return false; + if (!allowExcessProperties) { + if (keys.some((k) => !data.known.includes(k))) + return false; + } + if (depth < 1) { + return true; + } + for (const name of data.oneofs) { + const group = message[name]; + if (!oneof_1.isOneofGroup(group)) + return false; + if (group.oneofKind === void 0) + continue; + const field = this.fields.find((f) => f.localName === group.oneofKind); + if (!field) + return false; + if (!this.field(group[group.oneofKind], field, allowExcessProperties, depth)) + return false; + } + for (const field of this.fields) { + if (field.oneof !== void 0) + continue; + if (!this.field(message[field.localName], field, allowExcessProperties, depth)) + return false; + } + return true; + } + field(arg, field, allowExcessProperties, depth) { + let repeated = field.repeat; + switch (field.kind) { + case "scalar": + if (arg === void 0) + return field.opt; + if (repeated) + return this.scalars(arg, field.T, depth, field.L); + return this.scalar(arg, field.T, field.L); + case "enum": + if (arg === void 0) + return field.opt; + if (repeated) + return this.scalars(arg, reflection_info_1.ScalarType.INT32, depth); + return this.scalar(arg, reflection_info_1.ScalarType.INT32); + case "message": + if (arg === void 0) + return true; + if (repeated) + return this.messages(arg, field.T(), allowExcessProperties, depth); + return this.message(arg, field.T(), allowExcessProperties, depth); + case "map": + if (typeof arg != "object" || arg === null) + return false; + if (depth < 2) + return true; + if (!this.mapKeys(arg, field.K, depth)) + return false; + switch (field.V.kind) { + case "scalar": + return this.scalars(Object.values(arg), field.V.T, depth, field.V.L); + case "enum": + return this.scalars(Object.values(arg), reflection_info_1.ScalarType.INT32, depth); + case "message": + return this.messages(Object.values(arg), field.V.T(), allowExcessProperties, depth); + } + break; + } + return true; + } + message(arg, type2, allowExcessProperties, depth) { + if (allowExcessProperties) { + return type2.isAssignable(arg, depth); + } + return type2.is(arg, depth); + } + messages(arg, type2, allowExcessProperties, depth) { + if (!Array.isArray(arg)) + return false; + if (depth < 2) + return true; + if (allowExcessProperties) { + for (let i = 0; i < arg.length && i < depth; i++) + if (!type2.isAssignable(arg[i], depth - 1)) + return false; + } else { + for (let i = 0; i < arg.length && i < depth; i++) + if (!type2.is(arg[i], depth - 1)) + return false; + } + return true; + } + scalar(arg, type2, longType) { + let argType = typeof arg; + switch (type2) { + case reflection_info_1.ScalarType.UINT64: + case reflection_info_1.ScalarType.FIXED64: + case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + switch (longType) { + case reflection_info_1.LongType.BIGINT: + return argType == "bigint"; + case reflection_info_1.LongType.NUMBER: + return argType == "number" && !isNaN(arg); + default: + return argType == "string"; + } + case reflection_info_1.ScalarType.BOOL: + return argType == "boolean"; + case reflection_info_1.ScalarType.STRING: + return argType == "string"; + case 
reflection_info_1.ScalarType.BYTES: + return arg instanceof Uint8Array; + case reflection_info_1.ScalarType.DOUBLE: + case reflection_info_1.ScalarType.FLOAT: + return argType == "number" && !isNaN(arg); + default: + return argType == "number" && Number.isInteger(arg); + } + } + scalars(arg, type2, depth, longType) { + if (!Array.isArray(arg)) + return false; + if (depth < 2) + return true; + if (Array.isArray(arg)) { + for (let i = 0; i < arg.length && i < depth; i++) + if (!this.scalar(arg[i], type2, longType)) + return false; + } + return true; + } + mapKeys(map2, type2, depth) { + let keys = Object.keys(map2); + switch (type2) { + case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + case reflection_info_1.ScalarType.UINT32: + return this.scalars(keys.slice(0, depth).map((k) => parseInt(k)), type2, depth); + case reflection_info_1.ScalarType.BOOL: + return this.scalars(keys.slice(0, depth).map((k) => k == "true" ? true : k == "false" ? false : k), type2, depth); + default: + return this.scalars(keys, type2, depth, reflection_info_1.LongType.STRING); + } + } + }; + exports2.ReflectionTypeCheck = ReflectionTypeCheck; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/goog-varint.js -var require_goog_varint = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/goog-varint.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-long-convert.js +var require_reflection_long_convert = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-long-convert.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.varint32read = exports2.varint32write = exports2.int64toString = exports2.int64fromString = exports2.varint64write = exports2.varint64read = void 0; - function varint64read() { - let lowBits = 0; - let highBits = 0; - for (let shift = 0; shift < 28; shift += 7) { - let b = this.buf[this.pos++]; - lowBits |= (b & 127) << shift; - if ((b & 128) == 0) { - this.assertBounds(); - return [lowBits, highBits]; - } + exports2.reflectionLongConvert = void 0; + var reflection_info_1 = require_reflection_info(); + function reflectionLongConvert(long, type2) { + switch (type2) { + case reflection_info_1.LongType.BIGINT: + return long.toBigInt(); + case reflection_info_1.LongType.NUMBER: + return long.toNumber(); + default: + return long.toString(); } - let middleByte = this.buf[this.pos++]; - lowBits |= (middleByte & 15) << 28; - highBits = (middleByte & 112) >> 4; - if ((middleByte & 128) == 0) { - this.assertBounds(); - return [lowBits, highBits]; + } + exports2.reflectionLongConvert = reflectionLongConvert; + } +}); + +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-reader.js +var require_reflection_json_reader = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-reader.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ReflectionJsonReader = void 0; + var json_typings_1 = require_json_typings(); + var base64_1 = require_base642(); + var reflection_info_1 = require_reflection_info(); + var pb_long_1 = require_pb_long(); + var assert_1 = require_assert(); + var reflection_long_convert_1 = require_reflection_long_convert(); + var ReflectionJsonReader = class { + constructor(info6) { + this.info = info6; } - for (let shift = 3; shift <= 31; shift += 7) { - let b 
= this.buf[this.pos++]; - highBits |= (b & 127) << shift; - if ((b & 128) == 0) { - this.assertBounds(); - return [lowBits, highBits]; + prepare() { + var _a; + if (this.fMap === void 0) { + this.fMap = {}; + const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; + for (const field of fieldsInput) { + this.fMap[field.name] = field; + this.fMap[field.jsonName] = field; + this.fMap[field.localName] = field; + } } } - throw new Error("invalid varint"); - } - exports2.varint64read = varint64read; - function varint64write(lo, hi, bytes) { - for (let i = 0; i < 28; i = i + 7) { - const shift = lo >>> i; - const hasNext = !(shift >>> 7 == 0 && hi == 0); - const byte = (hasNext ? shift | 128 : shift) & 255; - bytes.push(byte); - if (!hasNext) { - return; + // Cannot parse JSON for #. + assert(condition, fieldName, jsonValue) { + if (!condition) { + let what = json_typings_1.typeofJsonValue(jsonValue); + if (what == "number" || what == "boolean") + what = jsonValue.toString(); + throw new Error(`Cannot parse JSON ${what} for ${this.info.typeName}#${fieldName}`); } } - const splitBits = lo >>> 28 & 15 | (hi & 7) << 4; - const hasMoreBits = !(hi >> 3 == 0); - bytes.push((hasMoreBits ? splitBits | 128 : splitBits) & 255); - if (!hasMoreBits) { - return; - } - for (let i = 3; i < 31; i = i + 7) { - const shift = hi >>> i; - const hasNext = !(shift >>> 7 == 0); - const byte = (hasNext ? shift | 128 : shift) & 255; - bytes.push(byte); - if (!hasNext) { - return; + /** + * Reads a message from canonical JSON format into the target message. + * + * Repeated fields are appended. Map entries are added, overwriting + * existing keys. + * + * If a message field is already present, it will be merged with the + * new data. + */ + read(input, message, options) { + this.prepare(); + const oneofsHandled = []; + for (const [jsonKey, jsonValue] of Object.entries(input)) { + const field = this.fMap[jsonKey]; + if (!field) { + if (!options.ignoreUnknownFields) + throw new Error(`Found unknown field while reading ${this.info.typeName} from JSON format. JSON key: ${jsonKey}`); + continue; + } + const localName = field.localName; + let target; + if (field.oneof) { + if (jsonValue === null && (field.kind !== "enum" || field.T()[0] !== "google.protobuf.NullValue")) { + continue; + } + if (oneofsHandled.includes(field.oneof)) + throw new Error(`Multiple members of the oneof group "${field.oneof}" of ${this.info.typeName} are present in JSON.`); + oneofsHandled.push(field.oneof); + target = message[field.oneof] = { + oneofKind: localName + }; + } else { + target = message; + } + if (field.kind == "map") { + if (jsonValue === null) { + continue; + } + this.assert(json_typings_1.isJsonObject(jsonValue), field.name, jsonValue); + const fieldObj = target[localName]; + for (const [jsonObjKey, jsonObjValue] of Object.entries(jsonValue)) { + this.assert(jsonObjValue !== null, field.name + " map value", null); + let val; + switch (field.V.kind) { + case "message": + val = field.V.T().internalJsonRead(jsonObjValue, options); + break; + case "enum": + val = this.enum(field.V.T(), jsonObjValue, field.name, options.ignoreUnknownFields); + if (val === false) + continue; + break; + case "scalar": + val = this.scalar(jsonObjValue, field.V.T, field.V.L, field.name); + break; + } + this.assert(val !== void 0, field.name + " map value", jsonObjValue); + let key = jsonObjKey; + if (field.K == reflection_info_1.ScalarType.BOOL) + key = key == "true" ? true : key == "false" ? 
false : key; + key = this.scalar(key, field.K, reflection_info_1.LongType.STRING, field.name).toString(); + fieldObj[key] = val; + } + } else if (field.repeat) { + if (jsonValue === null) + continue; + this.assert(Array.isArray(jsonValue), field.name, jsonValue); + const fieldArr = target[localName]; + for (const jsonItem of jsonValue) { + this.assert(jsonItem !== null, field.name, null); + let val; + switch (field.kind) { + case "message": + val = field.T().internalJsonRead(jsonItem, options); + break; + case "enum": + val = this.enum(field.T(), jsonItem, field.name, options.ignoreUnknownFields); + if (val === false) + continue; + break; + case "scalar": + val = this.scalar(jsonItem, field.T, field.L, field.name); + break; + } + this.assert(val !== void 0, field.name, jsonValue); + fieldArr.push(val); + } + } else { + switch (field.kind) { + case "message": + if (jsonValue === null && field.T().typeName != "google.protobuf.Value") { + this.assert(field.oneof === void 0, field.name + " (oneof member)", null); + continue; + } + target[localName] = field.T().internalJsonRead(jsonValue, options, target[localName]); + break; + case "enum": + if (jsonValue === null) + continue; + let val = this.enum(field.T(), jsonValue, field.name, options.ignoreUnknownFields); + if (val === false) + continue; + target[localName] = val; + break; + case "scalar": + if (jsonValue === null) + continue; + target[localName] = this.scalar(jsonValue, field.T, field.L, field.name); + break; + } + } } } - bytes.push(hi >>> 31 & 1); - } - exports2.varint64write = varint64write; - var TWO_PWR_32_DBL = (1 << 16) * (1 << 16); - function int64fromString(dec) { - let minus = dec[0] == "-"; - if (minus) - dec = dec.slice(1); - const base = 1e6; - let lowBits = 0; - let highBits = 0; - function add1e6digit(begin, end) { - const digit1e6 = Number(dec.slice(begin, end)); - highBits *= base; - lowBits = lowBits * base + digit1e6; - if (lowBits >= TWO_PWR_32_DBL) { - highBits = highBits + (lowBits / TWO_PWR_32_DBL | 0); - lowBits = lowBits % TWO_PWR_32_DBL; + /** + * Returns `false` for unrecognized string representations. + * + * google.protobuf.NullValue accepts only JSON `null` (or the old `"NULL_VALUE"`). 
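+ * + * A `false` return tells the caller to skip the field entirely; this is how the ignoreUnknownFields option is honored for unknown enum string names.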
+ */ + enum(type2, json2, fieldName, ignoreUnknownFields) { + if (type2[0] == "google.protobuf.NullValue") + assert_1.assert(json2 === null || json2 === "NULL_VALUE", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type2[0]} only accepts null.`); + if (json2 === null) + return 0; + switch (typeof json2) { + case "number": + assert_1.assert(Number.isInteger(json2), `Unable to parse field ${this.info.typeName}#${fieldName}, enum can only be integral number, got ${json2}.`); + return json2; + case "string": + let localEnumName = json2; + if (type2[2] && json2.substring(0, type2[2].length) === type2[2]) + localEnumName = json2.substring(type2[2].length); + let enumNumber = type2[1][localEnumName]; + if (typeof enumNumber === "undefined" && ignoreUnknownFields) { + return false; + } + assert_1.assert(typeof enumNumber == "number", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type2[0]} has no value for "${json2}".`); + return enumNumber; } + assert_1.assert(false, `Unable to parse field ${this.info.typeName}#${fieldName}, cannot parse enum value from ${typeof json2}".`); } - add1e6digit(-24, -18); - add1e6digit(-18, -12); - add1e6digit(-12, -6); - add1e6digit(-6); - return [minus, lowBits, highBits]; - } - exports2.int64fromString = int64fromString; - function int64toString(bitsLow, bitsHigh) { - if (bitsHigh >>> 0 <= 2097151) { - return "" + (TWO_PWR_32_DBL * bitsHigh + (bitsLow >>> 0)); + scalar(json2, type2, longType, fieldName) { + let e; + try { + switch (type2) { + // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". + // Either numbers or strings are accepted. Exponent notation is also accepted. + case reflection_info_1.ScalarType.DOUBLE: + case reflection_info_1.ScalarType.FLOAT: + if (json2 === null) + return 0; + if (json2 === "NaN") + return Number.NaN; + if (json2 === "Infinity") + return Number.POSITIVE_INFINITY; + if (json2 === "-Infinity") + return Number.NEGATIVE_INFINITY; + if (json2 === "") { + e = "empty string"; + break; + } + if (typeof json2 == "string" && json2.trim().length !== json2.length) { + e = "extra whitespace"; + break; + } + if (typeof json2 != "string" && typeof json2 != "number") { + break; + } + let float2 = Number(json2); + if (Number.isNaN(float2)) { + e = "not a number"; + break; + } + if (!Number.isFinite(float2)) { + e = "too large or small"; + break; + } + if (type2 == reflection_info_1.ScalarType.FLOAT) + assert_1.assertFloat32(float2); + return float2; + // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + case reflection_info_1.ScalarType.UINT32: + if (json2 === null) + return 0; + let int32; + if (typeof json2 == "number") + int32 = json2; + else if (json2 === "") + e = "empty string"; + else if (typeof json2 == "string") { + if (json2.trim().length !== json2.length) + e = "extra whitespace"; + else + int32 = Number(json2); + } + if (int32 === void 0) + break; + if (type2 == reflection_info_1.ScalarType.UINT32) + assert_1.assertUInt32(int32); + else + assert_1.assertInt32(int32); + return int32; + // int64, fixed64, uint64: JSON value will be a decimal string. Either numbers or strings are accepted. 
+ case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + if (json2 === null) + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); + if (typeof json2 != "number" && typeof json2 != "string") + break; + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.from(json2), longType); + case reflection_info_1.ScalarType.FIXED64: + case reflection_info_1.ScalarType.UINT64: + if (json2 === null) + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); + if (typeof json2 != "number" && typeof json2 != "string") + break; + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.from(json2), longType); + // bool: + case reflection_info_1.ScalarType.BOOL: + if (json2 === null) + return false; + if (typeof json2 !== "boolean") + break; + return json2; + // string: + case reflection_info_1.ScalarType.STRING: + if (json2 === null) + return ""; + if (typeof json2 !== "string") { + e = "extra whitespace"; + break; + } + try { + encodeURIComponent(json2); + } catch (e2) { + e2 = "invalid UTF8"; + break; + } + return json2; + // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. + // Either standard or URL-safe base64 encoding with/without paddings are accepted. + case reflection_info_1.ScalarType.BYTES: + if (json2 === null || json2 === "") + return new Uint8Array(0); + if (typeof json2 !== "string") + break; + return base64_1.base64decode(json2); + } + } catch (error3) { + e = error3.message; + } + this.assert(false, fieldName + (e ? " - " + e : ""), json2); } - let low = bitsLow & 16777215; - let mid = (bitsLow >>> 24 | bitsHigh << 8) >>> 0 & 16777215; - let high = bitsHigh >> 16 & 65535; - let digitA = low + mid * 6777216 + high * 6710656; - let digitB = mid + high * 8147497; - let digitC = high * 2; - let base = 1e7; - if (digitA >= base) { - digitB += Math.floor(digitA / base); - digitA %= base; + }; + exports2.ReflectionJsonReader = ReflectionJsonReader; + } +}); + +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-writer.js +var require_reflection_json_writer = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-writer.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ReflectionJsonWriter = void 0; + var base64_1 = require_base642(); + var pb_long_1 = require_pb_long(); + var reflection_info_1 = require_reflection_info(); + var assert_1 = require_assert(); + var ReflectionJsonWriter = class { + constructor(info6) { + var _a; + this.fields = (_a = info6.fields) !== null && _a !== void 0 ? _a : []; } - if (digitB >= base) { - digitC += Math.floor(digitB / base); - digitB %= base; + /** + * Converts the message to a JSON object, based on the field descriptors. + */ + write(message, options) { + const json2 = {}, source = message; + for (const field of this.fields) { + if (!field.oneof) { + let jsonValue2 = this.field(field, source[field.localName], options); + if (jsonValue2 !== void 0) + json2[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue2; + continue; + } + const group = source[field.oneof]; + if (group.oneofKind !== field.localName) + continue; + const opt = field.kind == "scalar" || field.kind == "enum" ? 
Object.assign(Object.assign({}, options), { emitDefaultValues: true }) : options; + let jsonValue = this.field(field, group[field.localName], opt); + assert_1.assert(jsonValue !== void 0); + json2[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue; + } + return json2; } - function decimalFrom1e7(digit1e7, needLeadingZeros) { - let partial = digit1e7 ? String(digit1e7) : ""; - if (needLeadingZeros) { - return "0000000".slice(partial.length) + partial; + field(field, value, options) { + let jsonValue = void 0; + if (field.kind == "map") { + assert_1.assert(typeof value == "object" && value !== null); + const jsonObj = {}; + switch (field.V.kind) { + case "scalar": + for (const [entryKey, entryValue] of Object.entries(value)) { + const val = this.scalar(field.V.T, entryValue, field.name, false, true); + assert_1.assert(val !== void 0); + jsonObj[entryKey.toString()] = val; + } + break; + case "message": + const messageType = field.V.T(); + for (const [entryKey, entryValue] of Object.entries(value)) { + const val = this.message(messageType, entryValue, field.name, options); + assert_1.assert(val !== void 0); + jsonObj[entryKey.toString()] = val; + } + break; + case "enum": + const enumInfo = field.V.T(); + for (const [entryKey, entryValue] of Object.entries(value)) { + assert_1.assert(entryValue === void 0 || typeof entryValue == "number"); + const val = this.enum(enumInfo, entryValue, field.name, false, true, options.enumAsInteger); + assert_1.assert(val !== void 0); + jsonObj[entryKey.toString()] = val; + } + break; + } + if (options.emitDefaultValues || Object.keys(jsonObj).length > 0) + jsonValue = jsonObj; + } else if (field.repeat) { + assert_1.assert(Array.isArray(value)); + const jsonArr = []; + switch (field.kind) { + case "scalar": + for (let i = 0; i < value.length; i++) { + const val = this.scalar(field.T, value[i], field.name, field.opt, true); + assert_1.assert(val !== void 0); + jsonArr.push(val); + } + break; + case "enum": + const enumInfo = field.T(); + for (let i = 0; i < value.length; i++) { + assert_1.assert(value[i] === void 0 || typeof value[i] == "number"); + const val = this.enum(enumInfo, value[i], field.name, field.opt, true, options.enumAsInteger); + assert_1.assert(val !== void 0); + jsonArr.push(val); + } + break; + case "message": + const messageType = field.T(); + for (let i = 0; i < value.length; i++) { + const val = this.message(messageType, value[i], field.name, options); + assert_1.assert(val !== void 0); + jsonArr.push(val); + } + break; + } + if (options.emitDefaultValues || jsonArr.length > 0) + jsonValue = jsonArr; + } else { + switch (field.kind) { + case "scalar": + jsonValue = this.scalar(field.T, value, field.name, field.opt, options.emitDefaultValues); + break; + case "enum": + jsonValue = this.enum(field.T(), value, field.name, field.opt, options.emitDefaultValues, options.enumAsInteger); + break; + case "message": + jsonValue = this.message(field.T(), value, field.name, options); + break; + } } - return partial; + return jsonValue; } - return decimalFrom1e7( - digitC, - /*needLeadingZeros=*/ - 0 - ) + decimalFrom1e7( - digitB, - /*needLeadingZeros=*/ - digitC - ) + // If the final 1e7 digit didn't need leading zeros, we would have - // returned via the trivial code path at the top. 
- decimalFrom1e7( - digitA, - /*needLeadingZeros=*/ - 1 - ); - } - exports2.int64toString = int64toString; - function varint32write(value, bytes) { - if (value >= 0) { - while (value > 127) { - bytes.push(value & 127 | 128); - value = value >>> 7; - } - bytes.push(value); - } else { - for (let i = 0; i < 9; i++) { - bytes.push(value & 127 | 128); - value = value >> 7; + /** + * Returns `null` as the default for google.protobuf.NullValue. + */ + enum(type2, value, fieldName, optional, emitDefaultValues, enumAsInteger) { + if (type2[0] == "google.protobuf.NullValue") + return !emitDefaultValues && !optional ? void 0 : null; + if (value === void 0) { + assert_1.assert(optional); + return void 0; } - bytes.push(1); - } - } - exports2.varint32write = varint32write; - function varint32read() { - let b = this.buf[this.pos++]; - let result = b & 127; - if ((b & 128) == 0) { - this.assertBounds(); - return result; + if (value === 0 && !emitDefaultValues && !optional) + return void 0; + assert_1.assert(typeof value == "number"); + assert_1.assert(Number.isInteger(value)); + if (enumAsInteger || !type2[1].hasOwnProperty(value)) + return value; + if (type2[2]) + return type2[2] + type2[1][value]; + return type2[1][value]; } - b = this.buf[this.pos++]; - result |= (b & 127) << 7; - if ((b & 128) == 0) { - this.assertBounds(); - return result; + message(type2, value, fieldName, options) { + if (value === void 0) + return options.emitDefaultValues ? null : void 0; + return type2.internalJsonWrite(value, options); } - b = this.buf[this.pos++]; - result |= (b & 127) << 14; - if ((b & 128) == 0) { - this.assertBounds(); - return result; + scalar(type2, value, fieldName, optional, emitDefaultValues) { + if (value === void 0) { + assert_1.assert(optional); + return void 0; + } + const ed = emitDefaultValues || optional; + switch (type2) { + // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + if (value === 0) + return ed ? 0 : void 0; + assert_1.assertInt32(value); + return value; + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.UINT32: + if (value === 0) + return ed ? 0 : void 0; + assert_1.assertUInt32(value); + return value; + // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". + // Either numbers or strings are accepted. Exponent notation is also accepted. + case reflection_info_1.ScalarType.FLOAT: + assert_1.assertFloat32(value); + case reflection_info_1.ScalarType.DOUBLE: + if (value === 0) + return ed ? 0 : void 0; + assert_1.assert(typeof value == "number"); + if (Number.isNaN(value)) + return "NaN"; + if (value === Number.POSITIVE_INFINITY) + return "Infinity"; + if (value === Number.NEGATIVE_INFINITY) + return "-Infinity"; + return value; + // string: + case reflection_info_1.ScalarType.STRING: + if (value === "") + return ed ? "" : void 0; + assert_1.assert(typeof value == "string"); + return value; + // bool: + case reflection_info_1.ScalarType.BOOL: + if (value === false) + return ed ? false : void 0; + assert_1.assert(typeof value == "boolean"); + return value; + // JSON value will be a decimal string. Either numbers or strings are accepted. 
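+ // Number, string and bigint inputs are all normalized through PbULong/PbLong below, so equal 64-bit values serialize to the same decimal string.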
+ case reflection_info_1.ScalarType.UINT64: + case reflection_info_1.ScalarType.FIXED64: + assert_1.assert(typeof value == "number" || typeof value == "string" || typeof value == "bigint"); + let ulong = pb_long_1.PbULong.from(value); + if (ulong.isZero() && !ed) + return void 0; + return ulong.toString(); + // JSON value will be a decimal string. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + assert_1.assert(typeof value == "number" || typeof value == "string" || typeof value == "bigint"); + let long = pb_long_1.PbLong.from(value); + if (long.isZero() && !ed) + return void 0; + return long.toString(); + // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. + // Either standard or URL-safe base64 encoding with/without paddings are accepted. + case reflection_info_1.ScalarType.BYTES: + assert_1.assert(value instanceof Uint8Array); + if (!value.byteLength) + return ed ? "" : void 0; + return base64_1.base64encode(value); + } } - b = this.buf[this.pos++]; - result |= (b & 127) << 21; - if ((b & 128) == 0) { - this.assertBounds(); - return result; + }; + exports2.ReflectionJsonWriter = ReflectionJsonWriter; + } +}); + +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-scalar-default.js +var require_reflection_scalar_default = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-scalar-default.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.reflectionScalarDefault = void 0; + var reflection_info_1 = require_reflection_info(); + var reflection_long_convert_1 = require_reflection_long_convert(); + var pb_long_1 = require_pb_long(); + function reflectionScalarDefault(type2, longType = reflection_info_1.LongType.STRING) { + switch (type2) { + case reflection_info_1.ScalarType.BOOL: + return false; + case reflection_info_1.ScalarType.UINT64: + case reflection_info_1.ScalarType.FIXED64: + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); + case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); + case reflection_info_1.ScalarType.DOUBLE: + case reflection_info_1.ScalarType.FLOAT: + return 0; + case reflection_info_1.ScalarType.BYTES: + return new Uint8Array(0); + case reflection_info_1.ScalarType.STRING: + return ""; + default: + return 0; } - b = this.buf[this.pos++]; - result |= (b & 15) << 28; - for (let readBytes = 5; (b & 128) !== 0 && readBytes < 10; readBytes++) - b = this.buf[this.pos++]; - if ((b & 128) != 0) - throw new Error("invalid varint"); - this.assertBounds(); - return result >>> 0; } - exports2.varint32read = varint32read; + exports2.reflectionScalarDefault = reflectionScalarDefault; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/pb-long.js -var require_pb_long = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/pb-long.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-reader.js +var require_reflection_binary_reader = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-reader.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.PbLong = exports2.PbULong = 
exports2.detectBi = void 0; - var goog_varint_1 = require_goog_varint(); - var BI; - function detectBi() { - const dv = new DataView(new ArrayBuffer(8)); - const ok = globalThis.BigInt !== void 0 && typeof dv.getBigInt64 === "function" && typeof dv.getBigUint64 === "function" && typeof dv.setBigInt64 === "function" && typeof dv.setBigUint64 === "function"; - BI = ok ? { - MIN: BigInt("-9223372036854775808"), - MAX: BigInt("9223372036854775807"), - UMIN: BigInt("0"), - UMAX: BigInt("18446744073709551615"), - C: BigInt, - V: dv - } : void 0; - } - exports2.detectBi = detectBi; - detectBi(); - function assertBi(bi) { - if (!bi) - throw new Error("BigInt unavailable, see https://github.com/timostamm/protobuf-ts/blob/v1.0.8/MANUAL.md#bigint-support"); - } - var RE_DECIMAL_STR = /^-?[0-9]+$/; - var TWO_PWR_32_DBL = 4294967296; - var HALF_2_PWR_32 = 2147483648; - var SharedPbLong = class { - /** - * Create a new instance with the given bits. - */ - constructor(lo, hi) { - this.lo = lo | 0; - this.hi = hi | 0; - } - /** - * Is this instance equal to 0? - */ - isZero() { - return this.lo == 0 && this.hi == 0; + exports2.ReflectionBinaryReader = void 0; + var binary_format_contract_1 = require_binary_format_contract(); + var reflection_info_1 = require_reflection_info(); + var reflection_long_convert_1 = require_reflection_long_convert(); + var reflection_scalar_default_1 = require_reflection_scalar_default(); + var ReflectionBinaryReader = class { + constructor(info6) { + this.info = info6; } - /** - * Convert to a native number. - */ - toNumber() { - let result = this.hi * TWO_PWR_32_DBL + (this.lo >>> 0); - if (!Number.isSafeInteger(result)) - throw new Error("cannot convert to safe number"); - return result; + prepare() { + var _a; + if (!this.fieldNoToField) { + const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; + this.fieldNoToField = new Map(fieldsInput.map((field) => [field.no, field])); + } } - }; - var PbULong = class _PbULong extends SharedPbLong { /** - * Create instance from a `string`, `number` or `bigint`. + * Reads a message from binary format into the target message. + * + * Repeated fields are appended. Map entries are added, overwriting + * existing keys. + * + * If a message field is already present, it will be merged with the + * new data. 
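+ * + * For example, if the same field number occurs twice in the input, the second occurrence overwrites a scalar field, merges into a message field, and appends to a repeated field.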
*/ - static from(value) { - if (BI) - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - if (value == "") - throw new Error("string is no integer"); - value = BI.C(value); - case "number": - if (value === 0) - return this.ZERO; - value = BI.C(value); - case "bigint": - if (!value) - return this.ZERO; - if (value < BI.UMIN) - throw new Error("signed value for ulong"); - if (value > BI.UMAX) - throw new Error("ulong too large"); - BI.V.setBigUint64(0, value, true); - return new _PbULong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); - } - else - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - value = value.trim(); - if (!RE_DECIMAL_STR.test(value)) - throw new Error("string is no integer"); - let [minus, lo, hi] = goog_varint_1.int64fromString(value); - if (minus) - throw new Error("signed value for ulong"); - return new _PbULong(lo, hi); - case "number": - if (value == 0) - return this.ZERO; - if (!Number.isSafeInteger(value)) - throw new Error("number is no integer"); - if (value < 0) - throw new Error("signed value for ulong"); - return new _PbULong(value, value / TWO_PWR_32_DBL); + read(reader, message, options, length) { + this.prepare(); + const end = length === void 0 ? reader.len : reader.pos + length; + while (reader.pos < end) { + const [fieldNo, wireType] = reader.tag(), field = this.fieldNoToField.get(fieldNo); + if (!field) { + let u = options.readUnknownField; + if (u == "throw") + throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.info.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? binary_format_contract_1.UnknownFieldHandler.onRead : u)(this.info.typeName, message, fieldNo, wireType, d); + continue; } - throw new Error("unknown value " + typeof value); - } - /** - * Convert to decimal string. - */ - toString() { - return BI ? this.toBigInt().toString() : goog_varint_1.int64toString(this.lo, this.hi); - } - /** - * Convert to native bigint. - */ - toBigInt() { - assertBi(BI); - BI.V.setInt32(0, this.lo, true); - BI.V.setInt32(4, this.hi, true); - return BI.V.getBigUint64(0, true); - } - }; - exports2.PbULong = PbULong; - PbULong.ZERO = new PbULong(0, 0); - var PbLong = class _PbLong extends SharedPbLong { - /** - * Create instance from a `string`, `number` or `bigint`. 
- */ - static from(value) { - if (BI) - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - if (value == "") - throw new Error("string is no integer"); - value = BI.C(value); - case "number": - if (value === 0) - return this.ZERO; - value = BI.C(value); - case "bigint": - if (!value) - return this.ZERO; - if (value < BI.MIN) - throw new Error("signed long too small"); - if (value > BI.MAX) - throw new Error("signed long too large"); - BI.V.setBigInt64(0, value, true); - return new _PbLong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); + let target = message, repeated = field.repeat, localName = field.localName; + if (field.oneof) { + target = target[field.oneof]; + if (target.oneofKind !== localName) + target = message[field.oneof] = { + oneofKind: localName + }; } - else - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - value = value.trim(); - if (!RE_DECIMAL_STR.test(value)) - throw new Error("string is no integer"); - let [minus, lo, hi] = goog_varint_1.int64fromString(value); - if (minus) { - if (hi > HALF_2_PWR_32 || hi == HALF_2_PWR_32 && lo != 0) - throw new Error("signed long too small"); - } else if (hi >= HALF_2_PWR_32) - throw new Error("signed long too large"); - let pbl = new _PbLong(lo, hi); - return minus ? pbl.negate() : pbl; - case "number": - if (value == 0) - return this.ZERO; - if (!Number.isSafeInteger(value)) - throw new Error("number is no integer"); - return value > 0 ? new _PbLong(value, value / TWO_PWR_32_DBL) : new _PbLong(-value, -value / TWO_PWR_32_DBL).negate(); + switch (field.kind) { + case "scalar": + case "enum": + let T = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; + let L = field.kind == "scalar" ? field.L : void 0; + if (repeated) { + let arr = target[localName]; + if (wireType == binary_format_contract_1.WireType.LengthDelimited && T != reflection_info_1.ScalarType.STRING && T != reflection_info_1.ScalarType.BYTES) { + let e = reader.uint32() + reader.pos; + while (reader.pos < e) + arr.push(this.scalar(reader, T, L)); + } else + arr.push(this.scalar(reader, T, L)); + } else + target[localName] = this.scalar(reader, T, L); + break; + case "message": + if (repeated) { + let arr = target[localName]; + let msg = field.T().internalBinaryRead(reader, reader.uint32(), options); + arr.push(msg); + } else + target[localName] = field.T().internalBinaryRead(reader, reader.uint32(), options, target[localName]); + break; + case "map": + let [mapKey, mapVal] = this.mapEntry(field, reader, options); + target[localName][mapKey] = mapVal; + break; } - throw new Error("unknown value " + typeof value); - } - /** - * Do we have a minus sign? - */ - isNegative() { - return (this.hi & HALF_2_PWR_32) !== 0; - } - /** - * Negate two's complement. - * Invert all the bits and add one to the result. - */ - negate() { - let hi = ~this.hi, lo = this.lo; - if (lo) - lo = ~lo + 1; - else - hi += 1; - return new _PbLong(lo, hi); + } } /** - * Convert to decimal string. 
+ * Read a map field, expecting key field = 1, value field = 2 */ - toString() { - if (BI) - return this.toBigInt().toString(); - if (this.isNegative()) { - let n = this.negate(); - return "-" + goog_varint_1.int64toString(n.lo, n.hi); + mapEntry(field, reader, options) { + let length = reader.uint32(); + let end = reader.pos + length; + let key = void 0; + let val = void 0; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + if (field.K == reflection_info_1.ScalarType.BOOL) + key = reader.bool().toString(); + else + key = this.scalar(reader, field.K, reflection_info_1.LongType.STRING); + break; + case 2: + switch (field.V.kind) { + case "scalar": + val = this.scalar(reader, field.V.T, field.V.L); + break; + case "enum": + val = reader.int32(); + break; + case "message": + val = field.V.T().internalBinaryRead(reader, reader.uint32(), options); + break; + } + break; + default: + throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) in map entry for ${this.info.typeName}#${field.name}`); + } } - return goog_varint_1.int64toString(this.lo, this.hi); + if (key === void 0) { + let keyRaw = reflection_scalar_default_1.reflectionScalarDefault(field.K); + key = field.K == reflection_info_1.ScalarType.BOOL ? keyRaw.toString() : keyRaw; + } + if (val === void 0) + switch (field.V.kind) { + case "scalar": + val = reflection_scalar_default_1.reflectionScalarDefault(field.V.T, field.V.L); + break; + case "enum": + val = 0; + break; + case "message": + val = field.V.T().create(); + break; + } + return [key, val]; } - /** - * Convert to native bigint. - */ - toBigInt() { - assertBi(BI); - BI.V.setInt32(0, this.lo, true); - BI.V.setInt32(4, this.hi, true); - return BI.V.getBigInt64(0, true); + scalar(reader, type2, longType) { + switch (type2) { + case reflection_info_1.ScalarType.INT32: + return reader.int32(); + case reflection_info_1.ScalarType.STRING: + return reader.string(); + case reflection_info_1.ScalarType.BOOL: + return reader.bool(); + case reflection_info_1.ScalarType.DOUBLE: + return reader.double(); + case reflection_info_1.ScalarType.FLOAT: + return reader.float(); + case reflection_info_1.ScalarType.INT64: + return reflection_long_convert_1.reflectionLongConvert(reader.int64(), longType); + case reflection_info_1.ScalarType.UINT64: + return reflection_long_convert_1.reflectionLongConvert(reader.uint64(), longType); + case reflection_info_1.ScalarType.FIXED64: + return reflection_long_convert_1.reflectionLongConvert(reader.fixed64(), longType); + case reflection_info_1.ScalarType.FIXED32: + return reader.fixed32(); + case reflection_info_1.ScalarType.BYTES: + return reader.bytes(); + case reflection_info_1.ScalarType.UINT32: + return reader.uint32(); + case reflection_info_1.ScalarType.SFIXED32: + return reader.sfixed32(); + case reflection_info_1.ScalarType.SFIXED64: + return reflection_long_convert_1.reflectionLongConvert(reader.sfixed64(), longType); + case reflection_info_1.ScalarType.SINT32: + return reader.sint32(); + case reflection_info_1.ScalarType.SINT64: + return reflection_long_convert_1.reflectionLongConvert(reader.sint64(), longType); + } } }; - exports2.PbLong = PbLong; - PbLong.ZERO = new PbLong(0, 0); + exports2.ReflectionBinaryReader = ReflectionBinaryReader; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/binary-reader.js -var require_binary_reader = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/binary-reader.js"(exports2) { +// 
node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-writer.js +var require_reflection_binary_writer = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-writer.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BinaryReader = exports2.binaryReadOptions = void 0; + exports2.ReflectionBinaryWriter = void 0; var binary_format_contract_1 = require_binary_format_contract(); + var reflection_info_1 = require_reflection_info(); + var assert_1 = require_assert(); var pb_long_1 = require_pb_long(); - var goog_varint_1 = require_goog_varint(); - var defaultsRead = { - readUnknownField: true, - readerFactory: (bytes) => new BinaryReader(bytes) - }; - function binaryReadOptions(options) { - return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; - } - exports2.binaryReadOptions = binaryReadOptions; - var BinaryReader = class { - constructor(buf, textDecoder) { - this.varint64 = goog_varint_1.varint64read; - this.uint32 = goog_varint_1.varint32read; - this.buf = buf; - this.len = buf.length; - this.pos = 0; - this.view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); - this.textDecoder = textDecoder !== null && textDecoder !== void 0 ? textDecoder : new TextDecoder("utf-8", { - fatal: true, - ignoreBOM: true - }); + var ReflectionBinaryWriter = class { + constructor(info6) { + this.info = info6; } - /** - * Reads a tag - field number and wire type. - */ - tag() { - let tag = this.uint32(), fieldNo = tag >>> 3, wireType = tag & 7; - if (fieldNo <= 0 || wireType < 0 || wireType > 5) - throw new Error("illegal tag: field no " + fieldNo + " wire type " + wireType); - return [fieldNo, wireType]; + prepare() { + if (!this.fields) { + const fieldsInput = this.info.fields ? this.info.fields.concat() : []; + this.fields = fieldsInput.sort((a, b) => a.no - b.no); + } } /** - * Skip one element on the wire and return the skipped data. - * Supports WireType.StartGroup since v2.0.0-alpha.23. + * Writes the message to binary format. */ - skip(wireType) { - let start = this.pos; - switch (wireType) { - case binary_format_contract_1.WireType.Varint: - while (this.buf[this.pos++] & 128) { - } + write(message, writer, options) { + this.prepare(); + for (const field of this.fields) { + let value, emitDefault, repeated = field.repeat, localName = field.localName; + if (field.oneof) { + const group = message[field.oneof]; + if (group.oneofKind !== localName) + continue; + value = group[localName]; + emitDefault = true; + } else { + value = message[localName]; + emitDefault = false; + } + switch (field.kind) { + case "scalar": + case "enum": + let T = field.kind == "enum" ? 
reflection_info_1.ScalarType.INT32 : field.T; + if (repeated) { + assert_1.assert(Array.isArray(value)); + if (repeated == reflection_info_1.RepeatType.PACKED) + this.packed(writer, T, field.no, value); + else + for (const item of value) + this.scalar(writer, T, field.no, item, true); + } else if (value === void 0) + assert_1.assert(field.opt); + else + this.scalar(writer, T, field.no, value, emitDefault || field.opt); + break; + case "message": + if (repeated) { + assert_1.assert(Array.isArray(value)); + for (const item of value) + this.message(writer, options, field.T(), field.no, item); + } else { + this.message(writer, options, field.T(), field.no, value); + } + break; + case "map": + assert_1.assert(typeof value == "object" && value !== null); + for (const [key, val] of Object.entries(value)) + this.mapEntry(writer, options, field, key, val); + break; + } + } + let u = options.writeUnknownFields; + if (u !== false) + (u === true ? binary_format_contract_1.UnknownFieldHandler.onWrite : u)(this.info.typeName, message, writer); + } + mapEntry(writer, options, field, key, value) { + writer.tag(field.no, binary_format_contract_1.WireType.LengthDelimited); + writer.fork(); + let keyValue = key; + switch (field.K) { + case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.UINT32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + keyValue = Number.parseInt(key); break; - case binary_format_contract_1.WireType.Bit64: - this.pos += 4; - case binary_format_contract_1.WireType.Bit32: - this.pos += 4; + case reflection_info_1.ScalarType.BOOL: + assert_1.assert(key == "true" || key == "false"); + keyValue = key == "true"; break; - case binary_format_contract_1.WireType.LengthDelimited: - let len = this.uint32(); - this.pos += len; + } + this.scalar(writer, field.K, 1, keyValue, true); + switch (field.V.kind) { + case "scalar": + this.scalar(writer, field.V.T, 2, value, true); break; - case binary_format_contract_1.WireType.StartGroup: - let t; - while ((t = this.tag()[1]) !== binary_format_contract_1.WireType.EndGroup) { - this.skip(t); - } + case "enum": + this.scalar(writer, reflection_info_1.ScalarType.INT32, 2, value, true); + break; + case "message": + this.message(writer, options, field.V.T(), 2, value); break; - default: - throw new Error("cant skip wire type " + wireType); } - this.assertBounds(); - return this.buf.subarray(start, this.pos); - } - /** - * Throws error if position in byte array is out of range. - */ - assertBounds() { - if (this.pos > this.len) - throw new RangeError("premature EOF"); - } - /** - * Read a `int32` field, a signed 32 bit varint. - */ - int32() { - return this.uint32() | 0; - } - /** - * Read a `sint32` field, a signed, zigzag-encoded 32-bit varint. - */ - sint32() { - let zze = this.uint32(); - return zze >>> 1 ^ -(zze & 1); - } - /** - * Read a `int64` field, a signed 64-bit varint. - */ - int64() { - return new pb_long_1.PbLong(...this.varint64()); - } - /** - * Read a `uint64` field, an unsigned 64-bit varint. - */ - uint64() { - return new pb_long_1.PbULong(...this.varint64()); - } - /** - * Read a `sint64` field, a signed, zig-zag-encoded 64-bit varint. - */ - sint64() { - let [lo, hi] = this.varint64(); - let s = -(lo & 1); - lo = (lo >>> 1 | (hi & 1) << 31) ^ s; - hi = hi >>> 1 ^ s; - return new pb_long_1.PbLong(lo, hi); - } - /** - * Read a `bool` field, a variant. 
- */ - bool() { - let [lo, hi] = this.varint64(); - return lo !== 0 || hi !== 0; - } - /** - * Read a `fixed32` field, an unsigned, fixed-length 32-bit integer. - */ - fixed32() { - return this.view.getUint32((this.pos += 4) - 4, true); - } - /** - * Read a `sfixed32` field, a signed, fixed-length 32-bit integer. - */ - sfixed32() { - return this.view.getInt32((this.pos += 4) - 4, true); + writer.join(); } - /** - * Read a `fixed64` field, an unsigned, fixed-length 64 bit integer. - */ - fixed64() { - return new pb_long_1.PbULong(this.sfixed32(), this.sfixed32()); + message(writer, options, handler2, fieldNo, value) { + if (value === void 0) + return; + handler2.internalBinaryWrite(value, writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited).fork(), options); + writer.join(); } /** - * Read a `fixed64` field, a signed, fixed-length 64-bit integer. + * Write a single scalar value. */ - sfixed64() { - return new pb_long_1.PbLong(this.sfixed32(), this.sfixed32()); + scalar(writer, type2, fieldNo, value, emitDefault) { + let [wireType, method, isDefault] = this.scalarInfo(type2, value); + if (!isDefault || emitDefault) { + writer.tag(fieldNo, wireType); + writer[method](value); + } } /** - * Read a `float` field, 32-bit floating point number. + * Write an array of scalar values in packed format. */ - float() { - return this.view.getFloat32((this.pos += 4) - 4, true); + packed(writer, type2, fieldNo, value) { + if (!value.length) + return; + assert_1.assert(type2 !== reflection_info_1.ScalarType.BYTES && type2 !== reflection_info_1.ScalarType.STRING); + writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited); + writer.fork(); + let [, method] = this.scalarInfo(type2); + for (let i = 0; i < value.length; i++) + writer[method](value[i]); + writer.join(); } /** - * Read a `double` field, a 64-bit floating point number. + * Get information for writing a scalar value. + * + * Returns tuple: + * [0]: appropriate WireType + * [1]: name of the appropriate method of IBinaryWriter + * [2]: whether the given value is a default value + * + * If argument `value` is omitted, [2] is always false. 
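+ *
+ * Editor's illustration (assumed values, not from the original source):
+ * scalarInfo(ScalarType.STRING, "") yields [WireType.LengthDelimited, "string", true],
+ * while scalarInfo(ScalarType.FIXED32, 7) yields [WireType.Bit32, "fixed32", false].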
*/ - double() { - return this.view.getFloat64((this.pos += 8) - 8, true); + scalarInfo(type2, value) { + let t = binary_format_contract_1.WireType.Varint; + let m; + let i = value === void 0; + let d = value === 0; + switch (type2) { + case reflection_info_1.ScalarType.INT32: + m = "int32"; + break; + case reflection_info_1.ScalarType.STRING: + d = i || !value.length; + t = binary_format_contract_1.WireType.LengthDelimited; + m = "string"; + break; + case reflection_info_1.ScalarType.BOOL: + d = value === false; + m = "bool"; + break; + case reflection_info_1.ScalarType.UINT32: + m = "uint32"; + break; + case reflection_info_1.ScalarType.DOUBLE: + t = binary_format_contract_1.WireType.Bit64; + m = "double"; + break; + case reflection_info_1.ScalarType.FLOAT: + t = binary_format_contract_1.WireType.Bit32; + m = "float"; + break; + case reflection_info_1.ScalarType.INT64: + d = i || pb_long_1.PbLong.from(value).isZero(); + m = "int64"; + break; + case reflection_info_1.ScalarType.UINT64: + d = i || pb_long_1.PbULong.from(value).isZero(); + m = "uint64"; + break; + case reflection_info_1.ScalarType.FIXED64: + d = i || pb_long_1.PbULong.from(value).isZero(); + t = binary_format_contract_1.WireType.Bit64; + m = "fixed64"; + break; + case reflection_info_1.ScalarType.BYTES: + d = i || !value.byteLength; + t = binary_format_contract_1.WireType.LengthDelimited; + m = "bytes"; + break; + case reflection_info_1.ScalarType.FIXED32: + t = binary_format_contract_1.WireType.Bit32; + m = "fixed32"; + break; + case reflection_info_1.ScalarType.SFIXED32: + t = binary_format_contract_1.WireType.Bit32; + m = "sfixed32"; + break; + case reflection_info_1.ScalarType.SFIXED64: + d = i || pb_long_1.PbLong.from(value).isZero(); + t = binary_format_contract_1.WireType.Bit64; + m = "sfixed64"; + break; + case reflection_info_1.ScalarType.SINT32: + m = "sint32"; + break; + case reflection_info_1.ScalarType.SINT64: + d = i || pb_long_1.PbLong.from(value).isZero(); + m = "sint64"; + break; + } + return [t, m, i || d]; } - /** - * Read a `bytes` field, length-delimited arbitrary data. - */ - bytes() { - let len = this.uint32(); - let start = this.pos; - this.pos += len; - this.assertBounds(); - return this.buf.subarray(start, start + len); + }; + exports2.ReflectionBinaryWriter = ReflectionBinaryWriter; + } +}); + +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-create.js +var require_reflection_create = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-create.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.reflectionCreate = void 0; + var reflection_scalar_default_1 = require_reflection_scalar_default(); + var message_type_contract_1 = require_message_type_contract(); + function reflectionCreate(type2) { + const msg = type2.messagePrototype ? Object.create(type2.messagePrototype) : Object.defineProperty({}, message_type_contract_1.MESSAGE_TYPE, { value: type2 }); + for (let field of type2.fields) { + let name = field.localName; + if (field.opt) + continue; + if (field.oneof) + msg[field.oneof] = { oneofKind: void 0 }; + else if (field.repeat) + msg[name] = []; + else + switch (field.kind) { + case "scalar": + msg[name] = reflection_scalar_default_1.reflectionScalarDefault(field.T, field.L); + break; + case "enum": + msg[name] = 0; + break; + case "map": + msg[name] = {}; + break; + } } - /** - * Read a `string` field, length-delimited data converted to UTF-8 text. 
- */ - string() { - return this.textDecoder.decode(this.bytes()); + return msg; + } + exports2.reflectionCreate = reflectionCreate; + } +}); + +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-merge-partial.js +var require_reflection_merge_partial = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-merge-partial.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.reflectionMergePartial = void 0; + function reflectionMergePartial(info6, target, source) { + let fieldValue, input = source, output; + for (let field of info6.fields) { + let name = field.localName; + if (field.oneof) { + const group = input[field.oneof]; + if ((group === null || group === void 0 ? void 0 : group.oneofKind) == void 0) { + continue; + } + fieldValue = group[name]; + output = target[field.oneof]; + output.oneofKind = group.oneofKind; + if (fieldValue == void 0) { + delete output[name]; + continue; + } + } else { + fieldValue = input[name]; + output = target; + if (fieldValue == void 0) { + continue; + } + } + if (field.repeat) + output[name].length = fieldValue.length; + switch (field.kind) { + case "scalar": + case "enum": + if (field.repeat) + for (let i = 0; i < fieldValue.length; i++) + output[name][i] = fieldValue[i]; + else + output[name] = fieldValue; + break; + case "message": + let T = field.T(); + if (field.repeat) + for (let i = 0; i < fieldValue.length; i++) + output[name][i] = T.create(fieldValue[i]); + else if (output[name] === void 0) + output[name] = T.create(fieldValue); + else + T.mergePartial(output[name], fieldValue); + break; + case "map": + switch (field.V.kind) { + case "scalar": + case "enum": + Object.assign(output[name], fieldValue); + break; + case "message": + let T2 = field.V.T(); + for (let k of Object.keys(fieldValue)) + output[name][k] = T2.create(fieldValue[k]); + break; + } + break; + } } - }; - exports2.BinaryReader = BinaryReader; + } + exports2.reflectionMergePartial = reflectionMergePartial; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/assert.js -var require_assert = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/assert.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-equals.js +var require_reflection_equals = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-equals.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.assertFloat32 = exports2.assertUInt32 = exports2.assertInt32 = exports2.assertNever = exports2.assert = void 0; - function assert(condition, msg) { - if (!condition) { - throw new Error(msg); + exports2.reflectionEquals = void 0; + var reflection_info_1 = require_reflection_info(); + function reflectionEquals(info6, a, b) { + if (a === b) + return true; + if (!a || !b) + return false; + for (let field of info6.fields) { + let localName = field.localName; + let val_a = field.oneof ? a[field.oneof][localName] : a[localName]; + let val_b = field.oneof ? b[field.oneof][localName] : b[localName]; + switch (field.kind) { + case "enum": + case "scalar": + let t = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; + if (!(field.repeat ? repeatedPrimitiveEq(t, val_a, val_b) : primitiveEq(t, val_a, val_b))) + return false; + break; + case "map": + if (!(field.V.kind == "message" ? repeatedMsgEq(field.V.T(), objectValues(val_a), objectValues(val_b)) : repeatedPrimitiveEq(field.V.kind == "enum" ? 
reflection_info_1.ScalarType.INT32 : field.V.T, objectValues(val_a), objectValues(val_b)))) + return false; + break; + case "message": + let T = field.T(); + if (!(field.repeat ? repeatedMsgEq(T, val_a, val_b) : T.equals(val_a, val_b))) + return false; + break; + } } + return true; } - exports2.assert = assert; - function assertNever2(value, msg) { - throw new Error(msg !== null && msg !== void 0 ? msg : "Unexpected object: " + value); - } - exports2.assertNever = assertNever2; - var FLOAT32_MAX = 34028234663852886e22; - var FLOAT32_MIN = -34028234663852886e22; - var UINT32_MAX = 4294967295; - var INT32_MAX = 2147483647; - var INT32_MIN = -2147483648; - function assertInt32(arg) { - if (typeof arg !== "number") - throw new Error("invalid int 32: " + typeof arg); - if (!Number.isInteger(arg) || arg > INT32_MAX || arg < INT32_MIN) - throw new Error("invalid int 32: " + arg); + exports2.reflectionEquals = reflectionEquals; + var objectValues = Object.values; + function primitiveEq(type2, a, b) { + if (a === b) + return true; + if (type2 !== reflection_info_1.ScalarType.BYTES) + return false; + let ba = a; + let bb = b; + if (ba.length !== bb.length) + return false; + for (let i = 0; i < ba.length; i++) + if (ba[i] != bb[i]) + return false; + return true; } - exports2.assertInt32 = assertInt32; - function assertUInt32(arg) { - if (typeof arg !== "number") - throw new Error("invalid uint 32: " + typeof arg); - if (!Number.isInteger(arg) || arg > UINT32_MAX || arg < 0) - throw new Error("invalid uint 32: " + arg); + function repeatedPrimitiveEq(type2, a, b) { + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; i++) + if (!primitiveEq(type2, a[i], b[i])) + return false; + return true; } - exports2.assertUInt32 = assertUInt32; - function assertFloat32(arg) { - if (typeof arg !== "number") - throw new Error("invalid float 32: " + typeof arg); - if (!Number.isFinite(arg)) - return; - if (arg > FLOAT32_MAX || arg < FLOAT32_MIN) - throw new Error("invalid float 32: " + arg); + function repeatedMsgEq(type2, a, b) { + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; i++) + if (!type2.equals(a[i], b[i])) + return false; + return true; } - exports2.assertFloat32 = assertFloat32; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/binary-writer.js -var require_binary_writer = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/binary-writer.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/message-type.js +var require_message_type = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/message-type.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BinaryWriter = exports2.binaryWriteOptions = void 0; - var pb_long_1 = require_pb_long(); - var goog_varint_1 = require_goog_varint(); - var assert_1 = require_assert(); - var defaultsWrite = { - writeUnknownFields: true, - writerFactory: () => new BinaryWriter() - }; - function binaryWriteOptions(options) { - return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; - } - exports2.binaryWriteOptions = binaryWriteOptions; - var BinaryWriter = class { - constructor(textEncoder) { - this.stack = []; - this.textEncoder = textEncoder !== null && textEncoder !== void 0 ? 
textEncoder : new TextEncoder(); - this.chunks = []; - this.buf = []; + exports2.MessageType = void 0; + var message_type_contract_1 = require_message_type_contract(); + var reflection_info_1 = require_reflection_info(); + var reflection_type_check_1 = require_reflection_type_check(); + var reflection_json_reader_1 = require_reflection_json_reader(); + var reflection_json_writer_1 = require_reflection_json_writer(); + var reflection_binary_reader_1 = require_reflection_binary_reader(); + var reflection_binary_writer_1 = require_reflection_binary_writer(); + var reflection_create_1 = require_reflection_create(); + var reflection_merge_partial_1 = require_reflection_merge_partial(); + var json_typings_1 = require_json_typings(); + var json_format_contract_1 = require_json_format_contract(); + var reflection_equals_1 = require_reflection_equals(); + var binary_writer_1 = require_binary_writer(); + var binary_reader_1 = require_binary_reader(); + var baseDescriptors = Object.getOwnPropertyDescriptors(Object.getPrototypeOf({})); + var messageTypeDescriptor = baseDescriptors[message_type_contract_1.MESSAGE_TYPE] = {}; + var MessageType = class { + constructor(name, fields, options) { + this.defaultCheckDepth = 16; + this.typeName = name; + this.fields = fields.map(reflection_info_1.normalizeFieldInfo); + this.options = options !== null && options !== void 0 ? options : {}; + messageTypeDescriptor.value = this; + this.messagePrototype = Object.create(null, baseDescriptors); + this.refTypeCheck = new reflection_type_check_1.ReflectionTypeCheck(this); + this.refJsonReader = new reflection_json_reader_1.ReflectionJsonReader(this); + this.refJsonWriter = new reflection_json_writer_1.ReflectionJsonWriter(this); + this.refBinReader = new reflection_binary_reader_1.ReflectionBinaryReader(this); + this.refBinWriter = new reflection_binary_writer_1.ReflectionBinaryWriter(this); } - /** - * Return all bytes written and reset this writer. - */ - finish() { - this.chunks.push(new Uint8Array(this.buf)); - let len = 0; - for (let i = 0; i < this.chunks.length; i++) - len += this.chunks[i].length; - let bytes = new Uint8Array(len); - let offset = 0; - for (let i = 0; i < this.chunks.length; i++) { - bytes.set(this.chunks[i], offset); - offset += this.chunks[i].length; + create(value) { + let message = reflection_create_1.reflectionCreate(this); + if (value !== void 0) { + reflection_merge_partial_1.reflectionMergePartial(this, message, value); } - this.chunks = []; - return bytes; - } - /** - * Start a new fork for length-delimited data like a message - * or a packed repeated field. - * - * Must be joined later with `join()`. - */ - fork() { - this.stack.push({ chunks: this.chunks, buf: this.buf }); - this.chunks = []; - this.buf = []; - return this; - } - /** - * Join the last fork. Write its length and bytes, then - * return to the previous state. - */ - join() { - let chunk = this.finish(); - let prev = this.stack.pop(); - if (!prev) - throw new Error("invalid state, fork stack empty"); - this.chunks = prev.chunks; - this.buf = prev.buf; - this.uint32(chunk.byteLength); - return this.raw(chunk); + return message; } /** - * Writes a tag (field number and wire type). - * - * Equivalent to `uint32( (fieldNo << 3 | type) >>> 0 )`. + * Clone the message. * - * Generated code should compute the tag ahead of time and call `uint32()`. - */ - tag(fieldNo, type2) { - return this.uint32((fieldNo << 3 | type2) >>> 0); - } - /** - * Write a chunk of raw bytes. 
- */
- raw(chunk) {
- if (this.buf.length) {
- this.chunks.push(new Uint8Array(this.buf));
- this.buf = [];
- }
- this.chunks.push(chunk);
- return this;
- }
- /**
- * Write a `uint32` value, an unsigned 32 bit varint.
+ * Unknown fields are discarded.
*/
- uint32(value) {
- assert_1.assertUInt32(value);
- while (value > 127) {
- this.buf.push(value & 127 | 128);
- value = value >>> 7;
- }
- this.buf.push(value);
- return this;
+ clone(message) {
+ let copy = this.create();
+ reflection_merge_partial_1.reflectionMergePartial(this, copy, message);
+ return copy;
}
/**
- * Write a `int32` value, a signed 32 bit varint.
+ * Determines whether two messages of the same type have the same field values.
+ * Checks for deep equality, traversing repeated fields, oneof groups, maps
+ * and messages recursively.
+ * Will also return true if both messages are `undefined`.
*/
- int32(value) {
- assert_1.assertInt32(value);
- goog_varint_1.varint32write(value, this.buf);
- return this;
+ equals(a, b) {
+ return reflection_equals_1.reflectionEquals(this, a, b);
}
/**
- * Write a `bool` value, a varint.
+ * Is the given value assignable to our message type,
+ * and does it contain no [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)?
*/
- bool(value) {
- this.buf.push(value ? 1 : 0);
- return this;
+ is(arg, depth = this.defaultCheckDepth) {
+ return this.refTypeCheck.is(arg, depth, false);
}
/**
- * Write a `bytes` value, length-delimited arbitrary data.
+ * Is the given value assignable to our message type,
+ * regardless of [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)?
*/
- bytes(value) {
- this.uint32(value.byteLength);
- return this.raw(value);
+ isAssignable(arg, depth = this.defaultCheckDepth) {
+ return this.refTypeCheck.is(arg, depth, true);
}
/**
- * Write a `string` value, length-delimited data converted to UTF-8 text.
+ * Copy partial data into the target message.
*/
- string(value) {
- let chunk = this.textEncoder.encode(value);
- this.uint32(chunk.byteLength);
- return this.raw(chunk);
+ mergePartial(target, source) {
+ reflection_merge_partial_1.reflectionMergePartial(this, target, source);
}
/**
- * Write a `float` value, 32-bit floating point number.
+ * Create a new message from binary format.
*/
- float(value) {
- assert_1.assertFloat32(value);
- let chunk = new Uint8Array(4);
- new DataView(chunk.buffer).setFloat32(0, value, true);
- return this.raw(chunk);
+ fromBinary(data, options) {
+ let opt = binary_reader_1.binaryReadOptions(options);
+ return this.internalBinaryRead(opt.readerFactory(data), data.byteLength, opt);
}
/**
- * Write a `double` value, a 64-bit floating point number.
+ * Read a new message from a JSON value.
*/
- double(value) {
- let chunk = new Uint8Array(8);
- new DataView(chunk.buffer).setFloat64(0, value, true);
- return this.raw(chunk);
+ fromJson(json2, options) {
+ return this.internalJsonRead(json2, json_format_contract_1.jsonReadOptions(options));
}
/**
- * Write a `fixed32` value, an unsigned, fixed-length 32-bit integer.
+ * Read a new message from a JSON string.
+ * This is equivalent to `T.fromJson(JSON.parse(json))`.
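+ *
+ * Editor's sketch, assuming a hypothetical generated type `MyMessage`
+ * (not part of this diff):
+ * `const msg = MyMessage.fromJsonString('{"name":"test"}')`.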
*/
- fixed32(value) {
- assert_1.assertUInt32(value);
- let chunk = new Uint8Array(4);
- new DataView(chunk.buffer).setUint32(0, value, true);
- return this.raw(chunk);
+ fromJsonString(json2, options) {
+ let value = JSON.parse(json2);
+ return this.fromJson(value, options);
}
/**
- * Write a `sfixed32` value, a signed, fixed-length 32-bit integer.
+ * Write the message to canonical JSON value.
*/
- sfixed32(value) {
- assert_1.assertInt32(value);
- let chunk = new Uint8Array(4);
- new DataView(chunk.buffer).setInt32(0, value, true);
- return this.raw(chunk);
+ toJson(message, options) {
+ return this.internalJsonWrite(message, json_format_contract_1.jsonWriteOptions(options));
}
/**
- * Write a `sint32` value, a signed, zigzag-encoded 32-bit varint.
+ * Convert the message to canonical JSON string.
+ * This is equivalent to `JSON.stringify(T.toJson(t))`.
*/
- sint32(value) {
- assert_1.assertInt32(value);
- value = (value << 1 ^ value >> 31) >>> 0;
- goog_varint_1.varint32write(value, this.buf);
- return this;
+ toJsonString(message, options) {
+ var _a;
+ let value = this.toJson(message, options);
+ return JSON.stringify(value, null, (_a = options === null || options === void 0 ? void 0 : options.prettySpaces) !== null && _a !== void 0 ? _a : 0);
}
/**
- * Write a `sfixed64` value, a signed, fixed-length 64-bit integer.
+ * Write the message to binary format.
*/
- sfixed64(value) {
- let chunk = new Uint8Array(8);
- let view = new DataView(chunk.buffer);
- let long = pb_long_1.PbLong.from(value);
- view.setInt32(0, long.lo, true);
- view.setInt32(4, long.hi, true);
- return this.raw(chunk);
+ toBinary(message, options) {
+ let opt = binary_writer_1.binaryWriteOptions(options);
+ return this.internalBinaryWrite(message, opt.writerFactory(), opt).finish();
}
/**
- * Write a `fixed64` value, an unsigned, fixed-length 64-bit integer.
+ * This is an internal method. If you just want to read a message from
+ * JSON, use `fromJson()` or `fromJsonString()`.
+ *
+ * Reads JSON value and merges the fields into the target
+ * according to protobuf rules. If the target is omitted,
+ * a new instance is created first.
*/
- fixed64(value) {
- let chunk = new Uint8Array(8);
- let view = new DataView(chunk.buffer);
- let long = pb_long_1.PbULong.from(value);
- view.setInt32(0, long.lo, true);
- view.setInt32(4, long.hi, true);
- return this.raw(chunk);
+ internalJsonRead(json2, options, target) {
+ if (json2 !== null && typeof json2 == "object" && !Array.isArray(json2)) {
+ let message = target !== null && target !== void 0 ? target : this.create();
+ this.refJsonReader.read(json2, message, options);
+ return message;
+ }
+ throw new Error(`Unable to parse message ${this.typeName} from JSON ${json_typings_1.typeofJsonValue(json2)}.`);
}
/**
- * Write a `int64` value, a signed 64-bit varint.
+ * This is an internal method. If you just want to write a message
+ * to JSON, use `toJson()` or `toJsonString()`.
+ *
+ * Writes JSON value and returns it.
*/
- int64(value) {
- let long = pb_long_1.PbLong.from(value);
- goog_varint_1.varint64write(long.lo, long.hi, this.buf);
- return this;
+ internalJsonWrite(message, options) {
+ return this.refJsonWriter.write(message, options);
}
/**
- * Write a `sint64` value, a signed, zig-zag-encoded 64-bit varint.
+ * This is an internal method. If you just want to write a message
+ * in binary format, use `toBinary()`.
+ *
+ * Serializes the message in binary format and appends it to the given
+ * writer. Returns the passed writer.
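+ *
+ * Editor's note: application code would normally call the public
+ * `toBinary()` instead, e.g. `const bytes = MyMessage.toBinary(msg)`
+ * (`MyMessage` being a hypothetical generated type).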
*/ - sint64(value) { - let long = pb_long_1.PbLong.from(value), sign = long.hi >> 31, lo = long.lo << 1 ^ sign, hi = (long.hi << 1 | long.lo >>> 31) ^ sign; - goog_varint_1.varint64write(lo, hi, this.buf); - return this; + internalBinaryWrite(message, writer, options) { + this.refBinWriter.write(message, writer, options); + return writer; } /** - * Write a `uint64` value, an unsigned 64-bit varint. + * This is an internal method. If you just want to read a message from + * binary data, use `fromBinary()`. + * + * Reads data from binary format and merges the fields into + * the target according to protobuf rules. If the target is + * omitted, a new instance is created first. */ - uint64(value) { - let long = pb_long_1.PbULong.from(value); - goog_varint_1.varint64write(long.lo, long.hi, this.buf); - return this; + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(); + this.refBinReader.read(reader, message, options, length); + return message; } }; - exports2.BinaryWriter = BinaryWriter; + exports2.MessageType = MessageType; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/json-format-contract.js -var require_json_format_contract = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/json-format-contract.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-contains-message-type.js +var require_reflection_contains_message_type = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-contains-message-type.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.mergeJsonOptions = exports2.jsonWriteOptions = exports2.jsonReadOptions = void 0; - var defaultsWrite = { - emitDefaultValues: false, - enumAsInteger: false, - useProtoFieldName: false, - prettySpaces: 0 - }; - var defaultsRead = { - ignoreUnknownFields: false - }; - function jsonReadOptions(options) { - return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; - } - exports2.jsonReadOptions = jsonReadOptions; - function jsonWriteOptions(options) { - return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; - } - exports2.jsonWriteOptions = jsonWriteOptions; - function mergeJsonOptions(a, b) { - var _a, _b; - let c = Object.assign(Object.assign({}, a), b); - c.typeRegistry = [...(_a = a === null || a === void 0 ? void 0 : a.typeRegistry) !== null && _a !== void 0 ? _a : [], ...(_b = b === null || b === void 0 ? void 0 : b.typeRegistry) !== null && _b !== void 0 ? 
_b : []]; - return c; + exports2.containsMessageType = void 0; + var message_type_contract_1 = require_message_type_contract(); + function containsMessageType(msg) { + return msg[message_type_contract_1.MESSAGE_TYPE] != null; } - exports2.mergeJsonOptions = mergeJsonOptions; + exports2.containsMessageType = containsMessageType; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/message-type-contract.js -var require_message_type_contract = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/message-type-contract.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/enum-object.js +var require_enum_object = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/enum-object.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.MESSAGE_TYPE = void 0; - exports2.MESSAGE_TYPE = /* @__PURE__ */ Symbol.for("protobuf-ts/message-type"); + exports2.listEnumNumbers = exports2.listEnumNames = exports2.listEnumValues = exports2.isEnumObject = void 0; + function isEnumObject(arg) { + if (typeof arg != "object" || arg === null) { + return false; + } + if (!arg.hasOwnProperty(0)) { + return false; + } + for (let k of Object.keys(arg)) { + let num = parseInt(k); + if (!Number.isNaN(num)) { + let nam = arg[num]; + if (nam === void 0) + return false; + if (arg[nam] !== num) + return false; + } else { + let num2 = arg[k]; + if (num2 === void 0) + return false; + if (typeof num2 !== "number") + return false; + if (arg[num2] === void 0) + return false; + } + } + return true; + } + exports2.isEnumObject = isEnumObject; + function listEnumValues(enumObject) { + if (!isEnumObject(enumObject)) + throw new Error("not a typescript enum object"); + let values = []; + for (let [name, number] of Object.entries(enumObject)) + if (typeof number == "number") + values.push({ name, number }); + return values; + } + exports2.listEnumValues = listEnumValues; + function listEnumNames(enumObject) { + return listEnumValues(enumObject).map((val) => val.name); + } + exports2.listEnumNames = listEnumNames; + function listEnumNumbers(enumObject) { + return listEnumValues(enumObject).map((val) => val.number).filter((num, index, arr) => arr.indexOf(num) == index); + } + exports2.listEnumNumbers = listEnumNumbers; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/lower-camel-case.js -var require_lower_camel_case = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/lower-camel-case.js"(exports2) { +// node_modules/@protobuf-ts/runtime/build/commonjs/index.js +var require_commonjs16 = __commonJS({ + "node_modules/@protobuf-ts/runtime/build/commonjs/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.lowerCamelCase = void 0; - function lowerCamelCase(snakeCase) { - let capNext = false; - const sb = []; - for (let i = 0; i < snakeCase.length; i++) { - let next = snakeCase.charAt(i); - if (next == "_") { - capNext = true; - } else if (/\d/.test(next)) { - sb.push(next); - capNext = true; - } else if (capNext) { - sb.push(next.toUpperCase()); - capNext = false; - } else if (i == 0) { - sb.push(next.toLowerCase()); - } else { - sb.push(next); - } - } - return sb.join(""); - } - exports2.lowerCamelCase = lowerCamelCase; + var json_typings_1 = require_json_typings(); + Object.defineProperty(exports2, "typeofJsonValue", { enumerable: true, get: function() { + return json_typings_1.typeofJsonValue; + } }); + Object.defineProperty(exports2, "isJsonObject", { enumerable: 
true, get: function() { + return json_typings_1.isJsonObject; + } }); + var base64_1 = require_base642(); + Object.defineProperty(exports2, "base64decode", { enumerable: true, get: function() { + return base64_1.base64decode; + } }); + Object.defineProperty(exports2, "base64encode", { enumerable: true, get: function() { + return base64_1.base64encode; + } }); + var protobufjs_utf8_1 = require_protobufjs_utf8(); + Object.defineProperty(exports2, "utf8read", { enumerable: true, get: function() { + return protobufjs_utf8_1.utf8read; + } }); + var binary_format_contract_1 = require_binary_format_contract(); + Object.defineProperty(exports2, "WireType", { enumerable: true, get: function() { + return binary_format_contract_1.WireType; + } }); + Object.defineProperty(exports2, "mergeBinaryOptions", { enumerable: true, get: function() { + return binary_format_contract_1.mergeBinaryOptions; + } }); + Object.defineProperty(exports2, "UnknownFieldHandler", { enumerable: true, get: function() { + return binary_format_contract_1.UnknownFieldHandler; + } }); + var binary_reader_1 = require_binary_reader(); + Object.defineProperty(exports2, "BinaryReader", { enumerable: true, get: function() { + return binary_reader_1.BinaryReader; + } }); + Object.defineProperty(exports2, "binaryReadOptions", { enumerable: true, get: function() { + return binary_reader_1.binaryReadOptions; + } }); + var binary_writer_1 = require_binary_writer(); + Object.defineProperty(exports2, "BinaryWriter", { enumerable: true, get: function() { + return binary_writer_1.BinaryWriter; + } }); + Object.defineProperty(exports2, "binaryWriteOptions", { enumerable: true, get: function() { + return binary_writer_1.binaryWriteOptions; + } }); + var pb_long_1 = require_pb_long(); + Object.defineProperty(exports2, "PbLong", { enumerable: true, get: function() { + return pb_long_1.PbLong; + } }); + Object.defineProperty(exports2, "PbULong", { enumerable: true, get: function() { + return pb_long_1.PbULong; + } }); + var json_format_contract_1 = require_json_format_contract(); + Object.defineProperty(exports2, "jsonReadOptions", { enumerable: true, get: function() { + return json_format_contract_1.jsonReadOptions; + } }); + Object.defineProperty(exports2, "jsonWriteOptions", { enumerable: true, get: function() { + return json_format_contract_1.jsonWriteOptions; + } }); + Object.defineProperty(exports2, "mergeJsonOptions", { enumerable: true, get: function() { + return json_format_contract_1.mergeJsonOptions; + } }); + var message_type_contract_1 = require_message_type_contract(); + Object.defineProperty(exports2, "MESSAGE_TYPE", { enumerable: true, get: function() { + return message_type_contract_1.MESSAGE_TYPE; + } }); + var message_type_1 = require_message_type(); + Object.defineProperty(exports2, "MessageType", { enumerable: true, get: function() { + return message_type_1.MessageType; + } }); + var reflection_info_1 = require_reflection_info(); + Object.defineProperty(exports2, "ScalarType", { enumerable: true, get: function() { + return reflection_info_1.ScalarType; + } }); + Object.defineProperty(exports2, "LongType", { enumerable: true, get: function() { + return reflection_info_1.LongType; + } }); + Object.defineProperty(exports2, "RepeatType", { enumerable: true, get: function() { + return reflection_info_1.RepeatType; + } }); + Object.defineProperty(exports2, "normalizeFieldInfo", { enumerable: true, get: function() { + return reflection_info_1.normalizeFieldInfo; + } }); + Object.defineProperty(exports2, "readFieldOptions", { 
enumerable: true, get: function() { + return reflection_info_1.readFieldOptions; + } }); + Object.defineProperty(exports2, "readFieldOption", { enumerable: true, get: function() { + return reflection_info_1.readFieldOption; + } }); + Object.defineProperty(exports2, "readMessageOption", { enumerable: true, get: function() { + return reflection_info_1.readMessageOption; + } }); + var reflection_type_check_1 = require_reflection_type_check(); + Object.defineProperty(exports2, "ReflectionTypeCheck", { enumerable: true, get: function() { + return reflection_type_check_1.ReflectionTypeCheck; + } }); + var reflection_create_1 = require_reflection_create(); + Object.defineProperty(exports2, "reflectionCreate", { enumerable: true, get: function() { + return reflection_create_1.reflectionCreate; + } }); + var reflection_scalar_default_1 = require_reflection_scalar_default(); + Object.defineProperty(exports2, "reflectionScalarDefault", { enumerable: true, get: function() { + return reflection_scalar_default_1.reflectionScalarDefault; + } }); + var reflection_merge_partial_1 = require_reflection_merge_partial(); + Object.defineProperty(exports2, "reflectionMergePartial", { enumerable: true, get: function() { + return reflection_merge_partial_1.reflectionMergePartial; + } }); + var reflection_equals_1 = require_reflection_equals(); + Object.defineProperty(exports2, "reflectionEquals", { enumerable: true, get: function() { + return reflection_equals_1.reflectionEquals; + } }); + var reflection_binary_reader_1 = require_reflection_binary_reader(); + Object.defineProperty(exports2, "ReflectionBinaryReader", { enumerable: true, get: function() { + return reflection_binary_reader_1.ReflectionBinaryReader; + } }); + var reflection_binary_writer_1 = require_reflection_binary_writer(); + Object.defineProperty(exports2, "ReflectionBinaryWriter", { enumerable: true, get: function() { + return reflection_binary_writer_1.ReflectionBinaryWriter; + } }); + var reflection_json_reader_1 = require_reflection_json_reader(); + Object.defineProperty(exports2, "ReflectionJsonReader", { enumerable: true, get: function() { + return reflection_json_reader_1.ReflectionJsonReader; + } }); + var reflection_json_writer_1 = require_reflection_json_writer(); + Object.defineProperty(exports2, "ReflectionJsonWriter", { enumerable: true, get: function() { + return reflection_json_writer_1.ReflectionJsonWriter; + } }); + var reflection_contains_message_type_1 = require_reflection_contains_message_type(); + Object.defineProperty(exports2, "containsMessageType", { enumerable: true, get: function() { + return reflection_contains_message_type_1.containsMessageType; + } }); + var oneof_1 = require_oneof(); + Object.defineProperty(exports2, "isOneofGroup", { enumerable: true, get: function() { + return oneof_1.isOneofGroup; + } }); + Object.defineProperty(exports2, "setOneofValue", { enumerable: true, get: function() { + return oneof_1.setOneofValue; + } }); + Object.defineProperty(exports2, "getOneofValue", { enumerable: true, get: function() { + return oneof_1.getOneofValue; + } }); + Object.defineProperty(exports2, "clearOneofValue", { enumerable: true, get: function() { + return oneof_1.clearOneofValue; + } }); + Object.defineProperty(exports2, "getSelectedOneofValue", { enumerable: true, get: function() { + return oneof_1.getSelectedOneofValue; + } }); + var enum_object_1 = require_enum_object(); + Object.defineProperty(exports2, "listEnumValues", { enumerable: true, get: function() { + return enum_object_1.listEnumValues; + } }); + 
Object.defineProperty(exports2, "listEnumNames", { enumerable: true, get: function() { + return enum_object_1.listEnumNames; + } }); + Object.defineProperty(exports2, "listEnumNumbers", { enumerable: true, get: function() { + return enum_object_1.listEnumNumbers; + } }); + Object.defineProperty(exports2, "isEnumObject", { enumerable: true, get: function() { + return enum_object_1.isEnumObject; + } }); + var lower_camel_case_1 = require_lower_camel_case(); + Object.defineProperty(exports2, "lowerCamelCase", { enumerable: true, get: function() { + return lower_camel_case_1.lowerCamelCase; + } }); + var assert_1 = require_assert(); + Object.defineProperty(exports2, "assert", { enumerable: true, get: function() { + return assert_1.assert; + } }); + Object.defineProperty(exports2, "assertNever", { enumerable: true, get: function() { + return assert_1.assertNever; + } }); + Object.defineProperty(exports2, "assertInt32", { enumerable: true, get: function() { + return assert_1.assertInt32; + } }); + Object.defineProperty(exports2, "assertUInt32", { enumerable: true, get: function() { + return assert_1.assertUInt32; + } }); + Object.defineProperty(exports2, "assertFloat32", { enumerable: true, get: function() { + return assert_1.assertFloat32; + } }); } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-info.js -var require_reflection_info = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-info.js"(exports2) { +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/reflection-info.js +var require_reflection_info2 = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/reflection-info.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.readMessageOption = exports2.readFieldOption = exports2.readFieldOptions = exports2.normalizeFieldInfo = exports2.RepeatType = exports2.LongType = exports2.ScalarType = void 0; - var lower_camel_case_1 = require_lower_camel_case(); - var ScalarType; - (function(ScalarType2) { - ScalarType2[ScalarType2["DOUBLE"] = 1] = "DOUBLE"; - ScalarType2[ScalarType2["FLOAT"] = 2] = "FLOAT"; - ScalarType2[ScalarType2["INT64"] = 3] = "INT64"; - ScalarType2[ScalarType2["UINT64"] = 4] = "UINT64"; - ScalarType2[ScalarType2["INT32"] = 5] = "INT32"; - ScalarType2[ScalarType2["FIXED64"] = 6] = "FIXED64"; - ScalarType2[ScalarType2["FIXED32"] = 7] = "FIXED32"; - ScalarType2[ScalarType2["BOOL"] = 8] = "BOOL"; - ScalarType2[ScalarType2["STRING"] = 9] = "STRING"; - ScalarType2[ScalarType2["BYTES"] = 12] = "BYTES"; - ScalarType2[ScalarType2["UINT32"] = 13] = "UINT32"; - ScalarType2[ScalarType2["SFIXED32"] = 15] = "SFIXED32"; - ScalarType2[ScalarType2["SFIXED64"] = 16] = "SFIXED64"; - ScalarType2[ScalarType2["SINT32"] = 17] = "SINT32"; - ScalarType2[ScalarType2["SINT64"] = 18] = "SINT64"; - })(ScalarType = exports2.ScalarType || (exports2.ScalarType = {})); - var LongType; - (function(LongType2) { - LongType2[LongType2["BIGINT"] = 0] = "BIGINT"; - LongType2[LongType2["STRING"] = 1] = "STRING"; - LongType2[LongType2["NUMBER"] = 2] = "NUMBER"; - })(LongType = exports2.LongType || (exports2.LongType = {})); - var RepeatType; - (function(RepeatType2) { - RepeatType2[RepeatType2["NO"] = 0] = "NO"; - RepeatType2[RepeatType2["PACKED"] = 1] = "PACKED"; - RepeatType2[RepeatType2["UNPACKED"] = 2] = "UNPACKED"; - })(RepeatType = exports2.RepeatType || (exports2.RepeatType = {})); - function normalizeFieldInfo(field) { - var _a, _b, _c, _d; - field.localName = (_a = field.localName) !== null && 
_a !== void 0 ? _a : lower_camel_case_1.lowerCamelCase(field.name); - field.jsonName = (_b = field.jsonName) !== null && _b !== void 0 ? _b : lower_camel_case_1.lowerCamelCase(field.name); - field.repeat = (_c = field.repeat) !== null && _c !== void 0 ? _c : RepeatType.NO; - field.opt = (_d = field.opt) !== null && _d !== void 0 ? _d : field.repeat ? false : field.oneof ? false : field.kind == "message"; - return field; + exports2.readServiceOption = exports2.readMethodOption = exports2.readMethodOptions = exports2.normalizeMethodInfo = void 0; + var runtime_1 = require_commonjs16(); + function normalizeMethodInfo(method, service) { + var _a, _b, _c; + let m = method; + m.service = service; + m.localName = (_a = m.localName) !== null && _a !== void 0 ? _a : runtime_1.lowerCamelCase(m.name); + m.serverStreaming = !!m.serverStreaming; + m.clientStreaming = !!m.clientStreaming; + m.options = (_b = m.options) !== null && _b !== void 0 ? _b : {}; + m.idempotency = (_c = m.idempotency) !== null && _c !== void 0 ? _c : void 0; + return m; } - exports2.normalizeFieldInfo = normalizeFieldInfo; - function readFieldOptions(messageType, fieldName, extensionName, extensionType) { + exports2.normalizeMethodInfo = normalizeMethodInfo; + function readMethodOptions(service, methodName, extensionName, extensionType) { var _a; - const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; + const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : void 0; } - exports2.readFieldOptions = readFieldOptions; - function readFieldOption(messageType, fieldName, extensionName, extensionType) { + exports2.readMethodOptions = readMethodOptions; + function readMethodOption(service, methodName, extensionName, extensionType) { var _a; - const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; + const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; if (!options) { return void 0; } @@ -112776,2425 +112573,2780 @@ var require_reflection_info = __commonJS({ } return extensionType ? extensionType.fromJson(optionVal) : optionVal; } - exports2.readFieldOption = readFieldOption; - function readMessageOption(messageType, extensionName, extensionType) { - const options = messageType.options; + exports2.readMethodOption = readMethodOption; + function readServiceOption(service, extensionName, extensionType) { + const options = service.options; + if (!options) { + return void 0; + } const optionVal = options[extensionName]; if (optionVal === void 0) { return optionVal; } return extensionType ? 
extensionType.fromJson(optionVal) : optionVal; } - exports2.readMessageOption = readMessageOption; + exports2.readServiceOption = readServiceOption; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/oneof.js -var require_oneof = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/oneof.js"(exports2) { +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/service-type.js +var require_service_type = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/service-type.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getSelectedOneofValue = exports2.clearOneofValue = exports2.setUnknownOneofValue = exports2.setOneofValue = exports2.getOneofValue = exports2.isOneofGroup = void 0; - function isOneofGroup(any) { - if (typeof any != "object" || any === null || !any.hasOwnProperty("oneofKind")) { - return false; - } - switch (typeof any.oneofKind) { - case "string": - if (any[any.oneofKind] === void 0) - return false; - return Object.keys(any).length == 2; - case "undefined": - return Object.keys(any).length == 1; - default: - return false; - } - } - exports2.isOneofGroup = isOneofGroup; - function getOneofValue(oneof, kind) { - return oneof[kind]; - } - exports2.getOneofValue = getOneofValue; - function setOneofValue(oneof, kind, value) { - if (oneof.oneofKind !== void 0) { - delete oneof[oneof.oneofKind]; - } - oneof.oneofKind = kind; - if (value !== void 0) { - oneof[kind] = value; - } - } - exports2.setOneofValue = setOneofValue; - function setUnknownOneofValue(oneof, kind, value) { - if (oneof.oneofKind !== void 0) { - delete oneof[oneof.oneofKind]; - } - oneof.oneofKind = kind; - if (value !== void 0 && kind !== void 0) { - oneof[kind] = value; - } - } - exports2.setUnknownOneofValue = setUnknownOneofValue; - function clearOneofValue(oneof) { - if (oneof.oneofKind !== void 0) { - delete oneof[oneof.oneofKind]; - } - oneof.oneofKind = void 0; - } - exports2.clearOneofValue = clearOneofValue; - function getSelectedOneofValue(oneof) { - if (oneof.oneofKind === void 0) { - return void 0; + exports2.ServiceType = void 0; + var reflection_info_1 = require_reflection_info2(); + var ServiceType = class { + constructor(typeName, methods, options) { + this.typeName = typeName; + this.methods = methods.map((i) => reflection_info_1.normalizeMethodInfo(i, this)); + this.options = options !== null && options !== void 0 ? options : {}; } - return oneof[oneof.oneofKind]; - } - exports2.getSelectedOneofValue = getSelectedOneofValue; + }; + exports2.ServiceType = ServiceType; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-type-check.js -var require_reflection_type_check = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-type-check.js"(exports2) { +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-error.js +var require_rpc_error = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-error.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionTypeCheck = void 0; - var reflection_info_1 = require_reflection_info(); - var oneof_1 = require_oneof(); - var ReflectionTypeCheck = class { - constructor(info6) { - var _a; - this.fields = (_a = info6.fields) !== null && _a !== void 0 ? 
_a : []; - } - prepare() { - if (this.data) - return; - const req = [], known = [], oneofs = []; - for (let field of this.fields) { - if (field.oneof) { - if (!oneofs.includes(field.oneof)) { - oneofs.push(field.oneof); - req.push(field.oneof); - known.push(field.oneof); - } - } else { - known.push(field.localName); - switch (field.kind) { - case "scalar": - case "enum": - if (!field.opt || field.repeat) - req.push(field.localName); - break; - case "message": - if (field.repeat) - req.push(field.localName); - break; - case "map": - req.push(field.localName); - break; - } - } - } - this.data = { req, known, oneofs: Object.values(oneofs) }; - } - /** - * Is the argument a valid message as specified by the - * reflection information? - * - * Checks all field types recursively. The `depth` - * specifies how deep into the structure the check will be. - * - * With a depth of 0, only the presence of fields - * is checked. - * - * With a depth of 1 or more, the field types are checked. - * - * With a depth of 2 or more, the members of map, repeated - * and message fields are checked. - * - * Message fields will be checked recursively with depth - 1. - * - * The number of map entries / repeated values being checked - * is < depth. - */ - is(message, depth, allowExcessProperties = false) { - if (depth < 0) - return true; - if (message === null || message === void 0 || typeof message != "object") - return false; - this.prepare(); - let keys = Object.keys(message), data = this.data; - if (keys.length < data.req.length || data.req.some((n) => !keys.includes(n))) - return false; - if (!allowExcessProperties) { - if (keys.some((k) => !data.known.includes(k))) - return false; - } - if (depth < 1) { - return true; - } - for (const name of data.oneofs) { - const group = message[name]; - if (!oneof_1.isOneofGroup(group)) - return false; - if (group.oneofKind === void 0) - continue; - const field = this.fields.find((f) => f.localName === group.oneofKind); - if (!field) - return false; - if (!this.field(group[group.oneofKind], field, allowExcessProperties, depth)) - return false; - } - for (const field of this.fields) { - if (field.oneof !== void 0) - continue; - if (!this.field(message[field.localName], field, allowExcessProperties, depth)) - return false; - } - return true; - } - field(arg, field, allowExcessProperties, depth) { - let repeated = field.repeat; - switch (field.kind) { - case "scalar": - if (arg === void 0) - return field.opt; - if (repeated) - return this.scalars(arg, field.T, depth, field.L); - return this.scalar(arg, field.T, field.L); - case "enum": - if (arg === void 0) - return field.opt; - if (repeated) - return this.scalars(arg, reflection_info_1.ScalarType.INT32, depth); - return this.scalar(arg, reflection_info_1.ScalarType.INT32); - case "message": - if (arg === void 0) - return true; - if (repeated) - return this.messages(arg, field.T(), allowExcessProperties, depth); - return this.message(arg, field.T(), allowExcessProperties, depth); - case "map": - if (typeof arg != "object" || arg === null) - return false; - if (depth < 2) - return true; - if (!this.mapKeys(arg, field.K, depth)) - return false; - switch (field.V.kind) { - case "scalar": - return this.scalars(Object.values(arg), field.V.T, depth, field.V.L); - case "enum": - return this.scalars(Object.values(arg), reflection_info_1.ScalarType.INT32, depth); - case "message": - return this.messages(Object.values(arg), field.V.T(), allowExcessProperties, depth); - } - break; - } - return true; - } - message(arg, type2, 
allowExcessProperties, depth) { - if (allowExcessProperties) { - return type2.isAssignable(arg, depth); - } - return type2.is(arg, depth); - } - messages(arg, type2, allowExcessProperties, depth) { - if (!Array.isArray(arg)) - return false; - if (depth < 2) - return true; - if (allowExcessProperties) { - for (let i = 0; i < arg.length && i < depth; i++) - if (!type2.isAssignable(arg[i], depth - 1)) - return false; - } else { - for (let i = 0; i < arg.length && i < depth; i++) - if (!type2.is(arg[i], depth - 1)) - return false; - } - return true; + exports2.RpcError = void 0; + var RpcError = class extends Error { + constructor(message, code = "UNKNOWN", meta) { + super(message); + this.name = "RpcError"; + Object.setPrototypeOf(this, new.target.prototype); + this.code = code; + this.meta = meta !== null && meta !== void 0 ? meta : {}; } - scalar(arg, type2, longType) { - let argType = typeof arg; - switch (type2) { - case reflection_info_1.ScalarType.UINT64: - case reflection_info_1.ScalarType.FIXED64: - case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - switch (longType) { - case reflection_info_1.LongType.BIGINT: - return argType == "bigint"; - case reflection_info_1.LongType.NUMBER: - return argType == "number" && !isNaN(arg); - default: - return argType == "string"; - } - case reflection_info_1.ScalarType.BOOL: - return argType == "boolean"; - case reflection_info_1.ScalarType.STRING: - return argType == "string"; - case reflection_info_1.ScalarType.BYTES: - return arg instanceof Uint8Array; - case reflection_info_1.ScalarType.DOUBLE: - case reflection_info_1.ScalarType.FLOAT: - return argType == "number" && !isNaN(arg); - default: - return argType == "number" && Number.isInteger(arg); + toString() { + const l = [this.name + ": " + this.message]; + if (this.code) { + l.push(""); + l.push("Code: " + this.code); } - } - scalars(arg, type2, depth, longType) { - if (!Array.isArray(arg)) - return false; - if (depth < 2) - return true; - if (Array.isArray(arg)) { - for (let i = 0; i < arg.length && i < depth; i++) - if (!this.scalar(arg[i], type2, longType)) - return false; + if (this.serviceName && this.methodName) { + l.push("Method: " + this.serviceName + "/" + this.methodName); } - return true; - } - mapKeys(map2, type2, depth) { - let keys = Object.keys(map2); - switch (type2) { - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - case reflection_info_1.ScalarType.UINT32: - return this.scalars(keys.slice(0, depth).map((k) => parseInt(k)), type2, depth); - case reflection_info_1.ScalarType.BOOL: - return this.scalars(keys.slice(0, depth).map((k) => k == "true" ? true : k == "false" ? 
false : k), type2, depth); - default: - return this.scalars(keys, type2, depth, reflection_info_1.LongType.STRING); + let m = Object.entries(this.meta); + if (m.length) { + l.push(""); + l.push("Meta:"); + for (let [k, v] of m) { + l.push(` ${k}: ${v}`); + } } + return l.join("\n"); } }; - exports2.ReflectionTypeCheck = ReflectionTypeCheck; + exports2.RpcError = RpcError; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-long-convert.js -var require_reflection_long_convert = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-long-convert.js"(exports2) { +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-options.js +var require_rpc_options = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-options.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionLongConvert = void 0; - var reflection_info_1 = require_reflection_info(); - function reflectionLongConvert(long, type2) { - switch (type2) { - case reflection_info_1.LongType.BIGINT: - return long.toBigInt(); - case reflection_info_1.LongType.NUMBER: - return long.toNumber(); - default: - return long.toString(); + exports2.mergeRpcOptions = void 0; + var runtime_1 = require_commonjs16(); + function mergeRpcOptions(defaults, options) { + if (!options) + return defaults; + let o = {}; + copy(defaults, o); + copy(options, o); + for (let key of Object.keys(options)) { + let val = options[key]; + switch (key) { + case "jsonOptions": + o.jsonOptions = runtime_1.mergeJsonOptions(defaults.jsonOptions, o.jsonOptions); + break; + case "binaryOptions": + o.binaryOptions = runtime_1.mergeBinaryOptions(defaults.binaryOptions, o.binaryOptions); + break; + case "meta": + o.meta = {}; + copy(defaults.meta, o.meta); + copy(options.meta, o.meta); + break; + case "interceptors": + o.interceptors = defaults.interceptors ? defaults.interceptors.concat(val) : val.concat(); + break; + } + } + return o; + } + exports2.mergeRpcOptions = mergeRpcOptions; + function copy(a, into) { + if (!a) + return; + let c = into; + for (let [k, v] of Object.entries(a)) { + if (v instanceof Date) + c[k] = new Date(v.getTime()); + else if (Array.isArray(v)) + c[k] = v.concat(); + else + c[k] = v; } } - exports2.reflectionLongConvert = reflectionLongConvert; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-reader.js -var require_reflection_json_reader = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-reader.js"(exports2) { +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/deferred.js +var require_deferred = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/deferred.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionJsonReader = void 0; - var json_typings_1 = require_json_typings(); - var base64_1 = require_base642(); - var reflection_info_1 = require_reflection_info(); - var pb_long_1 = require_pb_long(); - var assert_1 = require_assert(); - var reflection_long_convert_1 = require_reflection_long_convert(); - var ReflectionJsonReader = class { - constructor(info6) { - this.info = info6; - } - prepare() { - var _a; - if (this.fMap === void 0) { - this.fMap = {}; - const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? 
_a : []; - for (const field of fieldsInput) { - this.fMap[field.name] = field; - this.fMap[field.jsonName] = field; - this.fMap[field.localName] = field; - } + exports2.Deferred = exports2.DeferredState = void 0; + var DeferredState; + (function(DeferredState2) { + DeferredState2[DeferredState2["PENDING"] = 0] = "PENDING"; + DeferredState2[DeferredState2["REJECTED"] = 1] = "REJECTED"; + DeferredState2[DeferredState2["RESOLVED"] = 2] = "RESOLVED"; + })(DeferredState = exports2.DeferredState || (exports2.DeferredState = {})); + var Deferred = class { + /** + * @param preventUnhandledRejectionWarning - prevents the warning + * "Unhandled Promise rejection" by adding a noop rejection handler. + * Working with calls returned from the runtime-rpc package in an + * async function usually means awaiting one call property after + * the other. This means that the "status" is not being awaited when + * an earlier await for the "headers" is rejected. This causes the + * "unhandled promise reject" warning. A more correct behaviour for + * calls might be to become aware whether at least one of the + * promises is handled and swallow the rejection warning for the + * others. + */ + constructor(preventUnhandledRejectionWarning = true) { + this._state = DeferredState.PENDING; + this._promise = new Promise((resolve2, reject) => { + this._resolve = resolve2; + this._reject = reject; + }); + if (preventUnhandledRejectionWarning) { + this._promise.catch((_) => { + }); } } - // Cannot parse JSON for #. - assert(condition, fieldName, jsonValue) { - if (!condition) { - let what = json_typings_1.typeofJsonValue(jsonValue); - if (what == "number" || what == "boolean") - what = jsonValue.toString(); - throw new Error(`Cannot parse JSON ${what} for ${this.info.typeName}#${fieldName}`); - } + /** + * Get the current state of the promise. + */ + get state() { + return this._state; } /** - * Reads a message from canonical JSON format into the target message. - * - * Repeated fields are appended. Map entries are added, overwriting - * existing keys. - * - * If a message field is already present, it will be merged with the - * new data. + * Get the deferred promise. */ - read(input, message, options) { - this.prepare(); - const oneofsHandled = []; - for (const [jsonKey, jsonValue] of Object.entries(input)) { - const field = this.fMap[jsonKey]; - if (!field) { - if (!options.ignoreUnknownFields) - throw new Error(`Found unknown field while reading ${this.info.typeName} from JSON format. 
JSON key: ${jsonKey}`); - continue; - } - const localName = field.localName; - let target; - if (field.oneof) { - if (jsonValue === null && (field.kind !== "enum" || field.T()[0] !== "google.protobuf.NullValue")) { - continue; - } - if (oneofsHandled.includes(field.oneof)) - throw new Error(`Multiple members of the oneof group "${field.oneof}" of ${this.info.typeName} are present in JSON.`); - oneofsHandled.push(field.oneof); - target = message[field.oneof] = { - oneofKind: localName - }; - } else { - target = message; - } - if (field.kind == "map") { - if (jsonValue === null) { - continue; - } - this.assert(json_typings_1.isJsonObject(jsonValue), field.name, jsonValue); - const fieldObj = target[localName]; - for (const [jsonObjKey, jsonObjValue] of Object.entries(jsonValue)) { - this.assert(jsonObjValue !== null, field.name + " map value", null); - let val; - switch (field.V.kind) { - case "message": - val = field.V.T().internalJsonRead(jsonObjValue, options); - break; - case "enum": - val = this.enum(field.V.T(), jsonObjValue, field.name, options.ignoreUnknownFields); - if (val === false) - continue; - break; - case "scalar": - val = this.scalar(jsonObjValue, field.V.T, field.V.L, field.name); - break; - } - this.assert(val !== void 0, field.name + " map value", jsonObjValue); - let key = jsonObjKey; - if (field.K == reflection_info_1.ScalarType.BOOL) - key = key == "true" ? true : key == "false" ? false : key; - key = this.scalar(key, field.K, reflection_info_1.LongType.STRING, field.name).toString(); - fieldObj[key] = val; - } - } else if (field.repeat) { - if (jsonValue === null) - continue; - this.assert(Array.isArray(jsonValue), field.name, jsonValue); - const fieldArr = target[localName]; - for (const jsonItem of jsonValue) { - this.assert(jsonItem !== null, field.name, null); - let val; - switch (field.kind) { - case "message": - val = field.T().internalJsonRead(jsonItem, options); - break; - case "enum": - val = this.enum(field.T(), jsonItem, field.name, options.ignoreUnknownFields); - if (val === false) - continue; - break; - case "scalar": - val = this.scalar(jsonItem, field.T, field.L, field.name); - break; - } - this.assert(val !== void 0, field.name, jsonValue); - fieldArr.push(val); - } - } else { - switch (field.kind) { - case "message": - if (jsonValue === null && field.T().typeName != "google.protobuf.Value") { - this.assert(field.oneof === void 0, field.name + " (oneof member)", null); - continue; - } - target[localName] = field.T().internalJsonRead(jsonValue, options, target[localName]); - break; - case "enum": - if (jsonValue === null) - continue; - let val = this.enum(field.T(), jsonValue, field.name, options.ignoreUnknownFields); - if (val === false) - continue; - target[localName] = val; - break; - case "scalar": - if (jsonValue === null) - continue; - target[localName] = this.scalar(jsonValue, field.T, field.L, field.name); - break; - } - } - } + get promise() { + return this._promise; } /** - * Returns `false` for unrecognized string representations. - * - * google.protobuf.NullValue accepts only JSON `null` (or the old `"NULL_VALUE"`). + * Resolve the promise. Throws if the promise is already resolved or rejected. 
*/ - enum(type2, json2, fieldName, ignoreUnknownFields) { - if (type2[0] == "google.protobuf.NullValue") - assert_1.assert(json2 === null || json2 === "NULL_VALUE", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type2[0]} only accepts null.`); - if (json2 === null) - return 0; - switch (typeof json2) { - case "number": - assert_1.assert(Number.isInteger(json2), `Unable to parse field ${this.info.typeName}#${fieldName}, enum can only be integral number, got ${json2}.`); - return json2; - case "string": - let localEnumName = json2; - if (type2[2] && json2.substring(0, type2[2].length) === type2[2]) - localEnumName = json2.substring(type2[2].length); - let enumNumber = type2[1][localEnumName]; - if (typeof enumNumber === "undefined" && ignoreUnknownFields) { - return false; - } - assert_1.assert(typeof enumNumber == "number", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type2[0]} has no value for "${json2}".`); - return enumNumber; - } - assert_1.assert(false, `Unable to parse field ${this.info.typeName}#${fieldName}, cannot parse enum value from ${typeof json2}".`); + resolve(value) { + if (this.state !== DeferredState.PENDING) + throw new Error(`cannot resolve ${DeferredState[this.state].toLowerCase()}`); + this._resolve(value); + this._state = DeferredState.RESOLVED; } - scalar(json2, type2, longType, fieldName) { - let e; - try { - switch (type2) { - // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". - // Either numbers or strings are accepted. Exponent notation is also accepted. - case reflection_info_1.ScalarType.DOUBLE: - case reflection_info_1.ScalarType.FLOAT: - if (json2 === null) - return 0; - if (json2 === "NaN") - return Number.NaN; - if (json2 === "Infinity") - return Number.POSITIVE_INFINITY; - if (json2 === "-Infinity") - return Number.NEGATIVE_INFINITY; - if (json2 === "") { - e = "empty string"; - break; - } - if (typeof json2 == "string" && json2.trim().length !== json2.length) { - e = "extra whitespace"; - break; - } - if (typeof json2 != "string" && typeof json2 != "number") { - break; - } - let float2 = Number(json2); - if (Number.isNaN(float2)) { - e = "not a number"; - break; - } - if (!Number.isFinite(float2)) { - e = "too large or small"; - break; - } - if (type2 == reflection_info_1.ScalarType.FLOAT) - assert_1.assertFloat32(float2); - return float2; - // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - case reflection_info_1.ScalarType.UINT32: - if (json2 === null) - return 0; - let int32; - if (typeof json2 == "number") - int32 = json2; - else if (json2 === "") - e = "empty string"; - else if (typeof json2 == "string") { - if (json2.trim().length !== json2.length) - e = "extra whitespace"; - else - int32 = Number(json2); - } - if (int32 === void 0) - break; - if (type2 == reflection_info_1.ScalarType.UINT32) - assert_1.assertUInt32(int32); - else - assert_1.assertInt32(int32); - return int32; - // int64, fixed64, uint64: JSON value will be a decimal string. Either numbers or strings are accepted. 
- case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - if (json2 === null) - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); - if (typeof json2 != "number" && typeof json2 != "string") - break; - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.from(json2), longType); - case reflection_info_1.ScalarType.FIXED64: - case reflection_info_1.ScalarType.UINT64: - if (json2 === null) - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); - if (typeof json2 != "number" && typeof json2 != "string") - break; - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.from(json2), longType); - // bool: - case reflection_info_1.ScalarType.BOOL: - if (json2 === null) - return false; - if (typeof json2 !== "boolean") - break; - return json2; - // string: - case reflection_info_1.ScalarType.STRING: - if (json2 === null) - return ""; - if (typeof json2 !== "string") { - e = "extra whitespace"; - break; - } - try { - encodeURIComponent(json2); - } catch (e2) { - e2 = "invalid UTF8"; - break; - } - return json2; - // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. - // Either standard or URL-safe base64 encoding with/without paddings are accepted. - case reflection_info_1.ScalarType.BYTES: - if (json2 === null || json2 === "") - return new Uint8Array(0); - if (typeof json2 !== "string") - break; - return base64_1.base64decode(json2); + /** + * Reject the promise. Throws if the promise is already resolved or rejected. + */ + reject(reason) { + if (this.state !== DeferredState.PENDING) + throw new Error(`cannot reject ${DeferredState[this.state].toLowerCase()}`); + this._reject(reason); + this._state = DeferredState.REJECTED; + } + /** + * Resolve the promise. Ignore if not pending. + */ + resolvePending(val) { + if (this._state === DeferredState.PENDING) + this.resolve(val); + } + /** + * Reject the promise. Ignore if not pending. + */ + rejectPending(reason) { + if (this._state === DeferredState.PENDING) + this.reject(reason); + } + }; + exports2.Deferred = Deferred; + } +}); + +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-output-stream.js +var require_rpc_output_stream = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-output-stream.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.RpcOutputStreamController = void 0; + var deferred_1 = require_deferred(); + var runtime_1 = require_commonjs16(); + var RpcOutputStreamController = class { + constructor() { + this._lis = { + nxt: [], + msg: [], + err: [], + cmp: [] + }; + this._closed = false; + this._itState = { q: [] }; + } + // --- RpcOutputStream callback API + onNext(callback) { + return this.addLis(callback, this._lis.nxt); + } + onMessage(callback) { + return this.addLis(callback, this._lis.msg); + } + onError(callback) { + return this.addLis(callback, this._lis.err); + } + onComplete(callback) { + return this.addLis(callback, this._lis.cmp); + } + addLis(callback, list) { + list.push(callback); + return () => { + let i = list.indexOf(callback); + if (i >= 0) + list.splice(i, 1); + }; + } + // remove all listeners + clearLis() { + for (let l of Object.values(this._lis)) + l.splice(0, l.length); + } + // --- Controller API + /** + * Is this stream already closed by a completion or error? 
+ */ + get closed() { + return this._closed !== false; + } + /** + * Emit message, close with error, or close successfully, but only one + * at a time. + * Can be used to wrap a stream by using the other stream's `onNext`. + */ + notifyNext(message, error3, complete) { + runtime_1.assert((message ? 1 : 0) + (error3 ? 1 : 0) + (complete ? 1 : 0) <= 1, "only one emission at a time"); + if (message) + this.notifyMessage(message); + if (error3) + this.notifyError(error3); + if (complete) + this.notifyComplete(); + } + /** + * Emits a new message. Throws if stream is closed. + * + * Triggers onNext and onMessage callbacks. + */ + notifyMessage(message) { + runtime_1.assert(!this.closed, "stream is closed"); + this.pushIt({ value: message, done: false }); + this._lis.msg.forEach((l) => l(message)); + this._lis.nxt.forEach((l) => l(message, void 0, false)); + } + /** + * Closes the stream with an error. Throws if stream is closed. + * + * Triggers onNext and onError callbacks. + */ + notifyError(error3) { + runtime_1.assert(!this.closed, "stream is closed"); + this._closed = error3; + this.pushIt(error3); + this._lis.err.forEach((l) => l(error3)); + this._lis.nxt.forEach((l) => l(void 0, error3, false)); + this.clearLis(); + } + /** + * Closes the stream successfully. Throws if stream is closed. + * + * Triggers onNext and onComplete callbacks. + */ + notifyComplete() { + runtime_1.assert(!this.closed, "stream is closed"); + this._closed = true; + this.pushIt({ value: null, done: true }); + this._lis.cmp.forEach((l) => l()); + this._lis.nxt.forEach((l) => l(void 0, void 0, true)); + this.clearLis(); + } + /** + * Creates an async iterator (that can be used with `for await {...}`) + * to consume the stream. + * + * Some things to note: + * - If an error occurs, the `for await` will throw it. + * - If an error occurred before the `for await` was started, `for await` + * will re-throw it. + * - If the stream is already complete, the `for await` will be empty. + * - If your `for await` consumes slower than the stream produces, + * for example because you are relaying messages in a slow operation, + * messages are queued. + */ + [Symbol.asyncIterator]() { + if (this._closed === true) + this.pushIt({ value: null, done: true }); + else if (this._closed !== false) + this.pushIt(this._closed); + return { + next: () => { + let state = this._itState; + runtime_1.assert(state, "bad state"); + runtime_1.assert(!state.p, "iterator contract broken"); + let first = state.q.shift(); + if (first) + return "value" in first ? Promise.resolve(first) : Promise.reject(first); + state.p = new deferred_1.Deferred(); + return state.p.promise; } - } catch (error3) { - e = error3.message; + }; + } + // "push" a new iterator result. + // this either resolves a pending promise, or enqueues the result. + pushIt(result) { + let state = this._itState; + if (state.p) { + const p = state.p; + runtime_1.assert(p.state == deferred_1.DeferredState.PENDING, "iterator contract broken"); + "value" in result ? p.resolve(result) : p.reject(result); + delete state.p; + } else { + state.q.push(result); } - this.assert(false, fieldName + (e ? 
" - " + e : ""), json2); } }; - exports2.ReflectionJsonReader = ReflectionJsonReader; + exports2.RpcOutputStreamController = RpcOutputStreamController; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-writer.js -var require_reflection_json_writer = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-writer.js"(exports2) { +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/unary-call.js +var require_unary_call = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/unary-call.js"(exports2) { "use strict"; + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve2) { + resolve2(value); + }); + } + return new (P || (P = Promise))(function(resolve2, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + function step(result) { + result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionJsonWriter = void 0; - var base64_1 = require_base642(); - var pb_long_1 = require_pb_long(); - var reflection_info_1 = require_reflection_info(); - var assert_1 = require_assert(); - var ReflectionJsonWriter = class { - constructor(info6) { - var _a; - this.fields = (_a = info6.fields) !== null && _a !== void 0 ? _a : []; + exports2.UnaryCall = void 0; + var UnaryCall = class { + constructor(method, requestHeaders, request2, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.request = request2; + this.headers = headers; + this.response = response; + this.status = status; + this.trailers = trailers; } /** - * Converts the message to a JSON object, based on the field descriptors. + * If you are only interested in the final outcome of this call, + * you can await it to receive a `FinishedUnaryCall`. */ - write(message, options) { - const json2 = {}, source = message; - for (const field of this.fields) { - if (!field.oneof) { - let jsonValue2 = this.field(field, source[field.localName], options); - if (jsonValue2 !== void 0) - json2[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue2; - continue; + then(onfulfilled, onrejected) { + return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? 
Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + } + promiseFinished() { + return __awaiter2(this, void 0, void 0, function* () { + let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + request: this.request, + headers, + response, + status, + trailers + }; + }); + } + }; + exports2.UnaryCall = UnaryCall; + } +}); + +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-streaming-call.js +var require_server_streaming_call = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-streaming-call.js"(exports2) { + "use strict"; + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve2) { + resolve2(value); + }); + } + return new (P || (P = Promise))(function(resolve2, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } - const group = source[field.oneof]; - if (group.oneofKind !== field.localName) - continue; - const opt = field.kind == "scalar" || field.kind == "enum" ? Object.assign(Object.assign({}, options), { emitDefaultValues: true }) : options; - let jsonValue = this.field(field, group[field.localName], opt); - assert_1.assert(jsonValue !== void 0); - json2[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue; } - return json2; - } - field(field, value, options) { - let jsonValue = void 0; - if (field.kind == "map") { - assert_1.assert(typeof value == "object" && value !== null); - const jsonObj = {}; - switch (field.V.kind) { - case "scalar": - for (const [entryKey, entryValue] of Object.entries(value)) { - const val = this.scalar(field.V.T, entryValue, field.name, false, true); - assert_1.assert(val !== void 0); - jsonObj[entryKey.toString()] = val; - } - break; - case "message": - const messageType = field.V.T(); - for (const [entryKey, entryValue] of Object.entries(value)) { - const val = this.message(messageType, entryValue, field.name, options); - assert_1.assert(val !== void 0); - jsonObj[entryKey.toString()] = val; - } - break; - case "enum": - const enumInfo = field.V.T(); - for (const [entryKey, entryValue] of Object.entries(value)) { - assert_1.assert(entryValue === void 0 || typeof entryValue == "number"); - const val = this.enum(enumInfo, entryValue, field.name, false, true, options.enumAsInteger); - assert_1.assert(val !== void 0); - jsonObj[entryKey.toString()] = val; - } - break; + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); } - if (options.emitDefaultValues || Object.keys(jsonObj).length > 0) - jsonValue = jsonObj; - } else if (field.repeat) { - assert_1.assert(Array.isArray(value)); - const jsonArr = []; - switch (field.kind) { - case "scalar": - for (let i = 0; i < value.length; i++) { - const val = this.scalar(field.T, value[i], field.name, field.opt, true); - assert_1.assert(val !== void 0); - jsonArr.push(val); - } - break; - case "enum": - const enumInfo = field.T(); - for (let i = 0; i < value.length; i++) { - assert_1.assert(value[i] === void 0 || typeof value[i] == "number"); - const val = this.enum(enumInfo, value[i], field.name, field.opt, true, options.enumAsInteger); - assert_1.assert(val !== void 0); - jsonArr.push(val); - } - break; - case "message": - const messageType = field.T(); - for (let i = 0; i < 
value.length; i++) { - const val = this.message(messageType, value[i], field.name, options); - assert_1.assert(val !== void 0); - jsonArr.push(val); - } - break; + } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + function step(result) { + result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ServerStreamingCall = void 0; + var ServerStreamingCall = class { + constructor(method, requestHeaders, request2, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.request = request2; + this.headers = headers; + this.responses = response; + this.status = status; + this.trailers = trailers; + } + /** + * Instead of awaiting the response status and trailers, you can + * just as well await this call itself to receive the server outcome. + * You should first setup some listeners to the `request` to + * see the actual messages the server replied with. + */ + then(onfulfilled, onrejected) { + return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + } + promiseFinished() { + return __awaiter2(this, void 0, void 0, function* () { + let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + request: this.request, + headers, + status, + trailers + }; + }); + } + }; + exports2.ServerStreamingCall = ServerStreamingCall; + } +}); + +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/client-streaming-call.js +var require_client_streaming_call = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/client-streaming-call.js"(exports2) { + "use strict"; + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve2) { + resolve2(value); + }); + } + return new (P || (P = Promise))(function(resolve2, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } } - if (options.emitDefaultValues || jsonArr.length > 0) - jsonValue = jsonArr; - } else { - switch (field.kind) { - case "scalar": - jsonValue = this.scalar(field.T, value, field.name, field.opt, options.emitDefaultValues); - break; - case "enum": - jsonValue = this.enum(field.T(), value, field.name, field.opt, options.emitDefaultValues, options.enumAsInteger); - break; - case "message": - jsonValue = this.message(field.T(), value, field.name, options); - break; + } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + function step(result) { + result.done ? 
resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ClientStreamingCall = void 0; + var ClientStreamingCall = class { + constructor(method, requestHeaders, request2, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.requests = request2; + this.headers = headers; + this.response = response; + this.status = status; + this.trailers = trailers; } /** - * Returns `null` as the default for google.protobuf.NullValue. + * Instead of awaiting the response status and trailers, you can + * just as well await this call itself to receive the server outcome. + * Note that it may still be valid to send more request messages. */ - enum(type2, value, fieldName, optional, emitDefaultValues, enumAsInteger) { - if (type2[0] == "google.protobuf.NullValue") - return !emitDefaultValues && !optional ? void 0 : null; - if (value === void 0) { - assert_1.assert(optional); - return void 0; - } - if (value === 0 && !emitDefaultValues && !optional) - return void 0; - assert_1.assert(typeof value == "number"); - assert_1.assert(Number.isInteger(value)); - if (enumAsInteger || !type2[1].hasOwnProperty(value)) - return value; - if (type2[2]) - return type2[2] + type2[1][value]; - return type2[1][value]; + then(onfulfilled, onrejected) { + return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); } - message(type2, value, fieldName, options) { - if (value === void 0) - return options.emitDefaultValues ? null : void 0; - return type2.internalJsonWrite(value, options); + promiseFinished() { + return __awaiter2(this, void 0, void 0, function* () { + let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + headers, + response, + status, + trailers + }; + }); } - scalar(type2, value, fieldName, optional, emitDefaultValues) { - if (value === void 0) { - assert_1.assert(optional); - return void 0; + }; + exports2.ClientStreamingCall = ClientStreamingCall; + } +}); + +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/duplex-streaming-call.js +var require_duplex_streaming_call = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/duplex-streaming-call.js"(exports2) { + "use strict"; + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve2) { + resolve2(value); + }); + } + return new (P || (P = Promise))(function(resolve2, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } } - const ed = emitDefaultValues || optional; - switch (type2) { - // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - if (value === 0) - return ed ? 0 : void 0; - assert_1.assertInt32(value); - return value; - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.UINT32: - if (value === 0) - return ed ? 
0 : void 0; - assert_1.assertUInt32(value); - return value; - // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". - // Either numbers or strings are accepted. Exponent notation is also accepted. - case reflection_info_1.ScalarType.FLOAT: - assert_1.assertFloat32(value); - case reflection_info_1.ScalarType.DOUBLE: - if (value === 0) - return ed ? 0 : void 0; - assert_1.assert(typeof value == "number"); - if (Number.isNaN(value)) - return "NaN"; - if (value === Number.POSITIVE_INFINITY) - return "Infinity"; - if (value === Number.NEGATIVE_INFINITY) - return "-Infinity"; - return value; - // string: - case reflection_info_1.ScalarType.STRING: - if (value === "") - return ed ? "" : void 0; - assert_1.assert(typeof value == "string"); - return value; - // bool: - case reflection_info_1.ScalarType.BOOL: - if (value === false) - return ed ? false : void 0; - assert_1.assert(typeof value == "boolean"); - return value; - // JSON value will be a decimal string. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.UINT64: - case reflection_info_1.ScalarType.FIXED64: - assert_1.assert(typeof value == "number" || typeof value == "string" || typeof value == "bigint"); - let ulong = pb_long_1.PbULong.from(value); - if (ulong.isZero() && !ed) - return void 0; - return ulong.toString(); - // JSON value will be a decimal string. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - assert_1.assert(typeof value == "number" || typeof value == "string" || typeof value == "bigint"); - let long = pb_long_1.PbLong.from(value); - if (long.isZero() && !ed) - return void 0; - return long.toString(); - // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. - // Either standard or URL-safe base64 encoding with/without paddings are accepted. - case reflection_info_1.ScalarType.BYTES: - assert_1.assert(value instanceof Uint8Array); - if (!value.byteLength) - return ed ? "" : void 0; - return base64_1.base64encode(value); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + function step(result) { + result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.DuplexStreamingCall = void 0; + var DuplexStreamingCall = class { + constructor(method, requestHeaders, request2, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.requests = request2; + this.headers = headers; + this.responses = response; + this.status = status; + this.trailers = trailers; + } + /** + * Instead of awaiting the response status and trailers, you can + * just as well await this call itself to receive the server outcome. + * Note that it may still be valid to send more request messages. + */ + then(onfulfilled, onrejected) { + return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? 
Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + } + promiseFinished() { + return __awaiter2(this, void 0, void 0, function* () { + let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + headers, + status, + trailers + }; + }); } }; - exports2.ReflectionJsonWriter = ReflectionJsonWriter; + exports2.DuplexStreamingCall = DuplexStreamingCall; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-scalar-default.js -var require_reflection_scalar_default = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-scalar-default.js"(exports2) { +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/test-transport.js +var require_test_transport = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/test-transport.js"(exports2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionScalarDefault = void 0; - var reflection_info_1 = require_reflection_info(); - var reflection_long_convert_1 = require_reflection_long_convert(); - var pb_long_1 = require_pb_long(); - function reflectionScalarDefault(type2, longType = reflection_info_1.LongType.STRING) { - switch (type2) { - case reflection_info_1.ScalarType.BOOL: - return false; - case reflection_info_1.ScalarType.UINT64: - case reflection_info_1.ScalarType.FIXED64: - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); - case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); - case reflection_info_1.ScalarType.DOUBLE: - case reflection_info_1.ScalarType.FLOAT: - return 0; - case reflection_info_1.ScalarType.BYTES: - return new Uint8Array(0); - case reflection_info_1.ScalarType.STRING: - return ""; - default: - return 0; + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve2) { + resolve2(value); + }); } - } - exports2.reflectionScalarDefault = reflectionScalarDefault; - } -}); - -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-reader.js -var require_reflection_binary_reader = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-reader.js"(exports2) { - "use strict"; + return new (P || (P = Promise))(function(resolve2, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + function step(result) { + result.done ? 
resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionBinaryReader = void 0; - var binary_format_contract_1 = require_binary_format_contract(); - var reflection_info_1 = require_reflection_info(); - var reflection_long_convert_1 = require_reflection_long_convert(); - var reflection_scalar_default_1 = require_reflection_scalar_default(); - var ReflectionBinaryReader = class { - constructor(info6) { - this.info = info6; + exports2.TestTransport = void 0; + var rpc_error_1 = require_rpc_error(); + var runtime_1 = require_commonjs16(); + var rpc_output_stream_1 = require_rpc_output_stream(); + var rpc_options_1 = require_rpc_options(); + var unary_call_1 = require_unary_call(); + var server_streaming_call_1 = require_server_streaming_call(); + var client_streaming_call_1 = require_client_streaming_call(); + var duplex_streaming_call_1 = require_duplex_streaming_call(); + var TestTransport = class _TestTransport { + /** + * Initialize with mock data. Omitted fields have default value. + */ + constructor(data) { + this.suppressUncaughtRejections = true; + this.headerDelay = 10; + this.responseDelay = 50; + this.betweenResponseDelay = 10; + this.afterResponseDelay = 10; + this.data = data !== null && data !== void 0 ? data : {}; } - prepare() { + /** + * Sent message(s) during the last operation. + */ + get sentMessages() { + if (this.lastInput instanceof TestInputStream) { + return this.lastInput.sent; + } else if (typeof this.lastInput == "object") { + return [this.lastInput.single]; + } + return []; + } + /** + * Sending message(s) completed? + */ + get sendComplete() { + if (this.lastInput instanceof TestInputStream) { + return this.lastInput.completed; + } else if (typeof this.lastInput == "object") { + return true; + } + return false; + } + // Creates a promise for response headers from the mock data. + promiseHeaders() { var _a; - if (!this.fieldNoToField) { - const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; - this.fieldNoToField = new Map(fieldsInput.map((field) => [field.no, field])); + const headers = (_a = this.data.headers) !== null && _a !== void 0 ? _a : _TestTransport.defaultHeaders; + return headers instanceof rpc_error_1.RpcError ? Promise.reject(headers) : Promise.resolve(headers); + } + // Creates a promise for a single, valid, message from the mock data. + promiseSingleResponse(method) { + if (this.data.response instanceof rpc_error_1.RpcError) { + return Promise.reject(this.data.response); + } + let r; + if (Array.isArray(this.data.response)) { + runtime_1.assert(this.data.response.length > 0); + r = this.data.response[0]; + } else if (this.data.response !== void 0) { + r = this.data.response; + } else { + r = method.O.create(); } + runtime_1.assert(method.O.is(r)); + return Promise.resolve(r); } /** - * Reads a message from binary format into the target message. - * - * Repeated fields are appended. Map entries are added, overwriting - * existing keys. + * Pushes response messages from the mock data to the output stream. + * If an error response, status or trailers are mocked, the stream is + * closed with the respective error. + * Otherwise, stream is completed successfully. * - * If a message field is already present, it will be merged with the - * new data. + * The returned promise resolves when the stream is closed. It should + * not reject. 
If it does, code is broken. */ - read(reader, message, options, length) { - this.prepare(); - const end = length === void 0 ? reader.len : reader.pos + length; - while (reader.pos < end) { - const [fieldNo, wireType] = reader.tag(), field = this.fieldNoToField.get(fieldNo); - if (!field) { - let u = options.readUnknownField; - if (u == "throw") - throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.info.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? binary_format_contract_1.UnknownFieldHandler.onRead : u)(this.info.typeName, message, fieldNo, wireType, d); - continue; + streamResponses(method, stream, abort) { + return __awaiter2(this, void 0, void 0, function* () { + const messages = []; + if (this.data.response === void 0) { + messages.push(method.O.create()); + } else if (Array.isArray(this.data.response)) { + for (let msg of this.data.response) { + runtime_1.assert(method.O.is(msg)); + messages.push(msg); + } + } else if (!(this.data.response instanceof rpc_error_1.RpcError)) { + runtime_1.assert(method.O.is(this.data.response)); + messages.push(this.data.response); } - let target = message, repeated = field.repeat, localName = field.localName; - if (field.oneof) { - target = target[field.oneof]; - if (target.oneofKind !== localName) - target = message[field.oneof] = { - oneofKind: localName - }; + try { + yield delay2(this.responseDelay, abort)(void 0); + } catch (error3) { + stream.notifyError(error3); + return; } - switch (field.kind) { - case "scalar": - case "enum": - let T = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; - let L = field.kind == "scalar" ? field.L : void 0; - if (repeated) { - let arr = target[localName]; - if (wireType == binary_format_contract_1.WireType.LengthDelimited && T != reflection_info_1.ScalarType.STRING && T != reflection_info_1.ScalarType.BYTES) { - let e = reader.uint32() + reader.pos; - while (reader.pos < e) - arr.push(this.scalar(reader, T, L)); - } else - arr.push(this.scalar(reader, T, L)); - } else - target[localName] = this.scalar(reader, T, L); - break; - case "message": - if (repeated) { - let arr = target[localName]; - let msg = field.T().internalBinaryRead(reader, reader.uint32(), options); - arr.push(msg); - } else - target[localName] = field.T().internalBinaryRead(reader, reader.uint32(), options, target[localName]); - break; - case "map": - let [mapKey, mapVal] = this.mapEntry(field, reader, options); - target[localName][mapKey] = mapVal; - break; + if (this.data.response instanceof rpc_error_1.RpcError) { + stream.notifyError(this.data.response); + return; + } + for (let msg of messages) { + stream.notifyMessage(msg); + try { + yield delay2(this.betweenResponseDelay, abort)(void 0); + } catch (error3) { + stream.notifyError(error3); + return; + } + } + if (this.data.status instanceof rpc_error_1.RpcError) { + stream.notifyError(this.data.status); + return; + } + if (this.data.trailers instanceof rpc_error_1.RpcError) { + stream.notifyError(this.data.trailers); + return; + } + stream.notifyComplete(); + }); + } + // Creates a promise for response status from the mock data. + promiseStatus() { + var _a; + const status = (_a = this.data.status) !== null && _a !== void 0 ? _a : _TestTransport.defaultStatus; + return status instanceof rpc_error_1.RpcError ? Promise.reject(status) : Promise.resolve(status); + } + // Creates a promise for response trailers from the mock data. 
+ promiseTrailers() { + var _a; + const trailers = (_a = this.data.trailers) !== null && _a !== void 0 ? _a : _TestTransport.defaultTrailers; + return trailers instanceof rpc_error_1.RpcError ? Promise.reject(trailers) : Promise.resolve(trailers); + } + maybeSuppressUncaught(...promise) { + if (this.suppressUncaughtRejections) { + for (let p of promise) { + p.catch(() => { + }); } } } - /** - * Read a map field, expecting key field = 1, value field = 2 - */ - mapEntry(field, reader, options) { - let length = reader.uint32(); - let end = reader.pos + length; - let key = void 0; - let val = void 0; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case 1: - if (field.K == reflection_info_1.ScalarType.BOOL) - key = reader.bool().toString(); - else - key = this.scalar(reader, field.K, reflection_info_1.LongType.STRING); - break; - case 2: - switch (field.V.kind) { - case "scalar": - val = this.scalar(reader, field.V.T, field.V.L); - break; - case "enum": - val = reader.int32(); - break; - case "message": - val = field.V.T().internalBinaryRead(reader, reader.uint32(), options); - break; - } - break; - default: - throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) in map entry for ${this.info.typeName}#${field.name}`); + mergeOptions(options) { + return rpc_options_1.mergeRpcOptions({}, options); + } + unary(method, input, options) { + var _a; + const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders().then(delay2(this.headerDelay, options.abort)), responsePromise = headersPromise.catch((_) => { + }).then(delay2(this.responseDelay, options.abort)).then((_) => this.promiseSingleResponse(method)), statusPromise = responsePromise.catch((_) => { + }).then(delay2(this.afterResponseDelay, options.abort)).then((_) => this.promiseStatus()), trailersPromise = responsePromise.catch((_) => { + }).then(delay2(this.afterResponseDelay, options.abort)).then((_) => this.promiseTrailers()); + this.maybeSuppressUncaught(statusPromise, trailersPromise); + this.lastInput = { single: input }; + return new unary_call_1.UnaryCall(method, requestHeaders, input, headersPromise, responsePromise, statusPromise, trailersPromise); + } + serverStreaming(method, input, options) { + var _a; + const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders().then(delay2(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise.then(delay2(this.responseDelay, options.abort)).catch(() => { + }).then(() => this.streamResponses(method, outputStream, options.abort)).then(delay2(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise.then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise.then(() => this.promiseTrailers()); + this.maybeSuppressUncaught(statusPromise, trailersPromise); + this.lastInput = { single: input }; + return new server_streaming_call_1.ServerStreamingCall(method, requestHeaders, input, headersPromise, outputStream, statusPromise, trailersPromise); + } + clientStreaming(method, options) { + var _a; + const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? 
_a : {}, headersPromise = this.promiseHeaders().then(delay2(this.headerDelay, options.abort)), responsePromise = headersPromise.catch((_) => { + }).then(delay2(this.responseDelay, options.abort)).then((_) => this.promiseSingleResponse(method)), statusPromise = responsePromise.catch((_) => { + }).then(delay2(this.afterResponseDelay, options.abort)).then((_) => this.promiseStatus()), trailersPromise = responsePromise.catch((_) => { + }).then(delay2(this.afterResponseDelay, options.abort)).then((_) => this.promiseTrailers()); + this.maybeSuppressUncaught(statusPromise, trailersPromise); + this.lastInput = new TestInputStream(this.data, options.abort); + return new client_streaming_call_1.ClientStreamingCall(method, requestHeaders, this.lastInput, headersPromise, responsePromise, statusPromise, trailersPromise); + } + duplex(method, options) { + var _a; + const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders().then(delay2(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise.then(delay2(this.responseDelay, options.abort)).catch(() => { + }).then(() => this.streamResponses(method, outputStream, options.abort)).then(delay2(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise.then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise.then(() => this.promiseTrailers()); + this.maybeSuppressUncaught(statusPromise, trailersPromise); + this.lastInput = new TestInputStream(this.data, options.abort); + return new duplex_streaming_call_1.DuplexStreamingCall(method, requestHeaders, this.lastInput, headersPromise, outputStream, statusPromise, trailersPromise); + } + }; + exports2.TestTransport = TestTransport; + TestTransport.defaultHeaders = { + responseHeader: "test" + }; + TestTransport.defaultStatus = { + code: "OK", + detail: "all good" + }; + TestTransport.defaultTrailers = { + responseTrailer: "test" + }; + function delay2(ms, abort) { + return (v) => new Promise((resolve2, reject) => { + if (abort === null || abort === void 0 ? void 0 : abort.aborted) { + reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); + } else { + const id = setTimeout(() => resolve2(v), ms); + if (abort) { + abort.addEventListener("abort", (ev) => { + clearTimeout(id); + reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); + }); } } - if (key === void 0) { - let keyRaw = reflection_scalar_default_1.reflectionScalarDefault(field.K); - key = field.K == reflection_info_1.ScalarType.BOOL ? keyRaw.toString() : keyRaw; + }); + } + var TestInputStream = class { + constructor(data, abort) { + this._completed = false; + this._sent = []; + this.data = data; + this.abort = abort; + } + get sent() { + return this._sent; + } + get completed() { + return this._completed; + } + send(message) { + if (this.data.inputMessage instanceof rpc_error_1.RpcError) { + return Promise.reject(this.data.inputMessage); } - if (val === void 0) - switch (field.V.kind) { - case "scalar": - val = reflection_scalar_default_1.reflectionScalarDefault(field.V.T, field.V.L); - break; - case "enum": - val = 0; - break; - case "message": - val = field.V.T().create(); - break; - } - return [key, val]; + const delayMs = this.data.inputMessage === void 0 ? 
10 : this.data.inputMessage; + return Promise.resolve(void 0).then(() => { + this._sent.push(message); + }).then(delay2(delayMs, this.abort)); } - scalar(reader, type2, longType) { - switch (type2) { - case reflection_info_1.ScalarType.INT32: - return reader.int32(); - case reflection_info_1.ScalarType.STRING: - return reader.string(); - case reflection_info_1.ScalarType.BOOL: - return reader.bool(); - case reflection_info_1.ScalarType.DOUBLE: - return reader.double(); - case reflection_info_1.ScalarType.FLOAT: - return reader.float(); - case reflection_info_1.ScalarType.INT64: - return reflection_long_convert_1.reflectionLongConvert(reader.int64(), longType); - case reflection_info_1.ScalarType.UINT64: - return reflection_long_convert_1.reflectionLongConvert(reader.uint64(), longType); - case reflection_info_1.ScalarType.FIXED64: - return reflection_long_convert_1.reflectionLongConvert(reader.fixed64(), longType); - case reflection_info_1.ScalarType.FIXED32: - return reader.fixed32(); - case reflection_info_1.ScalarType.BYTES: - return reader.bytes(); - case reflection_info_1.ScalarType.UINT32: - return reader.uint32(); - case reflection_info_1.ScalarType.SFIXED32: - return reader.sfixed32(); - case reflection_info_1.ScalarType.SFIXED64: - return reflection_long_convert_1.reflectionLongConvert(reader.sfixed64(), longType); - case reflection_info_1.ScalarType.SINT32: - return reader.sint32(); - case reflection_info_1.ScalarType.SINT64: - return reflection_long_convert_1.reflectionLongConvert(reader.sint64(), longType); + complete() { + if (this.data.inputComplete instanceof rpc_error_1.RpcError) { + return Promise.reject(this.data.inputComplete); } + const delayMs = this.data.inputComplete === void 0 ? 10 : this.data.inputComplete; + return Promise.resolve(void 0).then(() => { + this._completed = true; + }).then(delay2(delayMs, this.abort)); } }; - exports2.ReflectionBinaryReader = ReflectionBinaryReader; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-writer.js -var require_reflection_binary_writer = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-writer.js"(exports2) { +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-interceptor.js +var require_rpc_interceptor = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-interceptor.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.stackDuplexStreamingInterceptors = exports2.stackClientStreamingInterceptors = exports2.stackServerStreamingInterceptors = exports2.stackUnaryInterceptors = exports2.stackIntercept = void 0; + var runtime_1 = require_commonjs16(); + function stackIntercept(kind, transport, method, options, input) { + var _a, _b, _c, _d; + if (kind == "unary") { + let tail = (mtd, inp, opt) => transport.unary(mtd, inp, opt); + for (const curr of ((_a = options.interceptors) !== null && _a !== void 0 ? _a : []).filter((i) => i.interceptUnary).reverse()) { + const next = tail; + tail = (mtd, inp, opt) => curr.interceptUnary(next, mtd, inp, opt); + } + return tail(method, input, options); + } + if (kind == "serverStreaming") { + let tail = (mtd, inp, opt) => transport.serverStreaming(mtd, inp, opt); + for (const curr of ((_b = options.interceptors) !== null && _b !== void 0 ? 
_b : []).filter((i) => i.interceptServerStreaming).reverse()) { + const next = tail; + tail = (mtd, inp, opt) => curr.interceptServerStreaming(next, mtd, inp, opt); + } + return tail(method, input, options); + } + if (kind == "clientStreaming") { + let tail = (mtd, opt) => transport.clientStreaming(mtd, opt); + for (const curr of ((_c = options.interceptors) !== null && _c !== void 0 ? _c : []).filter((i) => i.interceptClientStreaming).reverse()) { + const next = tail; + tail = (mtd, opt) => curr.interceptClientStreaming(next, mtd, opt); + } + return tail(method, options); + } + if (kind == "duplex") { + let tail = (mtd, opt) => transport.duplex(mtd, opt); + for (const curr of ((_d = options.interceptors) !== null && _d !== void 0 ? _d : []).filter((i) => i.interceptDuplex).reverse()) { + const next = tail; + tail = (mtd, opt) => curr.interceptDuplex(next, mtd, opt); + } + return tail(method, options); + } + runtime_1.assertNever(kind); + } + exports2.stackIntercept = stackIntercept; + function stackUnaryInterceptors(transport, method, input, options) { + return stackIntercept("unary", transport, method, options, input); + } + exports2.stackUnaryInterceptors = stackUnaryInterceptors; + function stackServerStreamingInterceptors(transport, method, input, options) { + return stackIntercept("serverStreaming", transport, method, options, input); + } + exports2.stackServerStreamingInterceptors = stackServerStreamingInterceptors; + function stackClientStreamingInterceptors(transport, method, options) { + return stackIntercept("clientStreaming", transport, method, options); + } + exports2.stackClientStreamingInterceptors = stackClientStreamingInterceptors; + function stackDuplexStreamingInterceptors(transport, method, options) { + return stackIntercept("duplex", transport, method, options); + } + exports2.stackDuplexStreamingInterceptors = stackDuplexStreamingInterceptors; + } +}); + +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-call-context.js +var require_server_call_context = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-call-context.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionBinaryWriter = void 0; - var binary_format_contract_1 = require_binary_format_contract(); - var reflection_info_1 = require_reflection_info(); - var assert_1 = require_assert(); - var pb_long_1 = require_pb_long(); - var ReflectionBinaryWriter = class { - constructor(info6) { - this.info = info6; - } - prepare() { - if (!this.fields) { - const fieldsInput = this.info.fields ? this.info.fields.concat() : []; - this.fields = fieldsInput.sort((a, b) => a.no - b.no); - } + exports2.ServerCallContextController = void 0; + var ServerCallContextController = class { + constructor(method, headers, deadline, sendResponseHeadersFn, defaultStatus = { code: "OK", detail: "" }) { + this._cancelled = false; + this._listeners = []; + this.method = method; + this.headers = headers; + this.deadline = deadline; + this.trailers = {}; + this._sendRH = sendResponseHeadersFn; + this.status = defaultStatus; } /** - * Writes the message to binary format. + * Set the call cancelled. + * + * Invokes all callbacks registered with onCancel() and + * sets `cancelled = true`. 
*/ - write(message, writer, options) { - this.prepare(); - for (const field of this.fields) { - let value, emitDefault, repeated = field.repeat, localName = field.localName; - if (field.oneof) { - const group = message[field.oneof]; - if (group.oneofKind !== localName) - continue; - value = group[localName]; - emitDefault = true; - } else { - value = message[localName]; - emitDefault = false; - } - switch (field.kind) { - case "scalar": - case "enum": - let T = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; - if (repeated) { - assert_1.assert(Array.isArray(value)); - if (repeated == reflection_info_1.RepeatType.PACKED) - this.packed(writer, T, field.no, value); - else - for (const item of value) - this.scalar(writer, T, field.no, item, true); - } else if (value === void 0) - assert_1.assert(field.opt); - else - this.scalar(writer, T, field.no, value, emitDefault || field.opt); - break; - case "message": - if (repeated) { - assert_1.assert(Array.isArray(value)); - for (const item of value) - this.message(writer, options, field.T(), field.no, item); - } else { - this.message(writer, options, field.T(), field.no, value); - } - break; - case "map": - assert_1.assert(typeof value == "object" && value !== null); - for (const [key, val] of Object.entries(value)) - this.mapEntry(writer, options, field, key, val); - break; + notifyCancelled() { + if (!this._cancelled) { + this._cancelled = true; + for (let l of this._listeners) { + l(); } } - let u = options.writeUnknownFields; - if (u !== false) - (u === true ? binary_format_contract_1.UnknownFieldHandler.onWrite : u)(this.info.typeName, message, writer); - } - mapEntry(writer, options, field, key, value) { - writer.tag(field.no, binary_format_contract_1.WireType.LengthDelimited); - writer.fork(); - let keyValue = key; - switch (field.K) { - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.UINT32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - keyValue = Number.parseInt(key); - break; - case reflection_info_1.ScalarType.BOOL: - assert_1.assert(key == "true" || key == "false"); - keyValue = key == "true"; - break; - } - this.scalar(writer, field.K, 1, keyValue, true); - switch (field.V.kind) { - case "scalar": - this.scalar(writer, field.V.T, 2, value, true); - break; - case "enum": - this.scalar(writer, reflection_info_1.ScalarType.INT32, 2, value, true); - break; - case "message": - this.message(writer, options, field.V.T(), 2, value); - break; - } - writer.join(); - } - message(writer, options, handler2, fieldNo, value) { - if (value === void 0) - return; - handler2.internalBinaryWrite(value, writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited).fork(), options); - writer.join(); } /** - * Write a single scalar value. + * Send response headers. */ - scalar(writer, type2, fieldNo, value, emitDefault) { - let [wireType, method, isDefault] = this.scalarInfo(type2, value); - if (!isDefault || emitDefault) { - writer.tag(fieldNo, wireType); - writer[method](value); - } + sendResponseHeaders(data) { + this._sendRH(data); } /** - * Write an array of scalar values in packed format. + * Is the call cancelled? + * + * When the client closes the connection before the server + * is done, the call is cancelled. + * + * If you want to cancel a request on the server, throw a + * RpcError with the CANCELLED status code. 
*/ - packed(writer, type2, fieldNo, value) { - if (!value.length) - return; - assert_1.assert(type2 !== reflection_info_1.ScalarType.BYTES && type2 !== reflection_info_1.ScalarType.STRING); - writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited); - writer.fork(); - let [, method] = this.scalarInfo(type2); - for (let i = 0; i < value.length; i++) - writer[method](value[i]); - writer.join(); + get cancelled() { + return this._cancelled; } /** - * Get information for writing a scalar value. - * - * Returns tuple: - * [0]: appropriate WireType - * [1]: name of the appropriate method of IBinaryWriter - * [2]: whether the given value is a default value - * - * If argument `value` is omitted, [2] is always false. + * Add a callback for cancellation. */ - scalarInfo(type2, value) { - let t = binary_format_contract_1.WireType.Varint; - let m; - let i = value === void 0; - let d = value === 0; - switch (type2) { - case reflection_info_1.ScalarType.INT32: - m = "int32"; - break; - case reflection_info_1.ScalarType.STRING: - d = i || !value.length; - t = binary_format_contract_1.WireType.LengthDelimited; - m = "string"; - break; - case reflection_info_1.ScalarType.BOOL: - d = value === false; - m = "bool"; - break; - case reflection_info_1.ScalarType.UINT32: - m = "uint32"; - break; - case reflection_info_1.ScalarType.DOUBLE: - t = binary_format_contract_1.WireType.Bit64; - m = "double"; - break; - case reflection_info_1.ScalarType.FLOAT: - t = binary_format_contract_1.WireType.Bit32; - m = "float"; - break; - case reflection_info_1.ScalarType.INT64: - d = i || pb_long_1.PbLong.from(value).isZero(); - m = "int64"; - break; - case reflection_info_1.ScalarType.UINT64: - d = i || pb_long_1.PbULong.from(value).isZero(); - m = "uint64"; - break; - case reflection_info_1.ScalarType.FIXED64: - d = i || pb_long_1.PbULong.from(value).isZero(); - t = binary_format_contract_1.WireType.Bit64; - m = "fixed64"; - break; - case reflection_info_1.ScalarType.BYTES: - d = i || !value.byteLength; - t = binary_format_contract_1.WireType.LengthDelimited; - m = "bytes"; - break; - case reflection_info_1.ScalarType.FIXED32: - t = binary_format_contract_1.WireType.Bit32; - m = "fixed32"; - break; - case reflection_info_1.ScalarType.SFIXED32: - t = binary_format_contract_1.WireType.Bit32; - m = "sfixed32"; - break; - case reflection_info_1.ScalarType.SFIXED64: - d = i || pb_long_1.PbLong.from(value).isZero(); - t = binary_format_contract_1.WireType.Bit64; - m = "sfixed64"; - break; - case reflection_info_1.ScalarType.SINT32: - m = "sint32"; - break; - case reflection_info_1.ScalarType.SINT64: - d = i || pb_long_1.PbLong.from(value).isZero(); - m = "sint64"; - break; - } - return [t, m, i || d]; + onCancel(callback) { + const l = this._listeners; + l.push(callback); + return () => { + let i = l.indexOf(callback); + if (i >= 0) + l.splice(i, 1); + }; } }; - exports2.ReflectionBinaryWriter = ReflectionBinaryWriter; + exports2.ServerCallContextController = ServerCallContextController; } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-create.js -var require_reflection_create = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-create.js"(exports2) { +// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js +var require_commonjs17 = __commonJS({ + "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionCreate = void 0; - var 
reflection_scalar_default_1 = require_reflection_scalar_default(); - var message_type_contract_1 = require_message_type_contract(); - function reflectionCreate(type2) { - const msg = type2.messagePrototype ? Object.create(type2.messagePrototype) : Object.defineProperty({}, message_type_contract_1.MESSAGE_TYPE, { value: type2 }); - for (let field of type2.fields) { - let name = field.localName; - if (field.opt) - continue; - if (field.oneof) - msg[field.oneof] = { oneofKind: void 0 }; - else if (field.repeat) - msg[name] = []; - else - switch (field.kind) { - case "scalar": - msg[name] = reflection_scalar_default_1.reflectionScalarDefault(field.T, field.L); - break; - case "enum": - msg[name] = 0; + var service_type_1 = require_service_type(); + Object.defineProperty(exports2, "ServiceType", { enumerable: true, get: function() { + return service_type_1.ServiceType; + } }); + var reflection_info_1 = require_reflection_info2(); + Object.defineProperty(exports2, "readMethodOptions", { enumerable: true, get: function() { + return reflection_info_1.readMethodOptions; + } }); + Object.defineProperty(exports2, "readMethodOption", { enumerable: true, get: function() { + return reflection_info_1.readMethodOption; + } }); + Object.defineProperty(exports2, "readServiceOption", { enumerable: true, get: function() { + return reflection_info_1.readServiceOption; + } }); + var rpc_error_1 = require_rpc_error(); + Object.defineProperty(exports2, "RpcError", { enumerable: true, get: function() { + return rpc_error_1.RpcError; + } }); + var rpc_options_1 = require_rpc_options(); + Object.defineProperty(exports2, "mergeRpcOptions", { enumerable: true, get: function() { + return rpc_options_1.mergeRpcOptions; + } }); + var rpc_output_stream_1 = require_rpc_output_stream(); + Object.defineProperty(exports2, "RpcOutputStreamController", { enumerable: true, get: function() { + return rpc_output_stream_1.RpcOutputStreamController; + } }); + var test_transport_1 = require_test_transport(); + Object.defineProperty(exports2, "TestTransport", { enumerable: true, get: function() { + return test_transport_1.TestTransport; + } }); + var deferred_1 = require_deferred(); + Object.defineProperty(exports2, "Deferred", { enumerable: true, get: function() { + return deferred_1.Deferred; + } }); + Object.defineProperty(exports2, "DeferredState", { enumerable: true, get: function() { + return deferred_1.DeferredState; + } }); + var duplex_streaming_call_1 = require_duplex_streaming_call(); + Object.defineProperty(exports2, "DuplexStreamingCall", { enumerable: true, get: function() { + return duplex_streaming_call_1.DuplexStreamingCall; + } }); + var client_streaming_call_1 = require_client_streaming_call(); + Object.defineProperty(exports2, "ClientStreamingCall", { enumerable: true, get: function() { + return client_streaming_call_1.ClientStreamingCall; + } }); + var server_streaming_call_1 = require_server_streaming_call(); + Object.defineProperty(exports2, "ServerStreamingCall", { enumerable: true, get: function() { + return server_streaming_call_1.ServerStreamingCall; + } }); + var unary_call_1 = require_unary_call(); + Object.defineProperty(exports2, "UnaryCall", { enumerable: true, get: function() { + return unary_call_1.UnaryCall; + } }); + var rpc_interceptor_1 = require_rpc_interceptor(); + Object.defineProperty(exports2, "stackIntercept", { enumerable: true, get: function() { + return rpc_interceptor_1.stackIntercept; + } }); + Object.defineProperty(exports2, "stackDuplexStreamingInterceptors", { enumerable: true, get: 
function() { + return rpc_interceptor_1.stackDuplexStreamingInterceptors; + } }); + Object.defineProperty(exports2, "stackClientStreamingInterceptors", { enumerable: true, get: function() { + return rpc_interceptor_1.stackClientStreamingInterceptors; + } }); + Object.defineProperty(exports2, "stackServerStreamingInterceptors", { enumerable: true, get: function() { + return rpc_interceptor_1.stackServerStreamingInterceptors; + } }); + Object.defineProperty(exports2, "stackUnaryInterceptors", { enumerable: true, get: function() { + return rpc_interceptor_1.stackUnaryInterceptors; + } }); + var server_call_context_1 = require_server_call_context(); + Object.defineProperty(exports2, "ServerCallContextController", { enumerable: true, get: function() { + return server_call_context_1.ServerCallContextController; + } }); + } +}); + +// node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.js +var require_cachescope = __commonJS({ + "node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.CacheScope = void 0; + var runtime_1 = require_commonjs16(); + var runtime_2 = require_commonjs16(); + var runtime_3 = require_commonjs16(); + var runtime_4 = require_commonjs16(); + var runtime_5 = require_commonjs16(); + var CacheScope$Type = class extends runtime_5.MessageType { + constructor() { + super("github.actions.results.entities.v1.CacheScope", [ + { + no: 1, + name: "scope", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 2, + name: "permission", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + } + ]); + } + create(value) { + const message = { scope: "", permission: "0" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string scope */ + 1: + message.scope = reader.string(); break; - case "map": - msg[name] = {}; + case /* int64 permission */ + 2: + message.permission = reader.int64().toString(); break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } + } + return message; } - return msg; - } - exports2.reflectionCreate = reflectionCreate; + internalBinaryWrite(message, writer, options) { + if (message.scope !== "") + writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.scope); + if (message.permission !== "0") + writer.tag(2, runtime_1.WireType.Varint).int64(message.permission); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.CacheScope = new CacheScope$Type(); } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-merge-partial.js -var require_reflection_merge_partial = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-merge-partial.js"(exports2) { +// node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.js +var require_cachemetadata = __commonJS({ + "node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionMergePartial = void 0; - function reflectionMergePartial(info6, target, source) { - let fieldValue, input = source, output; - for (let field of info6.fields) { - let name = field.localName; - if (field.oneof) { - const group = input[field.oneof]; - if ((group === null || group === void 0 ? void 0 : group.oneofKind) == void 0) { - continue; - } - fieldValue = group[name]; - output = target[field.oneof]; - output.oneofKind = group.oneofKind; - if (fieldValue == void 0) { - delete output[name]; - continue; - } - } else { - fieldValue = input[name]; - output = target; - if (fieldValue == void 0) { - continue; - } - } - if (field.repeat) - output[name].length = fieldValue.length; - switch (field.kind) { - case "scalar": - case "enum": - if (field.repeat) - for (let i = 0; i < fieldValue.length; i++) - output[name][i] = fieldValue[i]; - else - output[name] = fieldValue; - break; - case "message": - let T = field.T(); - if (field.repeat) - for (let i = 0; i < fieldValue.length; i++) - output[name][i] = T.create(fieldValue[i]); - else if (output[name] === void 0) - output[name] = T.create(fieldValue); - else - T.mergePartial(output[name], fieldValue); - break; - case "map": - switch (field.V.kind) { - case "scalar": - case "enum": - Object.assign(output[name], fieldValue); - break; - case "message": - let T2 = field.V.T(); - for (let k of Object.keys(fieldValue)) - output[name][k] = T2.create(fieldValue[k]); - break; - } - break; + exports2.CacheMetadata = void 0; + var runtime_1 = require_commonjs16(); + var runtime_2 = require_commonjs16(); + var runtime_3 = require_commonjs16(); + var runtime_4 = require_commonjs16(); + var runtime_5 = require_commonjs16(); + var cachescope_1 = require_cachescope(); + var CacheMetadata$Type = class extends runtime_5.MessageType { + constructor() { + super("github.actions.results.entities.v1.CacheMetadata", [ + { + no: 1, + name: "repository_id", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + }, + { no: 2, name: "scope", kind: "message", repeat: 1, T: () => cachescope_1.CacheScope } + ]); + } + create(value) { + const message = { repositoryId: "0", scope: [] }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 repository_id */ + 1: + message.repositoryId = reader.int64().toString(); + break; + case /* repeated github.actions.results.entities.v1.CacheScope scope */ + 2: + message.scope.push(cachescope_1.CacheScope.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } + return message; } - } - exports2.reflectionMergePartial = reflectionMergePartial; + internalBinaryWrite(message, writer, options) { + if (message.repositoryId !== "0") + writer.tag(1, runtime_1.WireType.Varint).int64(message.repositoryId); + for (let i = 0; i < message.scope.length; i++) + cachescope_1.CacheScope.internalBinaryWrite(message.scope[i], writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.CacheMetadata = new CacheMetadata$Type(); } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-equals.js -var require_reflection_equals = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-equals.js"(exports2) { +// node_modules/@actions/cache/lib/generated/results/api/v1/cache.js +var require_cache4 = __commonJS({ + "node_modules/@actions/cache/lib/generated/results/api/v1/cache.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionEquals = void 0; - var reflection_info_1 = require_reflection_info(); - function reflectionEquals(info6, a, b) { - if (a === b) - return true; - if (!a || !b) - return false; - for (let field of info6.fields) { - let localName = field.localName; - let val_a = field.oneof ? a[field.oneof][localName] : a[localName]; - let val_b = field.oneof ? b[field.oneof][localName] : b[localName]; - switch (field.kind) { - case "enum": - case "scalar": - let t = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; - if (!(field.repeat ? repeatedPrimitiveEq(t, val_a, val_b) : primitiveEq(t, val_a, val_b))) - return false; - break; - case "map": - if (!(field.V.kind == "message" ? repeatedMsgEq(field.V.T(), objectValues(val_a), objectValues(val_b)) : repeatedPrimitiveEq(field.V.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.V.T, objectValues(val_a), objectValues(val_b)))) - return false; - break; - case "message": - let T = field.T(); - if (!(field.repeat ? 
repeatedMsgEq(T, val_a, val_b) : T.equals(val_a, val_b))) - return false; - break; + exports2.CacheService = exports2.GetCacheEntryDownloadURLResponse = exports2.GetCacheEntryDownloadURLRequest = exports2.FinalizeCacheEntryUploadResponse = exports2.FinalizeCacheEntryUploadRequest = exports2.CreateCacheEntryResponse = exports2.CreateCacheEntryRequest = void 0; + var runtime_rpc_1 = require_commonjs17(); + var runtime_1 = require_commonjs16(); + var runtime_2 = require_commonjs16(); + var runtime_3 = require_commonjs16(); + var runtime_4 = require_commonjs16(); + var runtime_5 = require_commonjs16(); + var cachemetadata_1 = require_cachemetadata(); + var CreateCacheEntryRequest$Type = class extends runtime_5.MessageType { + constructor() { + super("github.actions.results.api.v1.CreateCacheEntryRequest", [ + { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, + { + no: 2, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "version", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); + } + create(value) { + const message = { key: "", version: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* github.actions.results.entities.v1.CacheMetadata metadata */ + 1: + message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); + break; + case /* string key */ + 2: + message.key = reader.string(); + break; + case /* string version */ + 3: + message.version = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } + return message; } - return true; - } - exports2.reflectionEquals = reflectionEquals; - var objectValues = Object.values; - function primitiveEq(type2, a, b) { - if (a === b) - return true; - if (type2 !== reflection_info_1.ScalarType.BYTES) - return false; - let ba = a; - let bb = b; - if (ba.length !== bb.length) - return false; - for (let i = 0; i < ba.length; i++) - if (ba[i] != bb[i]) - return false; - return true; - } - function repeatedPrimitiveEq(type2, a, b) { - if (a.length !== b.length) - return false; - for (let i = 0; i < a.length; i++) - if (!primitiveEq(type2, a[i], b[i])) - return false; - return true; - } - function repeatedMsgEq(type2, a, b) { - if (a.length !== b.length) - return false; - for (let i = 0; i < a.length; i++) - if (!type2.equals(a[i], b[i])) - return false; - return true; - } - } -}); - -// node_modules/@protobuf-ts/runtime/build/commonjs/message-type.js -var require_message_type = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/message-type.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.MessageType = void 0; - var message_type_contract_1 = require_message_type_contract(); - var reflection_info_1 = require_reflection_info(); - var reflection_type_check_1 = require_reflection_type_check(); - var reflection_json_reader_1 = require_reflection_json_reader(); - var reflection_json_writer_1 = require_reflection_json_writer(); - var reflection_binary_reader_1 = require_reflection_binary_reader(); - var reflection_binary_writer_1 = require_reflection_binary_writer(); - var reflection_create_1 = require_reflection_create(); - var reflection_merge_partial_1 = require_reflection_merge_partial(); - var json_typings_1 = require_json_typings(); - var json_format_contract_1 = require_json_format_contract(); - var reflection_equals_1 = require_reflection_equals(); - var binary_writer_1 = require_binary_writer(); - var binary_reader_1 = require_binary_reader(); - var baseDescriptors = Object.getOwnPropertyDescriptors(Object.getPrototypeOf({})); - var messageTypeDescriptor = baseDescriptors[message_type_contract_1.MESSAGE_TYPE] = {}; - var MessageType = class { - constructor(name, fields, options) { - this.defaultCheckDepth = 16; - this.typeName = name; - this.fields = fields.map(reflection_info_1.normalizeFieldInfo); - this.options = options !== null && options !== void 0 ? options : {}; - messageTypeDescriptor.value = this; - this.messagePrototype = Object.create(null, baseDescriptors); - this.refTypeCheck = new reflection_type_check_1.ReflectionTypeCheck(this); - this.refJsonReader = new reflection_json_reader_1.ReflectionJsonReader(this); - this.refJsonWriter = new reflection_json_writer_1.ReflectionJsonWriter(this); - this.refBinReader = new reflection_binary_reader_1.ReflectionBinaryReader(this); - this.refBinWriter = new reflection_binary_writer_1.ReflectionBinaryWriter(this); + internalBinaryWrite(message, writer, options) { + if (message.metadata) + cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.key !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); + if (message.version !== "") + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.CreateCacheEntryRequest = new CreateCacheEntryRequest$Type(); + var CreateCacheEntryResponse$Type = class extends runtime_5.MessageType { + constructor() { + super("github.actions.results.api.v1.CreateCacheEntryResponse", [ + { + no: 1, + name: "ok", + kind: "scalar", + T: 8 + /*ScalarType.BOOL*/ + }, + { + no: 2, + name: "signed_upload_url", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "message", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); } create(value) { - let message = reflection_create_1.reflectionCreate(this); - if (value !== void 0) { - reflection_merge_partial_1.reflectionMergePartial(this, message, value); + const message = { ok: false, signedUploadUrl: "", message: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool ok */ + 1: + message.ok = reader.bool(); + break; + case /* string signed_upload_url */ + 2: + message.signedUploadUrl = reader.string(); + break; + case /* string message */ + 3: + message.message = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } return message; } - /** - * Clone the message. - * - * Unknown fields are discarded. - */ - clone(message) { - let copy = this.create(); - reflection_merge_partial_1.reflectionMergePartial(this, copy, message); - return copy; + internalBinaryWrite(message, writer, options) { + if (message.ok !== false) + writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); + if (message.signedUploadUrl !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl); + if (message.message !== "") + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.message); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - /** - * Determines whether two message of the same type have the same field values. - * Checks for deep equality, traversing repeated fields, oneof groups, maps - * and messages recursively. - * Will also return true if both messages are `undefined`. 
- */ - equals(a, b) { - return reflection_equals_1.reflectionEquals(this, a, b); + }; + exports2.CreateCacheEntryResponse = new CreateCacheEntryResponse$Type(); + var FinalizeCacheEntryUploadRequest$Type = class extends runtime_5.MessageType { + constructor() { + super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [ + { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, + { + no: 2, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "size_bytes", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + }, + { + no: 4, + name: "version", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); } - /** - * Is the given value assignable to our message type - * and contains no [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? - */ - is(arg, depth = this.defaultCheckDepth) { - return this.refTypeCheck.is(arg, depth, false); + create(value) { + const message = { key: "", sizeBytes: "0", version: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; } - /** - * Is the given value assignable to our message type, - * regardless of [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? - */ - isAssignable(arg, depth = this.defaultCheckDepth) { - return this.refTypeCheck.is(arg, depth, true); + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* github.actions.results.entities.v1.CacheMetadata metadata */ + 1: + message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); + break; + case /* string key */ + 2: + message.key = reader.string(); + break; + case /* int64 size_bytes */ + 3: + message.sizeBytes = reader.int64().toString(); + break; + case /* string version */ + 4: + message.version = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; } - /** - * Copy partial data into the target message. - */ - mergePartial(target, source) { - reflection_merge_partial_1.reflectionMergePartial(this, target, source); + internalBinaryWrite(message, writer, options) { + if (message.metadata) + cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.key !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); + if (message.sizeBytes !== "0") + writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes); + if (message.version !== "") + writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - /** - * Create a new message from binary format. 
- */ - fromBinary(data, options) { - let opt = binary_reader_1.binaryReadOptions(options); - return this.internalBinaryRead(opt.readerFactory(data), data.byteLength, opt); + }; + exports2.FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type(); + var FinalizeCacheEntryUploadResponse$Type = class extends runtime_5.MessageType { + constructor() { + super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [ + { + no: 1, + name: "ok", + kind: "scalar", + T: 8 + /*ScalarType.BOOL*/ + }, + { + no: 2, + name: "entry_id", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + }, + { + no: 3, + name: "message", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); } - /** - * Read a new message from a JSON value. - */ - fromJson(json2, options) { - return this.internalJsonRead(json2, json_format_contract_1.jsonReadOptions(options)); + create(value) { + const message = { ok: false, entryId: "0", message: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; } - /** - * Read a new message from a JSON string. - * This is equivalent to `T.fromJson(JSON.parse(json))`. - */ - fromJsonString(json2, options) { - let value = JSON.parse(json2); - return this.fromJson(value, options); + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool ok */ + 1: + message.ok = reader.bool(); + break; + case /* int64 entry_id */ + 2: + message.entryId = reader.int64().toString(); + break; + case /* string message */ + 3: + message.message = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; } - /** - * Write the message to canonical JSON value. - */ - toJson(message, options) { - return this.internalJsonWrite(message, json_format_contract_1.jsonWriteOptions(options)); + internalBinaryWrite(message, writer, options) { + if (message.ok !== false) + writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); + if (message.entryId !== "0") + writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId); + if (message.message !== "") + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.message); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - /** - * Convert the message to canonical JSON string. - * This is equivalent to `JSON.stringify(T.toJson(t))` - */ - toJsonString(message, options) { - var _a; - let value = this.toJson(message, options); - return JSON.stringify(value, null, (_a = options === null || options === void 0 ? void 0 : options.prettySpaces) !== null && _a !== void 0 ? 
_a : 0); + }; + exports2.FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type(); + var GetCacheEntryDownloadURLRequest$Type = class extends runtime_5.MessageType { + constructor() { + super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [ + { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, + { + no: 2, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "restore_keys", + kind: "scalar", + repeat: 2, + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 4, + name: "version", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); } - /** - * Write the message to binary format. - */ - toBinary(message, options) { - let opt = binary_writer_1.binaryWriteOptions(options); - return this.internalBinaryWrite(message, opt.writerFactory(), opt).finish(); + create(value) { + const message = { key: "", restoreKeys: [], version: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; } - /** - * This is an internal method. If you just want to read a message from - * JSON, use `fromJson()` or `fromJsonString()`. - * - * Reads JSON value and merges the fields into the target - * according to protobuf rules. If the target is omitted, - * a new instance is created first. - */ - internalJsonRead(json2, options, target) { - if (json2 !== null && typeof json2 == "object" && !Array.isArray(json2)) { - let message = target !== null && target !== void 0 ? target : this.create(); - this.refJsonReader.read(json2, message, options); - return message; + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* github.actions.results.entities.v1.CacheMetadata metadata */ + 1: + message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); + break; + case /* string key */ + 2: + message.key = reader.string(); + break; + case /* repeated string restore_keys */ + 3: + message.restoreKeys.push(reader.string()); + break; + case /* string version */ + 4: + message.version = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } - throw new Error(`Unable to parse message ${this.typeName} from JSON ${json_typings_1.typeofJsonValue(json2)}.`); - } - /** - * This is an internal method. If you just want to write a message - * to JSON, use `toJson()` or `toJsonString(). - * - * Writes JSON value and returns it. - */ - internalJsonWrite(message, options) { - return this.refJsonWriter.write(message, options); + return message; } - /** - * This is an internal method. If you just want to write a message - * in binary format, use `toBinary()`. - * - * Serializes the message in binary format and appends it to the given - * writer. Returns passed writer. 
- */ internalBinaryWrite(message, writer, options) { - this.refBinWriter.write(message, writer, options); + if (message.metadata) + cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.key !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); + for (let i = 0; i < message.restoreKeys.length; i++) + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]); + if (message.version !== "") + writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } - /** - * This is an internal method. If you just want to read a message from - * binary data, use `fromBinary()`. - * - * Reads data from binary format and merges the fields into - * the target according to protobuf rules. If the target is - * omitted, a new instance is created first. - */ + }; + exports2.GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type(); + var GetCacheEntryDownloadURLResponse$Type = class extends runtime_5.MessageType { + constructor() { + super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [ + { + no: 1, + name: "ok", + kind: "scalar", + T: 8 + /*ScalarType.BOOL*/ + }, + { + no: 2, + name: "signed_download_url", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "matched_key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); + } + create(value) { + const message = { ok: false, signedDownloadUrl: "", matchedKey: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(); - this.refBinReader.read(reader, message, options, length); + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool ok */ + 1: + message.ok = reader.bool(); + break; + case /* string signed_download_url */ + 2: + message.signedDownloadUrl = reader.string(); + break; + case /* string matched_key */ + 3: + message.matchedKey = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } return message; } + internalBinaryWrite(message, writer, options) { + if (message.ok !== false) + writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); + if (message.signedDownloadUrl !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedDownloadUrl); + if (message.matchedKey !== "") + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.matchedKey); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } }; - exports2.MessageType = MessageType; - } -}); - -// node_modules/@protobuf-ts/runtime/build/commonjs/reflection-contains-message-type.js -var require_reflection_contains_message_type = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/reflection-contains-message-type.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.containsMessageType = void 0; - var message_type_contract_1 = require_message_type_contract(); - function containsMessageType(msg) { - return msg[message_type_contract_1.MESSAGE_TYPE] != null; - } - exports2.containsMessageType = containsMessageType; + exports2.GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type(); + exports2.CacheService = new runtime_rpc_1.ServiceType("github.actions.results.api.v1.CacheService", [ + { name: "CreateCacheEntry", options: {}, I: exports2.CreateCacheEntryRequest, O: exports2.CreateCacheEntryResponse }, + { name: "FinalizeCacheEntryUpload", options: {}, I: exports2.FinalizeCacheEntryUploadRequest, O: exports2.FinalizeCacheEntryUploadResponse }, + { name: "GetCacheEntryDownloadURL", options: {}, I: exports2.GetCacheEntryDownloadURLRequest, O: exports2.GetCacheEntryDownloadURLResponse } + ]); } }); -// node_modules/@protobuf-ts/runtime/build/commonjs/enum-object.js -var require_enum_object = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/enum-object.js"(exports2) { +// node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp-client.js +var require_cache_twirp_client = __commonJS({ + "node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp-client.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.listEnumNumbers = exports2.listEnumNames = exports2.listEnumValues = exports2.isEnumObject = void 0; - function isEnumObject(arg) { - if (typeof arg != "object" || arg === null) { - return false; + exports2.CacheServiceClientProtobuf = exports2.CacheServiceClientJSON = void 0; + var cache_1 = require_cache4(); + var CacheServiceClientJSON = class { + constructor(rpc) { + this.rpc = rpc; + this.CreateCacheEntry.bind(this); + this.FinalizeCacheEntryUpload.bind(this); + this.GetCacheEntryDownloadURL.bind(this); } - if (!arg.hasOwnProperty(0)) { - return false; + CreateCacheEntry(request2) { + const data = cache_1.CreateCacheEntryRequest.toJson(request2, { + useProtoFieldName: true, + emitDefaultValues: false + }); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/json", data); + return promise.then((data2) => cache_1.CreateCacheEntryResponse.fromJson(data2, { + ignoreUnknownFields: true + })); } - for (let k of Object.keys(arg)) { - let num = parseInt(k); - if (!Number.isNaN(num)) { - let nam = arg[num]; - if (nam === void 0) - return false; - if (arg[nam] !== num) - return false; - } else { - let num2 = arg[k]; - if (num2 === void 0) - return false; - if (typeof num2 !== "number") - return false; - if (arg[num2] === void 0) - return false; - } + FinalizeCacheEntryUpload(request2) { + const data = cache_1.FinalizeCacheEntryUploadRequest.toJson(request2, { + useProtoFieldName: true, + emitDefaultValues: false + }); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/json", data); + return promise.then((data2) => 
cache_1.FinalizeCacheEntryUploadResponse.fromJson(data2, { + ignoreUnknownFields: true + })); } - return true; - } - exports2.isEnumObject = isEnumObject; - function listEnumValues(enumObject) { - if (!isEnumObject(enumObject)) - throw new Error("not a typescript enum object"); - let values = []; - for (let [name, number] of Object.entries(enumObject)) - if (typeof number == "number") - values.push({ name, number }); - return values; - } - exports2.listEnumValues = listEnumValues; - function listEnumNames(enumObject) { - return listEnumValues(enumObject).map((val) => val.name); - } - exports2.listEnumNames = listEnumNames; - function listEnumNumbers(enumObject) { - return listEnumValues(enumObject).map((val) => val.number).filter((num, index, arr) => arr.indexOf(num) == index); - } - exports2.listEnumNumbers = listEnumNumbers; - } -}); - -// node_modules/@protobuf-ts/runtime/build/commonjs/index.js -var require_commonjs16 = __commonJS({ - "node_modules/@protobuf-ts/runtime/build/commonjs/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - var json_typings_1 = require_json_typings(); - Object.defineProperty(exports2, "typeofJsonValue", { enumerable: true, get: function() { - return json_typings_1.typeofJsonValue; - } }); - Object.defineProperty(exports2, "isJsonObject", { enumerable: true, get: function() { - return json_typings_1.isJsonObject; - } }); - var base64_1 = require_base642(); - Object.defineProperty(exports2, "base64decode", { enumerable: true, get: function() { - return base64_1.base64decode; - } }); - Object.defineProperty(exports2, "base64encode", { enumerable: true, get: function() { - return base64_1.base64encode; - } }); - var protobufjs_utf8_1 = require_protobufjs_utf8(); - Object.defineProperty(exports2, "utf8read", { enumerable: true, get: function() { - return protobufjs_utf8_1.utf8read; - } }); - var binary_format_contract_1 = require_binary_format_contract(); - Object.defineProperty(exports2, "WireType", { enumerable: true, get: function() { - return binary_format_contract_1.WireType; - } }); - Object.defineProperty(exports2, "mergeBinaryOptions", { enumerable: true, get: function() { - return binary_format_contract_1.mergeBinaryOptions; - } }); - Object.defineProperty(exports2, "UnknownFieldHandler", { enumerable: true, get: function() { - return binary_format_contract_1.UnknownFieldHandler; - } }); - var binary_reader_1 = require_binary_reader(); - Object.defineProperty(exports2, "BinaryReader", { enumerable: true, get: function() { - return binary_reader_1.BinaryReader; - } }); - Object.defineProperty(exports2, "binaryReadOptions", { enumerable: true, get: function() { - return binary_reader_1.binaryReadOptions; - } }); - var binary_writer_1 = require_binary_writer(); - Object.defineProperty(exports2, "BinaryWriter", { enumerable: true, get: function() { - return binary_writer_1.BinaryWriter; - } }); - Object.defineProperty(exports2, "binaryWriteOptions", { enumerable: true, get: function() { - return binary_writer_1.binaryWriteOptions; - } }); - var pb_long_1 = require_pb_long(); - Object.defineProperty(exports2, "PbLong", { enumerable: true, get: function() { - return pb_long_1.PbLong; - } }); - Object.defineProperty(exports2, "PbULong", { enumerable: true, get: function() { - return pb_long_1.PbULong; - } }); - var json_format_contract_1 = require_json_format_contract(); - Object.defineProperty(exports2, "jsonReadOptions", { enumerable: true, get: function() { - return json_format_contract_1.jsonReadOptions; 
- } }); - Object.defineProperty(exports2, "jsonWriteOptions", { enumerable: true, get: function() { - return json_format_contract_1.jsonWriteOptions; - } }); - Object.defineProperty(exports2, "mergeJsonOptions", { enumerable: true, get: function() { - return json_format_contract_1.mergeJsonOptions; - } }); - var message_type_contract_1 = require_message_type_contract(); - Object.defineProperty(exports2, "MESSAGE_TYPE", { enumerable: true, get: function() { - return message_type_contract_1.MESSAGE_TYPE; - } }); - var message_type_1 = require_message_type(); - Object.defineProperty(exports2, "MessageType", { enumerable: true, get: function() { - return message_type_1.MessageType; - } }); - var reflection_info_1 = require_reflection_info(); - Object.defineProperty(exports2, "ScalarType", { enumerable: true, get: function() { - return reflection_info_1.ScalarType; - } }); - Object.defineProperty(exports2, "LongType", { enumerable: true, get: function() { - return reflection_info_1.LongType; - } }); - Object.defineProperty(exports2, "RepeatType", { enumerable: true, get: function() { - return reflection_info_1.RepeatType; - } }); - Object.defineProperty(exports2, "normalizeFieldInfo", { enumerable: true, get: function() { - return reflection_info_1.normalizeFieldInfo; - } }); - Object.defineProperty(exports2, "readFieldOptions", { enumerable: true, get: function() { - return reflection_info_1.readFieldOptions; - } }); - Object.defineProperty(exports2, "readFieldOption", { enumerable: true, get: function() { - return reflection_info_1.readFieldOption; - } }); - Object.defineProperty(exports2, "readMessageOption", { enumerable: true, get: function() { - return reflection_info_1.readMessageOption; - } }); - var reflection_type_check_1 = require_reflection_type_check(); - Object.defineProperty(exports2, "ReflectionTypeCheck", { enumerable: true, get: function() { - return reflection_type_check_1.ReflectionTypeCheck; - } }); - var reflection_create_1 = require_reflection_create(); - Object.defineProperty(exports2, "reflectionCreate", { enumerable: true, get: function() { - return reflection_create_1.reflectionCreate; - } }); - var reflection_scalar_default_1 = require_reflection_scalar_default(); - Object.defineProperty(exports2, "reflectionScalarDefault", { enumerable: true, get: function() { - return reflection_scalar_default_1.reflectionScalarDefault; - } }); - var reflection_merge_partial_1 = require_reflection_merge_partial(); - Object.defineProperty(exports2, "reflectionMergePartial", { enumerable: true, get: function() { - return reflection_merge_partial_1.reflectionMergePartial; - } }); - var reflection_equals_1 = require_reflection_equals(); - Object.defineProperty(exports2, "reflectionEquals", { enumerable: true, get: function() { - return reflection_equals_1.reflectionEquals; - } }); - var reflection_binary_reader_1 = require_reflection_binary_reader(); - Object.defineProperty(exports2, "ReflectionBinaryReader", { enumerable: true, get: function() { - return reflection_binary_reader_1.ReflectionBinaryReader; - } }); - var reflection_binary_writer_1 = require_reflection_binary_writer(); - Object.defineProperty(exports2, "ReflectionBinaryWriter", { enumerable: true, get: function() { - return reflection_binary_writer_1.ReflectionBinaryWriter; - } }); - var reflection_json_reader_1 = require_reflection_json_reader(); - Object.defineProperty(exports2, "ReflectionJsonReader", { enumerable: true, get: function() { - return reflection_json_reader_1.ReflectionJsonReader; - } }); - var 
reflection_json_writer_1 = require_reflection_json_writer(); - Object.defineProperty(exports2, "ReflectionJsonWriter", { enumerable: true, get: function() { - return reflection_json_writer_1.ReflectionJsonWriter; - } }); - var reflection_contains_message_type_1 = require_reflection_contains_message_type(); - Object.defineProperty(exports2, "containsMessageType", { enumerable: true, get: function() { - return reflection_contains_message_type_1.containsMessageType; - } }); - var oneof_1 = require_oneof(); - Object.defineProperty(exports2, "isOneofGroup", { enumerable: true, get: function() { - return oneof_1.isOneofGroup; - } }); - Object.defineProperty(exports2, "setOneofValue", { enumerable: true, get: function() { - return oneof_1.setOneofValue; - } }); - Object.defineProperty(exports2, "getOneofValue", { enumerable: true, get: function() { - return oneof_1.getOneofValue; - } }); - Object.defineProperty(exports2, "clearOneofValue", { enumerable: true, get: function() { - return oneof_1.clearOneofValue; - } }); - Object.defineProperty(exports2, "getSelectedOneofValue", { enumerable: true, get: function() { - return oneof_1.getSelectedOneofValue; - } }); - var enum_object_1 = require_enum_object(); - Object.defineProperty(exports2, "listEnumValues", { enumerable: true, get: function() { - return enum_object_1.listEnumValues; - } }); - Object.defineProperty(exports2, "listEnumNames", { enumerable: true, get: function() { - return enum_object_1.listEnumNames; - } }); - Object.defineProperty(exports2, "listEnumNumbers", { enumerable: true, get: function() { - return enum_object_1.listEnumNumbers; - } }); - Object.defineProperty(exports2, "isEnumObject", { enumerable: true, get: function() { - return enum_object_1.isEnumObject; - } }); - var lower_camel_case_1 = require_lower_camel_case(); - Object.defineProperty(exports2, "lowerCamelCase", { enumerable: true, get: function() { - return lower_camel_case_1.lowerCamelCase; - } }); - var assert_1 = require_assert(); - Object.defineProperty(exports2, "assert", { enumerable: true, get: function() { - return assert_1.assert; - } }); - Object.defineProperty(exports2, "assertNever", { enumerable: true, get: function() { - return assert_1.assertNever; - } }); - Object.defineProperty(exports2, "assertInt32", { enumerable: true, get: function() { - return assert_1.assertInt32; - } }); - Object.defineProperty(exports2, "assertUInt32", { enumerable: true, get: function() { - return assert_1.assertUInt32; - } }); - Object.defineProperty(exports2, "assertFloat32", { enumerable: true, get: function() { - return assert_1.assertFloat32; - } }); - } -}); - -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/reflection-info.js -var require_reflection_info2 = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/reflection-info.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.readServiceOption = exports2.readMethodOption = exports2.readMethodOptions = exports2.normalizeMethodInfo = void 0; - var runtime_1 = require_commonjs16(); - function normalizeMethodInfo(method, service) { - var _a, _b, _c; - let m = method; - m.service = service; - m.localName = (_a = m.localName) !== null && _a !== void 0 ? _a : runtime_1.lowerCamelCase(m.name); - m.serverStreaming = !!m.serverStreaming; - m.clientStreaming = !!m.clientStreaming; - m.options = (_b = m.options) !== null && _b !== void 0 ? _b : {}; - m.idempotency = (_c = m.idempotency) !== null && _c !== void 0 ? 
_c : void 0; - return m; - } - exports2.normalizeMethodInfo = normalizeMethodInfo; - function readMethodOptions(service, methodName, extensionName, extensionType) { - var _a; - const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; - return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : void 0; - } - exports2.readMethodOptions = readMethodOptions; - function readMethodOption(service, methodName, extensionName, extensionType) { - var _a; - const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; - if (!options) { - return void 0; + GetCacheEntryDownloadURL(request2) { + const data = cache_1.GetCacheEntryDownloadURLRequest.toJson(request2, { + useProtoFieldName: true, + emitDefaultValues: false + }); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/json", data); + return promise.then((data2) => cache_1.GetCacheEntryDownloadURLResponse.fromJson(data2, { + ignoreUnknownFields: true + })); + } + }; + exports2.CacheServiceClientJSON = CacheServiceClientJSON; + var CacheServiceClientProtobuf = class { + constructor(rpc) { + this.rpc = rpc; + this.CreateCacheEntry.bind(this); + this.FinalizeCacheEntryUpload.bind(this); + this.GetCacheEntryDownloadURL.bind(this); } - const optionVal = options[extensionName]; - if (optionVal === void 0) { - return optionVal; + CreateCacheEntry(request2) { + const data = cache_1.CreateCacheEntryRequest.toBinary(request2); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/protobuf", data); + return promise.then((data2) => cache_1.CreateCacheEntryResponse.fromBinary(data2)); } - return extensionType ? extensionType.fromJson(optionVal) : optionVal; - } - exports2.readMethodOption = readMethodOption; - function readServiceOption(service, extensionName, extensionType) { - const options = service.options; - if (!options) { - return void 0; + FinalizeCacheEntryUpload(request2) { + const data = cache_1.FinalizeCacheEntryUploadRequest.toBinary(request2); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/protobuf", data); + return promise.then((data2) => cache_1.FinalizeCacheEntryUploadResponse.fromBinary(data2)); } - const optionVal = options[extensionName]; - if (optionVal === void 0) { - return optionVal; + GetCacheEntryDownloadURL(request2) { + const data = cache_1.GetCacheEntryDownloadURLRequest.toBinary(request2); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/protobuf", data); + return promise.then((data2) => cache_1.GetCacheEntryDownloadURLResponse.fromBinary(data2)); } - return extensionType ? 
extensionType.fromJson(optionVal) : optionVal; - } - exports2.readServiceOption = readServiceOption; + }; + exports2.CacheServiceClientProtobuf = CacheServiceClientProtobuf; } }); -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/service-type.js -var require_service_type = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/service-type.js"(exports2) { +// node_modules/@actions/cache/lib/internal/shared/util.js +var require_util19 = __commonJS({ + "node_modules/@actions/cache/lib/internal/shared/util.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ServiceType = void 0; - var reflection_info_1 = require_reflection_info2(); - var ServiceType = class { - constructor(typeName, methods, options) { - this.typeName = typeName; - this.methods = methods.map((i) => reflection_info_1.normalizeMethodInfo(i, this)); - this.options = options !== null && options !== void 0 ? options : {}; + exports2.maskSigUrl = maskSigUrl; + exports2.maskSecretUrls = maskSecretUrls; + var core_1 = require_core(); + function maskSigUrl(url) { + if (!url) + return; + try { + const parsedUrl = new URL(url); + const signature = parsedUrl.searchParams.get("sig"); + if (signature) { + (0, core_1.setSecret)(signature); + (0, core_1.setSecret)(encodeURIComponent(signature)); + } + } catch (error3) { + (0, core_1.debug)(`Failed to parse URL: ${url} ${error3 instanceof Error ? error3.message : String(error3)}`); } - }; - exports2.ServiceType = ServiceType; + } + function maskSecretUrls(body) { + if (typeof body !== "object" || body === null) { + (0, core_1.debug)("body is not an object or is null"); + return; + } + if ("signed_upload_url" in body && typeof body.signed_upload_url === "string") { + maskSigUrl(body.signed_upload_url); + } + if ("signed_download_url" in body && typeof body.signed_download_url === "string") { + maskSigUrl(body.signed_download_url); + } + } } }); -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-error.js -var require_rpc_error = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-error.js"(exports2) { +// node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.js +var require_cacheTwirpClient = __commonJS({ + "node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.js"(exports2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.RpcError = void 0; - var RpcError = class extends Error { - constructor(message, code = "UNKNOWN", meta) { - super(message); - this.name = "RpcError"; - Object.setPrototypeOf(this, new.target.prototype); - this.code = code; - this.meta = meta !== null && meta !== void 0 ? meta : {}; + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? 
value : new P(function(resolve2) { + resolve2(value); + }); } - toString() { - const l = [this.name + ": " + this.message]; - if (this.code) { - l.push(""); - l.push("Code: " + this.code); + return new (P || (P = Promise))(function(resolve2, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } } - if (this.serviceName && this.methodName) { - l.push("Method: " + this.serviceName + "/" + this.methodName); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } } - let m = Object.entries(this.meta); - if (m.length) { - l.push(""); - l.push("Meta:"); - for (let [k, v] of m) { - l.push(` ${k}: ${v}`); + function step(result) { + result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.internalCacheTwirpClient = internalCacheTwirpClient; + var core_1 = require_core(); + var user_agent_1 = require_user_agent(); + var errors_1 = require_errors3(); + var config_1 = require_config(); + var cacheUtils_1 = require_cacheUtils(); + var auth_1 = require_auth(); + var http_client_1 = require_lib(); + var cache_twirp_client_1 = require_cache_twirp_client(); + var util_1 = require_util19(); + var CacheServiceClient = class { + constructor(userAgent2, maxAttempts, baseRetryIntervalMilliseconds, retryMultiplier) { + this.maxAttempts = 5; + this.baseRetryIntervalMilliseconds = 3e3; + this.retryMultiplier = 1.5; + const token = (0, cacheUtils_1.getRuntimeToken)(); + this.baseUrl = (0, config_1.getCacheServiceURL)(); + if (maxAttempts) { + this.maxAttempts = maxAttempts; + } + if (baseRetryIntervalMilliseconds) { + this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds; + } + if (retryMultiplier) { + this.retryMultiplier = retryMultiplier; + } + this.httpClient = new http_client_1.HttpClient(userAgent2, [ + new auth_1.BearerCredentialHandler(token) + ]); + } + // This function satisfies the Rpc interface. It is compatible with the JSON + // JSON generated client. 
+ request(service, method, contentType, data) { + return __awaiter2(this, void 0, void 0, function* () { + const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href; + (0, core_1.debug)(`[Request] ${method} ${url}`); + const headers = { + "Content-Type": contentType + }; + try { + const { body } = yield this.retryableRequest(() => __awaiter2(this, void 0, void 0, function* () { + return this.httpClient.post(url, JSON.stringify(data), headers); + })); + return body; + } catch (error3) { + throw new Error(`Failed to ${method}: ${error3.message}`); + } + }); + } + retryableRequest(operation) { + return __awaiter2(this, void 0, void 0, function* () { + let attempt = 0; + let errorMessage = ""; + let rawBody = ""; + while (attempt < this.maxAttempts) { + let isRetryable = false; + try { + const response = yield operation(); + const statusCode = response.message.statusCode; + rawBody = yield response.readBody(); + (0, core_1.debug)(`[Response] - ${response.message.statusCode}`); + (0, core_1.debug)(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`); + const body = JSON.parse(rawBody); + (0, util_1.maskSecretUrls)(body); + (0, core_1.debug)(`Body: ${JSON.stringify(body, null, 2)}`); + if (this.isSuccessStatusCode(statusCode)) { + return { response, body }; + } + isRetryable = this.isRetryableHttpStatusCode(statusCode); + errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`; + if (body.msg) { + if (errors_1.UsageError.isUsageErrorMessage(body.msg)) { + throw new errors_1.UsageError(); + } + errorMessage = `${errorMessage}: ${body.msg}`; + } + if (statusCode === http_client_1.HttpCodes.TooManyRequests) { + const retryAfterHeader = response.message.headers["retry-after"]; + if (retryAfterHeader) { + const parsedSeconds = parseInt(retryAfterHeader, 10); + if (!isNaN(parsedSeconds) && parsedSeconds > 0) { + (0, core_1.warning)(`You've hit a rate limit, your rate limit will reset in ${parsedSeconds} seconds`); + } + } + throw new errors_1.RateLimitError(`Rate limited: ${errorMessage}`); + } + } catch (error3) { + if (error3 instanceof SyntaxError) { + (0, core_1.debug)(`Raw Body: ${rawBody}`); + } + if (error3 instanceof errors_1.UsageError) { + throw error3; + } + if (error3 instanceof errors_1.RateLimitError) { + throw error3; + } + if (errors_1.NetworkError.isNetworkErrorCode(error3 === null || error3 === void 0 ? void 0 : error3.code)) { + throw new errors_1.NetworkError(error3 === null || error3 === void 0 ? void 0 : error3.code); + } + isRetryable = true; + errorMessage = error3.message; + } + if (!isRetryable) { + throw new Error(`Received non-retryable error: ${errorMessage}`); + } + if (attempt + 1 === this.maxAttempts) { + throw new Error(`Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`); + } + const retryTimeMilliseconds = this.getExponentialRetryTimeMilliseconds(attempt); + (0, core_1.info)(`Attempt ${attempt + 1} of ${this.maxAttempts} failed with error: ${errorMessage}. 
Retrying request in ${retryTimeMilliseconds} ms...`); + yield this.sleep(retryTimeMilliseconds); + attempt++; } + throw new Error(`Request failed`); + }); + } + isSuccessStatusCode(statusCode) { + if (!statusCode) + return false; + return statusCode >= 200 && statusCode < 300; + } + isRetryableHttpStatusCode(statusCode) { + if (!statusCode) + return false; + const retryableStatusCodes = [ + http_client_1.HttpCodes.BadGateway, + http_client_1.HttpCodes.GatewayTimeout, + http_client_1.HttpCodes.InternalServerError, + http_client_1.HttpCodes.ServiceUnavailable + ]; + return retryableStatusCodes.includes(statusCode); + } + sleep(milliseconds) { + return __awaiter2(this, void 0, void 0, function* () { + return new Promise((resolve2) => setTimeout(resolve2, milliseconds)); + }); + } + getExponentialRetryTimeMilliseconds(attempt) { + if (attempt < 0) { + throw new Error("attempt should be a positive integer"); } - return l.join("\n"); + if (attempt === 0) { + return this.baseRetryIntervalMilliseconds; + } + const minTime = this.baseRetryIntervalMilliseconds * Math.pow(this.retryMultiplier, attempt); + const maxTime = minTime * this.retryMultiplier; + return Math.trunc(Math.random() * (maxTime - minTime) + minTime); } }; - exports2.RpcError = RpcError; + function internalCacheTwirpClient(options) { + const client = new CacheServiceClient((0, user_agent_1.getUserAgentString)(), options === null || options === void 0 ? void 0 : options.maxAttempts, options === null || options === void 0 ? void 0 : options.retryIntervalMs, options === null || options === void 0 ? void 0 : options.retryMultiplier); + return new cache_twirp_client_1.CacheServiceClientJSON(client); + } } }); -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-options.js -var require_rpc_options = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-options.js"(exports2) { +// node_modules/@actions/cache/lib/internal/tar.js +var require_tar = __commonJS({ + "node_modules/@actions/cache/lib/internal/tar.js"(exports2) { "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + } + __setModuleDefault2(result, mod); + return result; + }; + })(); + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? 
value : new P(function(resolve2) { + resolve2(value); + }); + } + return new (P || (P = Promise))(function(resolve2, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + function step(result) { + result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.mergeRpcOptions = void 0; - var runtime_1 = require_commonjs16(); - function mergeRpcOptions(defaults, options) { - if (!options) - return defaults; - let o = {}; - copy(defaults, o); - copy(options, o); - for (let key of Object.keys(options)) { - let val = options[key]; - switch (key) { - case "jsonOptions": - o.jsonOptions = runtime_1.mergeJsonOptions(defaults.jsonOptions, o.jsonOptions); + exports2.listTar = listTar; + exports2.extractTar = extractTar2; + exports2.createTar = createTar; + var exec_1 = require_exec(); + var io4 = __importStar2(require_io()); + var fs_1 = require("fs"); + var path4 = __importStar2(require("path")); + var utils = __importStar2(require_cacheUtils()); + var constants_1 = require_constants12(); + var IS_WINDOWS = process.platform === "win32"; + function getTarPath() { + return __awaiter2(this, void 0, void 0, function* () { + switch (process.platform) { + case "win32": { + const gnuTar = yield utils.getGnuTarPathOnWindows(); + const systemTar = constants_1.SystemTarPathOnWindows; + if (gnuTar) { + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } else if ((0, fs_1.existsSync)(systemTar)) { + return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; + } break; - case "binaryOptions": - o.binaryOptions = runtime_1.mergeBinaryOptions(defaults.binaryOptions, o.binaryOptions); + } + case "darwin": { + const gnuTar = yield io4.which("gtar", false); + if (gnuTar) { + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } else { + return { + path: yield io4.which("tar", true), + type: constants_1.ArchiveToolType.BSD + }; + } + } + default: break; - case "meta": - o.meta = {}; - copy(defaults.meta, o.meta); - copy(options.meta, o.meta); + } + return { + path: yield io4.which("tar", true), + type: constants_1.ArchiveToolType.GNU + }; + }); + } + function getTarArgs(tarPath_1, compressionMethod_1, type_1) { + return __awaiter2(this, arguments, void 0, function* (tarPath, compressionMethod, type2, archivePath = "") { + const args = [`"${tarPath.path}"`]; + const cacheFileName = utils.getCacheFileName(compressionMethod); + const tarFile = "cache.tar"; + const workingDirectory = getWorkingDirectory(); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; + switch (type2) { + case "create": + args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path4.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path4.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path4.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); break; - case "interceptors": - o.interceptors = defaults.interceptors ? defaults.interceptors.concat(val) : val.concat(); + case "extract": + args.push("-xf", BSD_TAR_ZSTD ? 
tarFile : archivePath.replace(new RegExp(`\\${path4.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path4.sep}`, "g"), "/")); break; + case "list": + args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path4.sep}`, "g"), "/"), "-P"); + break; + } + if (tarPath.type === constants_1.ArchiveToolType.GNU) { + switch (process.platform) { + case "win32": + args.push("--force-local"); + break; + case "darwin": + args.push("--delay-directory-restore"); + break; + } + } + return args; + }); + } + function getCommands(compressionMethod_1, type_1) { + return __awaiter2(this, arguments, void 0, function* (compressionMethod, type2, archivePath = "") { + let args; + const tarPath = yield getTarPath(); + const tarArgs = yield getTarArgs(tarPath, compressionMethod, type2, archivePath); + const compressionArgs = type2 !== "create" ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) : yield getCompressionProgram(tarPath, compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; + if (BSD_TAR_ZSTD && type2 !== "create") { + args = [[...compressionArgs].join(" "), [...tarArgs].join(" ")]; + } else { + args = [[...tarArgs].join(" "), [...compressionArgs].join(" ")]; + } + if (BSD_TAR_ZSTD) { + return args; + } + return [args.join(" ")]; + }); + } + function getWorkingDirectory() { + var _a; + return (_a = process.env["GITHUB_WORKSPACE"]) !== null && _a !== void 0 ? _a : process.cwd(); + } + function getDecompressionProgram(tarPath, compressionMethod, archivePath) { + return __awaiter2(this, void 0, void 0, function* () { + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD ? [ + "zstd -d --long=30 --force -o", + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path4.sep}`, "g"), "/") + ] : [ + "--use-compress-program", + IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD ? [ + "zstd -d --force -o", + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path4.sep}`, "g"), "/") + ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -d"' : "unzstd"]; + default: + return ["-z"]; + } + }); + } + function getCompressionProgram(tarPath, compressionMethod) { + return __awaiter2(this, void 0, void 0, function* () { + const cacheFileName = utils.getCacheFileName(compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD ? [ + "zstd -T0 --long=30 --force -o", + cacheFileName.replace(new RegExp(`\\${path4.sep}`, "g"), "/"), + constants_1.TarFilename + ] : [ + "--use-compress-program", + IS_WINDOWS ? '"zstd -T0 --long=30"' : "zstdmt --long=30" + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD ? [ + "zstd -T0 --force -o", + cacheFileName.replace(new RegExp(`\\${path4.sep}`, "g"), "/"), + constants_1.TarFilename + ] : ["--use-compress-program", IS_WINDOWS ? 
'"zstd -T0"' : "zstdmt"]; + default: + return ["-z"]; + } + }); + } + function execCommands(commands, cwd) { + return __awaiter2(this, void 0, void 0, function* () { + for (const command of commands) { + try { + yield (0, exec_1.exec)(command, void 0, { + cwd, + env: Object.assign(Object.assign({}, process.env), { MSYS: "winsymlinks:nativestrict" }) + }); + } catch (error3) { + throw new Error(`${command.split(" ")[0]} failed with error: ${error3 === null || error3 === void 0 ? void 0 : error3.message}`); + } } - } - return o; + }); } - exports2.mergeRpcOptions = mergeRpcOptions; - function copy(a, into) { - if (!a) - return; - let c = into; - for (let [k, v] of Object.entries(a)) { - if (v instanceof Date) - c[k] = new Date(v.getTime()); - else if (Array.isArray(v)) - c[k] = v.concat(); - else - c[k] = v; - } + function listTar(archivePath, compressionMethod) { + return __awaiter2(this, void 0, void 0, function* () { + const commands = yield getCommands(compressionMethod, "list", archivePath); + yield execCommands(commands); + }); + } + function extractTar2(archivePath, compressionMethod) { + return __awaiter2(this, void 0, void 0, function* () { + const workingDirectory = getWorkingDirectory(); + yield io4.mkdirP(workingDirectory); + const commands = yield getCommands(compressionMethod, "extract", archivePath); + yield execCommands(commands); + }); + } + function createTar(archiveFolder, sourceDirectories, compressionMethod) { + return __awaiter2(this, void 0, void 0, function* () { + (0, fs_1.writeFileSync)(path4.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); + const commands = yield getCommands(compressionMethod, "create"); + yield execCommands(commands, archiveFolder); + }); } } }); -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/deferred.js -var require_deferred = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/deferred.js"(exports2) { +// node_modules/@actions/cache/lib/cache.js +var require_cache5 = __commonJS({ + "node_modules/@actions/cache/lib/cache.js"(exports2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Deferred = exports2.DeferredState = void 0; - var DeferredState; - (function(DeferredState2) { - DeferredState2[DeferredState2["PENDING"] = 0] = "PENDING"; - DeferredState2[DeferredState2["REJECTED"] = 1] = "REJECTED"; - DeferredState2[DeferredState2["RESOLVED"] = 2] = "RESOLVED"; - })(DeferredState = exports2.DeferredState || (exports2.DeferredState = {})); - var Deferred = class { - /** - * @param preventUnhandledRejectionWarning - prevents the warning - * "Unhandled Promise rejection" by adding a noop rejection handler. - * Working with calls returned from the runtime-rpc package in an - * async function usually means awaiting one call property after - * the other. This means that the "status" is not being awaited when - * an earlier await for the "headers" is rejected. This causes the - * "unhandled promise reject" warning. A more correct behaviour for - * calls might be to become aware whether at least one of the - * promises is handled and swallow the rejection warning for the - * others. - */ - constructor(preventUnhandledRejectionWarning = true) { - this._state = DeferredState.PENDING; - this._promise = new Promise((resolve2, reject) => { - this._resolve = resolve2; - this._reject = reject; - }); - if (preventUnhandledRejectionWarning) { - this._promise.catch((_) => { - }); - } - } - /** - * Get the current state of the promise. 
- */ - get state() { - return this._state; - } - /** - * Get the deferred promise. - */ - get promise() { - return this._promise; - } - /** - * Resolve the promise. Throws if the promise is already resolved or rejected. - */ - resolve(value) { - if (this.state !== DeferredState.PENDING) - throw new Error(`cannot resolve ${DeferredState[this.state].toLowerCase()}`); - this._resolve(value); - this._state = DeferredState.RESOLVED; - } - /** - * Reject the promise. Throws if the promise is already resolved or rejected. - */ - reject(reason) { - if (this.state !== DeferredState.PENDING) - throw new Error(`cannot reject ${DeferredState[this.state].toLowerCase()}`); - this._reject(reason); - this._state = DeferredState.REJECTED; - } - /** - * Resolve the promise. Ignore if not pending. - */ - resolvePending(val) { - if (this._state === DeferredState.PENDING) - this.resolve(val); + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; } - /** - * Reject the promise. Ignore if not pending. - */ - rejectPending(reason) { - if (this._state === DeferredState.PENDING) - this.reject(reason); + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + } + __setModuleDefault2(result, mod); + return result; + }; + })(); + var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve2) { + resolve2(value); + }); } + return new (P || (P = Promise))(function(resolve2, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + function step(result) { + result.done ? 
resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; - exports2.Deferred = Deferred; - } -}); - -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-output-stream.js -var require_rpc_output_stream = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-output-stream.js"(exports2) { - "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.RpcOutputStreamController = void 0; - var deferred_1 = require_deferred(); - var runtime_1 = require_commonjs16(); - var RpcOutputStreamController = class { - constructor() { - this._lis = { - nxt: [], - msg: [], - err: [], - cmp: [] - }; - this._closed = false; - this._itState = { q: [] }; - } - // --- RpcOutputStream callback API - onNext(callback) { - return this.addLis(callback, this._lis.nxt); - } - onMessage(callback) { - return this.addLis(callback, this._lis.msg); - } - onError(callback) { - return this.addLis(callback, this._lis.err); - } - onComplete(callback) { - return this.addLis(callback, this._lis.cmp); - } - addLis(callback, list) { - list.push(callback); - return () => { - let i = list.indexOf(callback); - if (i >= 0) - list.splice(i, 1); - }; + exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; + exports2.isFeatureAvailable = isFeatureAvailable; + exports2.restoreCache = restoreCache3; + exports2.saveCache = saveCache3; + var core12 = __importStar2(require_core()); + var path4 = __importStar2(require("path")); + var utils = __importStar2(require_cacheUtils()); + var cacheHttpClient = __importStar2(require_cacheHttpClient()); + var cacheTwirpClient = __importStar2(require_cacheTwirpClient()); + var config_1 = require_config(); + var tar_1 = require_tar(); + var http_client_1 = require_lib(); + var ValidationError = class _ValidationError extends Error { + constructor(message) { + super(message); + this.name = "ValidationError"; + Object.setPrototypeOf(this, _ValidationError.prototype); } - // remove all listeners - clearLis() { - for (let l of Object.values(this._lis)) - l.splice(0, l.length); + }; + exports2.ValidationError = ValidationError; + var ReserveCacheError = class _ReserveCacheError extends Error { + constructor(message) { + super(message); + this.name = "ReserveCacheError"; + Object.setPrototypeOf(this, _ReserveCacheError.prototype); } - // --- Controller API - /** - * Is this stream already closed by a completion or error? - */ - get closed() { - return this._closed !== false; + }; + exports2.ReserveCacheError = ReserveCacheError; + var FinalizeCacheError = class _FinalizeCacheError extends Error { + constructor(message) { + super(message); + this.name = "FinalizeCacheError"; + Object.setPrototypeOf(this, _FinalizeCacheError.prototype); } - /** - * Emit message, close with error, or close successfully, but only one - * at a time. - * Can be used to wrap a stream by using the other stream's `onNext`. - */ - notifyNext(message, error3, complete) { - runtime_1.assert((message ? 1 : 0) + (error3 ? 1 : 0) + (complete ? 
1 : 0) <= 1, "only one emission at a time"); - if (message) - this.notifyMessage(message); - if (error3) - this.notifyError(error3); - if (complete) - this.notifyComplete(); + }; + exports2.FinalizeCacheError = FinalizeCacheError; + function checkPaths(paths) { + if (!paths || paths.length === 0) { + throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); } - /** - * Emits a new message. Throws if stream is closed. - * - * Triggers onNext and onMessage callbacks. - */ - notifyMessage(message) { - runtime_1.assert(!this.closed, "stream is closed"); - this.pushIt({ value: message, done: false }); - this._lis.msg.forEach((l) => l(message)); - this._lis.nxt.forEach((l) => l(message, void 0, false)); + } + function checkKey(key) { + if (key.length > 512) { + throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); } - /** - * Closes the stream with an error. Throws if stream is closed. - * - * Triggers onNext and onError callbacks. - */ - notifyError(error3) { - runtime_1.assert(!this.closed, "stream is closed"); - this._closed = error3; - this.pushIt(error3); - this._lis.err.forEach((l) => l(error3)); - this._lis.nxt.forEach((l) => l(void 0, error3, false)); - this.clearLis(); + const regex = /^[^,]*$/; + if (!regex.test(key)) { + throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); } - /** - * Closes the stream successfully. Throws if stream is closed. - * - * Triggers onNext and onComplete callbacks. - */ - notifyComplete() { - runtime_1.assert(!this.closed, "stream is closed"); - this._closed = true; - this.pushIt({ value: null, done: true }); - this._lis.cmp.forEach((l) => l()); - this._lis.nxt.forEach((l) => l(void 0, void 0, true)); - this.clearLis(); + } + function isFeatureAvailable() { + const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); + switch (cacheServiceVersion) { + case "v2": + return !!process.env["ACTIONS_RESULTS_URL"]; + case "v1": + default: + return !!process.env["ACTIONS_CACHE_URL"]; } - /** - * Creates an async iterator (that can be used with `for await {...}`) - * to consume the stream. - * - * Some things to note: - * - If an error occurs, the `for await` will throw it. - * - If an error occurred before the `for await` was started, `for await` - * will re-throw it. - * - If the stream is already complete, the `for await` will be empty. - * - If your `for await` consumes slower than the stream produces, - * for example because you are relaying messages in a slow operation, - * messages are queued. - */ - [Symbol.asyncIterator]() { - if (this._closed === true) - this.pushIt({ value: null, done: true }); - else if (this._closed !== false) - this.pushIt(this._closed); - return { - next: () => { - let state = this._itState; - runtime_1.assert(state, "bad state"); - runtime_1.assert(!state.p, "iterator contract broken"); - let first = state.q.shift(); - if (first) - return "value" in first ? 
Promise.resolve(first) : Promise.reject(first); - state.p = new deferred_1.Deferred(); - return state.p.promise; + } + function restoreCache3(paths_1, primaryKey_1, restoreKeys_1, options_1) { + return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { + const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); + core12.debug(`Cache service version: ${cacheServiceVersion}`); + checkPaths(paths); + switch (cacheServiceVersion) { + case "v2": + return yield restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); + case "v1": + default: + return yield restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); + } + }); + } + function restoreCacheV1(paths_1, primaryKey_1, restoreKeys_1, options_1) { + return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { + restoreKeys = restoreKeys || []; + const keys = [primaryKey, ...restoreKeys]; + core12.debug("Resolved Keys:"); + core12.debug(JSON.stringify(keys)); + if (keys.length > 10) { + throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); + } + for (const key of keys) { + checkKey(key); + } + const compressionMethod = yield utils.getCompressionMethod(); + let archivePath = ""; + try { + const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod, + enableCrossOsArchive + }); + if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { + return void 0; + } + if (options === null || options === void 0 ? void 0 : options.lookupOnly) { + core12.info("Lookup only - skipping download"); + return cacheEntry.cacheKey; + } + archivePath = path4.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + core12.debug(`Archive Path: ${archivePath}`); + yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); + if (core12.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core12.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + yield (0, tar_1.extractTar)(archivePath, compressionMethod); + core12.info("Cache restored successfully"); + return cacheEntry.cacheKey; + } catch (error3) { + const typedError = error3; + if (typedError.name === ValidationError.name) { + throw error3; + } else { + if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { + core12.error(`Failed to restore: ${error3.message}`); + } else { + core12.warning(`Failed to restore: ${error3.message}`); + } + } + } finally { + try { + yield utils.unlinkFile(archivePath); + } catch (error3) { + core12.debug(`Failed to delete archive: ${error3}`); } - }; - } - // "push" a new iterator result. - // this either resolves a pending promise, or enqueues the result. - pushIt(result) { - let state = this._itState; - if (state.p) { - const p = state.p; - runtime_1.assert(p.state == deferred_1.DeferredState.PENDING, "iterator contract broken"); - "value" in result ? 
p.resolve(result) : p.reject(result); - delete state.p; - } else { - state.q.push(result); } - } - }; - exports2.RpcOutputStreamController = RpcOutputStreamController; - } -}); - -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/unary-call.js -var require_unary_call = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/unary-call.js"(exports2) { - "use strict"; - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve2) { - resolve2(value); - }); - } - return new (P || (P = Promise))(function(resolve2, reject) { - function fulfilled(value) { + return void 0; + }); + } + function restoreCacheV2(paths_1, primaryKey_1, restoreKeys_1, options_1) { + return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { + options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); + restoreKeys = restoreKeys || []; + const keys = [primaryKey, ...restoreKeys]; + core12.debug("Resolved Keys:"); + core12.debug(JSON.stringify(keys)); + if (keys.length > 10) { + throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); + } + for (const key of keys) { + checkKey(key); + } + let archivePath = ""; + try { + const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); + const compressionMethod = yield utils.getCompressionMethod(); + const request2 = { + key: primaryKey, + restoreKeys, + version: utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive) + }; + const response = yield twirpClient.GetCacheEntryDownloadURL(request2); + if (!response.ok) { + core12.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); + return void 0; + } + const isRestoreKeyMatch = request2.key !== response.matchedKey; + if (isRestoreKeyMatch) { + core12.info(`Cache hit for restore-key: ${response.matchedKey}`); + } else { + core12.info(`Cache hit for: ${response.matchedKey}`); + } + if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { + core12.info("Lookup only - skipping download"); + return response.matchedKey; + } + archivePath = path4.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + core12.debug(`Archive path: ${archivePath}`); + core12.debug(`Starting download of archive to: ${archivePath}`); + yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core12.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core12.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + yield (0, tar_1.extractTar)(archivePath, compressionMethod); + core12.info("Cache restored successfully"); + return response.matchedKey; + } catch (error3) { + const typedError = error3; + if (typedError.name === ValidationError.name) { + throw error3; + } else { + if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { + core12.error(`Failed to restore: ${error3.message}`); + } else { + core12.warning(`Failed to restore: ${error3.message}`); + } + } + } finally { try { - step(generator.next(value)); - } catch (e) { - reject(e); + if (archivePath) { + yield utils.unlinkFile(archivePath); + } + } catch (error3) { + core12.debug(`Failed to delete archive: ${error3}`); } } - function rejected(value) { + return void 0; + }); + } + function saveCache3(paths_1, key_1, options_1) { + return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { + const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); + core12.debug(`Cache service version: ${cacheServiceVersion}`); + checkPaths(paths); + checkKey(key); + switch (cacheServiceVersion) { + case "v2": + return yield saveCacheV2(paths, key, options, enableCrossOsArchive); + case "v1": + default: + return yield saveCacheV1(paths, key, options, enableCrossOsArchive); + } + }); + } + function saveCacheV1(paths_1, key_1, options_1) { + return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { + var _a, _b, _c, _d, _e; + const compressionMethod = yield utils.getCompressionMethod(); + let cacheId = -1; + const cachePaths = yield utils.resolvePaths(paths); + core12.debug("Cache Paths:"); + core12.debug(`${JSON.stringify(cachePaths)}`); + if (cachePaths.length === 0) { + throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); + } + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path4.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + core12.debug(`Archive Path: ${archivePath}`); + try { + yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); + if (core12.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const fileSizeLimit = 10 * 1024 * 1024 * 1024; + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core12.debug(`File Size: ${archiveFileSize}`); + if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { + throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); + } + core12.debug("Reserving Cache"); + const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { + compressionMethod, + 
enableCrossOsArchive, + cacheSize: archiveFileSize + }); + if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { + cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId; + } else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) { + throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); + } else { + throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); + } + core12.debug(`Saving Cache (ID: ${cacheId})`); + yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); + } catch (error3) { + const typedError = error3; + if (typedError.name === ValidationError.name) { + throw error3; + } else if (typedError.name === ReserveCacheError.name) { + core12.info(`Failed to save: ${typedError.message}`); + } else { + if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { + core12.error(`Failed to save: ${typedError.message}`); + } else { + core12.warning(`Failed to save: ${typedError.message}`); + } + } + } finally { try { - step(generator["throw"](value)); - } catch (e) { - reject(e); + yield utils.unlinkFile(archivePath); + } catch (error3) { + core12.debug(`Failed to delete archive: ${error3}`); } } - function step(result) { - result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); - } - step((generator = generator.apply(thisArg, _arguments || [])).next()); + return cacheId; }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.UnaryCall = void 0; - var UnaryCall = class { - constructor(method, requestHeaders, request2, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.request = request2; - this.headers = headers; - this.response = response; - this.status = status; - this.trailers = trailers; - } - /** - * If you are only interested in the final outcome of this call, - * you can await it to receive a `FinishedUnaryCall`. - */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? 
Promise.resolve(onrejected(reason)) : Promise.reject(reason)); - } - promiseFinished() { - return __awaiter2(this, void 0, void 0, function* () { - let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - request: this.request, - headers, - response, - status, - trailers + } + function saveCacheV2(paths_1, key_1, options_1) { + return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { + options = Object.assign(Object.assign({}, options), { uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8, useAzureSdk: true }); + const compressionMethod = yield utils.getCompressionMethod(); + const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); + let cacheId = -1; + const cachePaths = yield utils.resolvePaths(paths); + core12.debug("Cache Paths:"); + core12.debug(`${JSON.stringify(cachePaths)}`); + if (cachePaths.length === 0) { + throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); + } + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path4.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + core12.debug(`Archive Path: ${archivePath}`); + try { + yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); + if (core12.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core12.debug(`File Size: ${archiveFileSize}`); + options.archiveSizeBytes = archiveFileSize; + core12.debug("Reserving Cache"); + const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); + const request2 = { + key, + version }; - }); - } - }; - exports2.UnaryCall = UnaryCall; - } -}); - -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-streaming-call.js -var require_server_streaming_call = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-streaming-call.js"(exports2) { - "use strict"; - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? 
value : new P(function(resolve2) { - resolve2(value); - }); - } - return new (P || (P = Promise))(function(resolve2, reject) { - function fulfilled(value) { + let signedUploadUrl; try { - step(generator.next(value)); - } catch (e) { - reject(e); + const response = yield twirpClient.CreateCacheEntry(request2); + if (!response.ok) { + if (response.message) { + core12.warning(`Cache reservation failed: ${response.message}`); + } + throw new Error(response.message || "Response was not ok"); + } + signedUploadUrl = response.signedUploadUrl; + } catch (error3) { + core12.debug(`Failed to reserve cache: ${error3}`); + throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } - } - function rejected(value) { + core12.debug(`Attempting to upload cache located at: ${archivePath}`); + yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); + const finalizeRequest = { + key, + version, + sizeBytes: `${archiveFileSize}` + }; + const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); + core12.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + if (!finalizeResponse.ok) { + if (finalizeResponse.message) { + throw new FinalizeCacheError(finalizeResponse.message); + } + throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); + } + cacheId = parseInt(finalizeResponse.entryId); + } catch (error3) { + const typedError = error3; + if (typedError.name === ValidationError.name) { + throw error3; + } else if (typedError.name === ReserveCacheError.name) { + core12.info(`Failed to save: ${typedError.message}`); + } else if (typedError.name === FinalizeCacheError.name) { + core12.warning(typedError.message); + } else { + if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { + core12.error(`Failed to save: ${typedError.message}`); + } else { + core12.warning(`Failed to save: ${typedError.message}`); + } + } + } finally { try { - step(generator["throw"](value)); - } catch (e) { - reject(e); + yield utils.unlinkFile(archivePath); + } catch (error3) { + core12.debug(`Failed to delete archive: ${error3}`); } } - function step(result) { - result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); - } - step((generator = generator.apply(thisArg, _arguments || [])).next()); + return cacheId; }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ServerStreamingCall = void 0; - var ServerStreamingCall = class { - constructor(method, requestHeaders, request2, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.request = request2; - this.headers = headers; - this.responses = response; - this.status = status; - this.trailers = trailers; - } - /** - * Instead of awaiting the response status and trailers, you can - * just as well await this call itself to receive the server outcome. - * You should first setup some listeners to the `request` to - * see the actual messages the server replied with. - */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? 
Promise.resolve(onrejected(reason)) : Promise.reject(reason)); - } - promiseFinished() { - return __awaiter2(this, void 0, void 0, function* () { - let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - request: this.request, - headers, - status, - trailers - }; - }); - } - }; - exports2.ServerStreamingCall = ServerStreamingCall; + } } }); -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/client-streaming-call.js -var require_client_streaming_call = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/client-streaming-call.js"(exports2) { +// node_modules/@actions/tool-cache/lib/manifest.js +var require_manifest = __commonJS({ + "node_modules/@actions/tool-cache/lib/manifest.js"(exports2, module2) { "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + } + __setModuleDefault2(result, mod); + return result; + }; + })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve2) { @@ -115223,47 +115375,126 @@ var require_client_streaming_call = __commonJS({ }); }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ClientStreamingCall = void 0; - var ClientStreamingCall = class { - constructor(method, requestHeaders, request2, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.requests = request2; - this.headers = headers; - this.response = response; - this.status = status; - this.trailers = trailers; - } - /** - * Instead of awaiting the response status and trailers, you can - * just as well await this call itself to receive the server outcome. - * Note that it may still be valid to send more request messages. - */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? 
Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + exports2._findMatch = _findMatch; + exports2._getOsVersion = _getOsVersion; + exports2._readLinuxVersionFile = _readLinuxVersionFile; + var semver6 = __importStar2(require_semver2()); + var core_1 = require_core(); + var os2 = require("os"); + var cp = require("child_process"); + var fs2 = require("fs"); + function _findMatch(versionSpec, stable, candidates, archFilter) { + return __awaiter2(this, void 0, void 0, function* () { + const platFilter = os2.platform(); + let result; + let match; + let file; + for (const candidate of candidates) { + const version = candidate.version; + (0, core_1.debug)(`check ${version} satisfies ${versionSpec}`); + if (semver6.satisfies(version, versionSpec) && (!stable || candidate.stable === stable)) { + file = candidate.files.find((item) => { + (0, core_1.debug)(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`); + let chk = item.arch === archFilter && item.platform === platFilter; + if (chk && item.platform_version) { + const osVersion = module2.exports._getOsVersion(); + if (osVersion === item.platform_version) { + chk = true; + } else { + chk = semver6.satisfies(osVersion, item.platform_version); + } + } + return chk; + }); + if (file) { + (0, core_1.debug)(`matched ${candidate.version}`); + match = candidate; + break; + } + } + } + if (match && file) { + result = Object.assign({}, match); + result.files = [file]; + } + return result; + }); + } + function _getOsVersion() { + const plat = os2.platform(); + let version = ""; + if (plat === "darwin") { + version = cp.execSync("sw_vers -productVersion").toString(); + } else if (plat === "linux") { + const lsbContents = module2.exports._readLinuxVersionFile(); + if (lsbContents) { + const lines = lsbContents.split("\n"); + for (const line of lines) { + const parts = line.split("="); + if (parts.length === 2 && (parts[0].trim() === "VERSION_ID" || parts[0].trim() === "DISTRIB_RELEASE")) { + version = parts[1].trim().replace(/^"/, "").replace(/"$/, ""); + break; + } + } + } } - promiseFinished() { - return __awaiter2(this, void 0, void 0, function* () { - let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - headers, - response, - status, - trailers - }; - }); + return version; + } + function _readLinuxVersionFile() { + const lsbReleaseFile = "/etc/lsb-release"; + const osReleaseFile = "/etc/os-release"; + let contents = ""; + if (fs2.existsSync(lsbReleaseFile)) { + contents = fs2.readFileSync(lsbReleaseFile).toString(); + } else if (fs2.existsSync(osReleaseFile)) { + contents = fs2.readFileSync(osReleaseFile).toString(); } - }; - exports2.ClientStreamingCall = ClientStreamingCall; + return contents; + } } }); -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/duplex-streaming-call.js -var require_duplex_streaming_call = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/duplex-streaming-call.js"(exports2) { +// node_modules/@actions/tool-cache/lib/retry-helper.js +var require_retry_helper = __commonJS({ + "node_modules/@actions/tool-cache/lib/retry-helper.js"(exports2) { "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + } + __setModuleDefault2(result, mod); + return result; + }; + })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve2) { @@ -115292,46 +115523,94 @@ var require_duplex_streaming_call = __commonJS({ }); }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.DuplexStreamingCall = void 0; - var DuplexStreamingCall = class { - constructor(method, requestHeaders, request2, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.requests = request2; - this.headers = headers; - this.responses = response; - this.status = status; - this.trailers = trailers; + exports2.RetryHelper = void 0; + var core12 = __importStar2(require_core()); + var RetryHelper = class { + constructor(maxAttempts, minSeconds, maxSeconds) { + if (maxAttempts < 1) { + throw new Error("max attempts should be greater than or equal to 1"); + } + this.maxAttempts = maxAttempts; + this.minSeconds = Math.floor(minSeconds); + this.maxSeconds = Math.floor(maxSeconds); + if (this.minSeconds > this.maxSeconds) { + throw new Error("min seconds should be less than or equal to max seconds"); + } } - /** - * Instead of awaiting the response status and trailers, you can - * just as well await this call itself to receive the server outcome. - * Note that it may still be valid to send more request messages. - */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? 
Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + execute(action, isRetryable) { + return __awaiter2(this, void 0, void 0, function* () { + let attempt = 1; + while (attempt < this.maxAttempts) { + try { + return yield action(); + } catch (err) { + if (isRetryable && !isRetryable(err)) { + throw err; + } + core12.info(err.message); + } + const seconds = this.getSleepAmount(); + core12.info(`Waiting ${seconds} seconds before trying again`); + yield this.sleep(seconds); + attempt++; + } + return yield action(); + }); } - promiseFinished() { + getSleepAmount() { + return Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) + this.minSeconds; + } + sleep(seconds) { return __awaiter2(this, void 0, void 0, function* () { - let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - headers, - status, - trailers - }; + return new Promise((resolve2) => setTimeout(resolve2, seconds * 1e3)); }); } }; - exports2.DuplexStreamingCall = DuplexStreamingCall; + exports2.RetryHelper = RetryHelper; } }); -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/test-transport.js -var require_test_transport = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/test-transport.js"(exports2) { +// node_modules/@actions/tool-cache/lib/tool-cache.js +var require_tool_cache = __commonJS({ + "node_modules/@actions/tool-cache/lib/tool-cache.js"(exports2) { "use strict"; + var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m[k]; + } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + })); + var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }); + var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { + var ownKeys2 = function(o) { + ownKeys2 = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys2(o); + }; + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + } + __setModuleDefault2(result, mod); + return result; + }; + })(); var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function(resolve2) { @@ -115360,2074 +115639,1795 @@ var require_test_transport = __commonJS({ }); }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.TestTransport = void 0; - var rpc_error_1 = require_rpc_error(); - var runtime_1 = require_commonjs16(); - var rpc_output_stream_1 = require_rpc_output_stream(); - var rpc_options_1 = require_rpc_options(); - var unary_call_1 = require_unary_call(); - var server_streaming_call_1 = require_server_streaming_call(); - var client_streaming_call_1 = require_client_streaming_call(); - var duplex_streaming_call_1 = require_duplex_streaming_call(); - var TestTransport = class _TestTransport { - /** - * Initialize with mock data. Omitted fields have default value. - */ - constructor(data) { - this.suppressUncaughtRejections = true; - this.headerDelay = 10; - this.responseDelay = 50; - this.betweenResponseDelay = 10; - this.afterResponseDelay = 10; - this.data = data !== null && data !== void 0 ? data : {}; - } - /** - * Sent message(s) during the last operation. - */ - get sentMessages() { - if (this.lastInput instanceof TestInputStream) { - return this.lastInput.sent; - } else if (typeof this.lastInput == "object") { - return [this.lastInput.single]; - } - return []; + exports2.HTTPError = void 0; + exports2.downloadTool = downloadTool2; + exports2.extract7z = extract7z; + exports2.extractTar = extractTar2; + exports2.extractXar = extractXar; + exports2.extractZip = extractZip; + exports2.cacheDir = cacheDir2; + exports2.cacheFile = cacheFile; + exports2.find = find2; + exports2.findAllVersions = findAllVersions; + exports2.getManifestFromRepo = getManifestFromRepo; + exports2.findFromManifest = findFromManifest; + exports2.isExplicitVersion = isExplicitVersion; + exports2.evaluateVersions = evaluateVersions; + var core12 = __importStar2(require_core()); + var io4 = __importStar2(require_io()); + var crypto2 = __importStar2(require("crypto")); + var fs2 = __importStar2(require("fs")); + var mm = __importStar2(require_manifest()); + var os2 = __importStar2(require("os")); + var path4 = __importStar2(require("path")); + var httpm = __importStar2(require_lib()); + var semver6 = __importStar2(require_semver2()); + var stream = __importStar2(require("stream")); + var util = __importStar2(require("util")); + var assert_1 = require("assert"); + var exec_1 = require_exec(); + var retry_helper_1 = require_retry_helper(); + var HTTPError2 = class extends Error { + constructor(httpStatusCode) { + super(`Unexpected HTTP response: ${httpStatusCode}`); + this.httpStatusCode = httpStatusCode; + Object.setPrototypeOf(this, new.target.prototype); } - /** - * Sending message(s) completed? 
- */ - get sendComplete() { - if (this.lastInput instanceof TestInputStream) { - return this.lastInput.completed; - } else if (typeof this.lastInput == "object") { + }; + exports2.HTTPError = HTTPError2; + var IS_WINDOWS = process.platform === "win32"; + var IS_MAC = process.platform === "darwin"; + var userAgent2 = "actions/tool-cache"; + function downloadTool2(url, dest, auth2, headers) { + return __awaiter2(this, void 0, void 0, function* () { + dest = dest || path4.join(_getTempDirectory(), crypto2.randomUUID()); + yield io4.mkdirP(path4.dirname(dest)); + core12.debug(`Downloading ${url}`); + core12.debug(`Destination ${dest}`); + const maxAttempts = 3; + const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); + const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); + const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds); + return yield retryHelper.execute(() => __awaiter2(this, void 0, void 0, function* () { + return yield downloadToolAttempt(url, dest || "", auth2, headers); + }), (err) => { + if (err instanceof HTTPError2 && err.httpStatusCode) { + if (err.httpStatusCode < 500 && err.httpStatusCode !== 408 && err.httpStatusCode !== 429) { + return false; + } + } return true; + }); + }); + } + function downloadToolAttempt(url, dest, auth2, headers) { + return __awaiter2(this, void 0, void 0, function* () { + if (fs2.existsSync(dest)) { + throw new Error(`Destination file path ${dest} already exists`); } - return false; - } - // Creates a promise for response headers from the mock data. - promiseHeaders() { - var _a; - const headers = (_a = this.data.headers) !== null && _a !== void 0 ? _a : _TestTransport.defaultHeaders; - return headers instanceof rpc_error_1.RpcError ? Promise.reject(headers) : Promise.resolve(headers); - } - // Creates a promise for a single, valid, message from the mock data. - promiseSingleResponse(method) { - if (this.data.response instanceof rpc_error_1.RpcError) { - return Promise.reject(this.data.response); + const http = new httpm.HttpClient(userAgent2, [], { + allowRetries: false + }); + if (auth2) { + core12.debug("set auth"); + if (headers === void 0) { + headers = {}; + } + headers.authorization = auth2; } - let r; - if (Array.isArray(this.data.response)) { - runtime_1.assert(this.data.response.length > 0); - r = this.data.response[0]; - } else if (this.data.response !== void 0) { - r = this.data.response; - } else { - r = method.O.create(); + const response = yield http.get(url, headers); + if (response.message.statusCode !== 200) { + const err = new HTTPError2(response.message.statusCode); + core12.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + throw err; } - runtime_1.assert(method.O.is(r)); - return Promise.resolve(r); - } - /** - * Pushes response messages from the mock data to the output stream. - * If an error response, status or trailers are mocked, the stream is - * closed with the respective error. - * Otherwise, stream is completed successfully. - * - * The returned promise resolves when the stream is closed. It should - * not reject. If it does, code is broken. 
- */ - streamResponses(method, stream, abort) { - return __awaiter2(this, void 0, void 0, function* () { - const messages = []; - if (this.data.response === void 0) { - messages.push(method.O.create()); - } else if (Array.isArray(this.data.response)) { - for (let msg of this.data.response) { - runtime_1.assert(method.O.is(msg)); - messages.push(msg); + const pipeline = util.promisify(stream.pipeline); + const responseMessageFactory = _getGlobal("TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY", () => response.message); + const readStream = responseMessageFactory(); + let succeeded = false; + try { + yield pipeline(readStream, fs2.createWriteStream(dest)); + core12.debug("download complete"); + succeeded = true; + return dest; + } finally { + if (!succeeded) { + core12.debug("download failed"); + try { + yield io4.rmRF(dest); + } catch (err) { + core12.debug(`Failed to delete '${dest}'. ${err.message}`); } - } else if (!(this.data.response instanceof rpc_error_1.RpcError)) { - runtime_1.assert(method.O.is(this.data.response)); - messages.push(this.data.response); } + } + }); + } + function extract7z(file, dest, _7zPath) { + return __awaiter2(this, void 0, void 0, function* () { + (0, assert_1.ok)(IS_WINDOWS, "extract7z() not supported on current OS"); + (0, assert_1.ok)(file, 'parameter "file" is required'); + dest = yield _createExtractFolder(dest); + const originalCwd = process.cwd(); + process.chdir(dest); + if (_7zPath) { try { - yield delay2(this.responseDelay, abort)(void 0); - } catch (error3) { - stream.notifyError(error3); - return; - } - if (this.data.response instanceof rpc_error_1.RpcError) { - stream.notifyError(this.data.response); - return; - } - for (let msg of messages) { - stream.notifyMessage(msg); - try { - yield delay2(this.betweenResponseDelay, abort)(void 0); - } catch (error3) { - stream.notifyError(error3); - return; - } + const logLevel = core12.isDebug() ? 
"-bb1" : "-bb0"; + const args = [ + "x", + // eXtract files with full paths + logLevel, + // -bb[0-3] : set output log level + "-bd", + // disable progress indicator + "-sccUTF-8", + // set charset for for console input/output + file + ]; + const options = { + silent: true + }; + yield (0, exec_1.exec)(`"${_7zPath}"`, args, options); + } finally { + process.chdir(originalCwd); } - if (this.data.status instanceof rpc_error_1.RpcError) { - stream.notifyError(this.data.status); - return; + } else { + const escapedScript = path4.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; + const args = [ + "-NoLogo", + "-Sta", + "-NoProfile", + "-NonInteractive", + "-ExecutionPolicy", + "Unrestricted", + "-Command", + command + ]; + const options = { + silent: true + }; + try { + const powershellPath = yield io4.which("powershell", true); + yield (0, exec_1.exec)(`"${powershellPath}"`, args, options); + } finally { + process.chdir(originalCwd); } - if (this.data.trailers instanceof rpc_error_1.RpcError) { - stream.notifyError(this.data.trailers); - return; + } + return dest; + }); + } + function extractTar2(file_1, dest_1) { + return __awaiter2(this, arguments, void 0, function* (file, dest, flags = "xz") { + if (!file) { + throw new Error("parameter 'file' is required"); + } + dest = yield _createExtractFolder(dest); + core12.debug("Checking tar --version"); + let versionOutput = ""; + yield (0, exec_1.exec)("tar --version", [], { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data) => versionOutput += data.toString(), + stderr: (data) => versionOutput += data.toString() } - stream.notifyComplete(); }); - } - // Creates a promise for response status from the mock data. - promiseStatus() { - var _a; - const status = (_a = this.data.status) !== null && _a !== void 0 ? _a : _TestTransport.defaultStatus; - return status instanceof rpc_error_1.RpcError ? Promise.reject(status) : Promise.resolve(status); - } - // Creates a promise for response trailers from the mock data. - promiseTrailers() { - var _a; - const trailers = (_a = this.data.trailers) !== null && _a !== void 0 ? _a : _TestTransport.defaultTrailers; - return trailers instanceof rpc_error_1.RpcError ? Promise.reject(trailers) : Promise.resolve(trailers); - } - maybeSuppressUncaught(...promise) { - if (this.suppressUncaughtRejections) { - for (let p of promise) { - p.catch(() => { - }); - } + core12.debug(versionOutput.trim()); + const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); + let args; + if (flags instanceof Array) { + args = flags; + } else { + args = [flags]; } - } - mergeOptions(options) { - return rpc_options_1.mergeRpcOptions({}, options); - } - unary(method, input, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? 
_a : {}, headersPromise = this.promiseHeaders().then(delay2(this.headerDelay, options.abort)), responsePromise = headersPromise.catch((_) => { - }).then(delay2(this.responseDelay, options.abort)).then((_) => this.promiseSingleResponse(method)), statusPromise = responsePromise.catch((_) => { - }).then(delay2(this.afterResponseDelay, options.abort)).then((_) => this.promiseStatus()), trailersPromise = responsePromise.catch((_) => { - }).then(delay2(this.afterResponseDelay, options.abort)).then((_) => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = { single: input }; - return new unary_call_1.UnaryCall(method, requestHeaders, input, headersPromise, responsePromise, statusPromise, trailersPromise); - } - serverStreaming(method, input, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders().then(delay2(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise.then(delay2(this.responseDelay, options.abort)).catch(() => { - }).then(() => this.streamResponses(method, outputStream, options.abort)).then(delay2(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise.then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise.then(() => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = { single: input }; - return new server_streaming_call_1.ServerStreamingCall(method, requestHeaders, input, headersPromise, outputStream, statusPromise, trailersPromise); - } - clientStreaming(method, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders().then(delay2(this.headerDelay, options.abort)), responsePromise = headersPromise.catch((_) => { - }).then(delay2(this.responseDelay, options.abort)).then((_) => this.promiseSingleResponse(method)), statusPromise = responsePromise.catch((_) => { - }).then(delay2(this.afterResponseDelay, options.abort)).then((_) => this.promiseStatus()), trailersPromise = responsePromise.catch((_) => { - }).then(delay2(this.afterResponseDelay, options.abort)).then((_) => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = new TestInputStream(this.data, options.abort); - return new client_streaming_call_1.ClientStreamingCall(method, requestHeaders, this.lastInput, headersPromise, responsePromise, statusPromise, trailersPromise); - } - duplex(method, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? 
_a : {}, headersPromise = this.promiseHeaders().then(delay2(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise.then(delay2(this.responseDelay, options.abort)).catch(() => { - }).then(() => this.streamResponses(method, outputStream, options.abort)).then(delay2(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise.then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise.then(() => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = new TestInputStream(this.data, options.abort); - return new duplex_streaming_call_1.DuplexStreamingCall(method, requestHeaders, this.lastInput, headersPromise, outputStream, statusPromise, trailersPromise); - } - }; - exports2.TestTransport = TestTransport; - TestTransport.defaultHeaders = { - responseHeader: "test" - }; - TestTransport.defaultStatus = { - code: "OK", - detail: "all good" - }; - TestTransport.defaultTrailers = { - responseTrailer: "test" - }; - function delay2(ms, abort) { - return (v) => new Promise((resolve2, reject) => { - if (abort === null || abort === void 0 ? void 0 : abort.aborted) { - reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); + if (core12.isDebug() && !flags.includes("v")) { + args.push("-v"); + } + let destArg = dest; + let fileArg = file; + if (IS_WINDOWS && isGnuTar) { + args.push("--force-local"); + destArg = dest.replace(/\\/g, "/"); + fileArg = file.replace(/\\/g, "/"); + } + if (isGnuTar) { + args.push("--warning=no-unknown-keyword"); + args.push("--overwrite"); + } + args.push("-C", destArg, "-f", fileArg); + yield (0, exec_1.exec)(`tar`, args); + return dest; + }); + } + function extractXar(file_1, dest_1) { + return __awaiter2(this, arguments, void 0, function* (file, dest, flags = []) { + (0, assert_1.ok)(IS_MAC, "extractXar() not supported on current OS"); + (0, assert_1.ok)(file, 'parameter "file" is required'); + dest = yield _createExtractFolder(dest); + let args; + if (flags instanceof Array) { + args = flags; } else { - const id = setTimeout(() => resolve2(v), ms); - if (abort) { - abort.addEventListener("abort", (ev) => { - clearTimeout(id); - reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); - }); - } + args = [flags]; + } + args.push("-x", "-C", dest, "-f", file); + if (core12.isDebug()) { + args.push("-v"); + } + const xarPath = yield io4.which("xar", true); + yield (0, exec_1.exec)(`"${xarPath}"`, _unique(args)); + return dest; + }); + } + function extractZip(file, dest) { + return __awaiter2(this, void 0, void 0, function* () { + if (!file) { + throw new Error("parameter 'file' is required"); + } + dest = yield _createExtractFolder(dest); + if (IS_WINDOWS) { + yield extractZipWin(file, dest); + } else { + yield extractZipNix(file, dest); + } + return dest; + }); + } + function extractZipWin(file, dest) { + return __awaiter2(this, void 0, void 0, function* () { + const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const pwshPath = yield io4.which("pwsh", false); + if (pwshPath) { + const pwshCommand = [ + `$ErrorActionPreference = 'Stop' ;`, + `try { Add-Type -AssemblyName System.IO.Compression.ZipFile } catch { } ;`, + `try { [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }`, + `catch { if (($_.Exception.GetType().FullName -eq 
'System.Management.Automation.MethodException') -or ($_.Exception.GetType().FullName -eq 'System.Management.Automation.RuntimeException') ){ Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force } else { throw $_ } } ;` + ].join(" "); + const args = [ + "-NoLogo", + "-NoProfile", + "-NonInteractive", + "-ExecutionPolicy", + "Unrestricted", + "-Command", + pwshCommand + ]; + core12.debug(`Using pwsh at path: ${pwshPath}`); + yield (0, exec_1.exec)(`"${pwshPath}"`, args); + } else { + const powershellCommand = [ + `$ErrorActionPreference = 'Stop' ;`, + `try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ;`, + `if ((Get-Command -Name Expand-Archive -Module Microsoft.PowerShell.Archive -ErrorAction Ignore)) { Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force }`, + `else {[System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }` + ].join(" "); + const args = [ + "-NoLogo", + "-Sta", + "-NoProfile", + "-NonInteractive", + "-ExecutionPolicy", + "Unrestricted", + "-Command", + powershellCommand + ]; + const powershellPath = yield io4.which("powershell", true); + core12.debug(`Using powershell at path: ${powershellPath}`); + yield (0, exec_1.exec)(`"${powershellPath}"`, args); + } + }); + } + function extractZipNix(file, dest) { + return __awaiter2(this, void 0, void 0, function* () { + const unzipPath = yield io4.which("unzip", true); + const args = [file]; + if (!core12.isDebug()) { + args.unshift("-q"); } + args.unshift("-o"); + yield (0, exec_1.exec)(`"${unzipPath}"`, args, { cwd: dest }); }); } - var TestInputStream = class { - constructor(data, abort) { - this._completed = false; - this._sent = []; - this.data = data; - this.abort = abort; + function cacheDir2(sourceDir, tool, version, arch) { + return __awaiter2(this, void 0, void 0, function* () { + version = semver6.clean(version) || version; + arch = arch || os2.arch(); + core12.debug(`Caching tool ${tool} ${version} ${arch}`); + core12.debug(`source dir: ${sourceDir}`); + if (!fs2.statSync(sourceDir).isDirectory()) { + throw new Error("sourceDir is not a directory"); + } + const destPath = yield _createToolPath(tool, version, arch); + for (const itemName of fs2.readdirSync(sourceDir)) { + const s = path4.join(sourceDir, itemName); + yield io4.cp(s, destPath, { recursive: true }); + } + _completeToolPath(tool, version, arch); + return destPath; + }); + } + function cacheFile(sourceFile, targetFile, tool, version, arch) { + return __awaiter2(this, void 0, void 0, function* () { + version = semver6.clean(version) || version; + arch = arch || os2.arch(); + core12.debug(`Caching tool ${tool} ${version} ${arch}`); + core12.debug(`source file: ${sourceFile}`); + if (!fs2.statSync(sourceFile).isFile()) { + throw new Error("sourceFile is not a file"); + } + const destFolder = yield _createToolPath(tool, version, arch); + const destPath = path4.join(destFolder, targetFile); + core12.debug(`destination file ${destPath}`); + yield io4.cp(sourceFile, destPath); + _completeToolPath(tool, version, arch); + return destFolder; + }); + } + function find2(toolName, versionSpec, arch) { + if (!toolName) { + throw new Error("toolName parameter is required"); } - get sent() { - return this._sent; + if (!versionSpec) { + throw new Error("versionSpec parameter is required"); } - get completed() { - return this._completed; + arch = arch || os2.arch(); + if (!isExplicitVersion(versionSpec)) { + const localVersions = 
findAllVersions(toolName, arch); + const match = evaluateVersions(localVersions, versionSpec); + versionSpec = match; } - send(message) { - if (this.data.inputMessage instanceof rpc_error_1.RpcError) { - return Promise.reject(this.data.inputMessage); + let toolPath = ""; + if (versionSpec) { + versionSpec = semver6.clean(versionSpec) || ""; + const cachePath = path4.join(_getCacheDirectory(), toolName, versionSpec, arch); + core12.debug(`checking cache: ${cachePath}`); + if (fs2.existsSync(cachePath) && fs2.existsSync(`${cachePath}.complete`)) { + core12.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); + toolPath = cachePath; + } else { + core12.debug("not found"); } - const delayMs = this.data.inputMessage === void 0 ? 10 : this.data.inputMessage; - return Promise.resolve(void 0).then(() => { - this._sent.push(message); - }).then(delay2(delayMs, this.abort)); } - complete() { - if (this.data.inputComplete instanceof rpc_error_1.RpcError) { - return Promise.reject(this.data.inputComplete); + return toolPath; + } + function findAllVersions(toolName, arch) { + const versions = []; + arch = arch || os2.arch(); + const toolPath = path4.join(_getCacheDirectory(), toolName); + if (fs2.existsSync(toolPath)) { + const children = fs2.readdirSync(toolPath); + for (const child of children) { + if (isExplicitVersion(child)) { + const fullPath = path4.join(toolPath, child, arch || ""); + if (fs2.existsSync(fullPath) && fs2.existsSync(`${fullPath}.complete`)) { + versions.push(child); + } + } } - const delayMs = this.data.inputComplete === void 0 ? 10 : this.data.inputComplete; - return Promise.resolve(void 0).then(() => { - this._completed = true; - }).then(delay2(delayMs, this.abort)); } - }; - } -}); - -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-interceptor.js -var require_rpc_interceptor = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-interceptor.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.stackDuplexStreamingInterceptors = exports2.stackClientStreamingInterceptors = exports2.stackServerStreamingInterceptors = exports2.stackUnaryInterceptors = exports2.stackIntercept = void 0; - var runtime_1 = require_commonjs16(); - function stackIntercept(kind, transport, method, options, input) { - var _a, _b, _c, _d; - if (kind == "unary") { - let tail = (mtd, inp, opt) => transport.unary(mtd, inp, opt); - for (const curr of ((_a = options.interceptors) !== null && _a !== void 0 ? _a : []).filter((i) => i.interceptUnary).reverse()) { - const next = tail; - tail = (mtd, inp, opt) => curr.interceptUnary(next, mtd, inp, opt); + return versions; + } + function getManifestFromRepo(owner_1, repo_1, auth_1) { + return __awaiter2(this, arguments, void 0, function* (owner, repo, auth2, branch = "master") { + let releases = []; + const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`; + const http = new httpm.HttpClient("tool-cache"); + const headers = {}; + if (auth2) { + core12.debug("set auth"); + headers.authorization = auth2; } - return tail(method, input, options); - } - if (kind == "serverStreaming") { - let tail = (mtd, inp, opt) => transport.serverStreaming(mtd, inp, opt); - for (const curr of ((_b = options.interceptors) !== null && _b !== void 0 ? 
_b : []).filter((i) => i.interceptServerStreaming).reverse()) { - const next = tail; - tail = (mtd, inp, opt) => curr.interceptServerStreaming(next, mtd, inp, opt); + const response = yield http.getJson(treeUrl, headers); + if (!response.result) { + return releases; } - return tail(method, input, options); - } - if (kind == "clientStreaming") { - let tail = (mtd, opt) => transport.clientStreaming(mtd, opt); - for (const curr of ((_c = options.interceptors) !== null && _c !== void 0 ? _c : []).filter((i) => i.interceptClientStreaming).reverse()) { - const next = tail; - tail = (mtd, opt) => curr.interceptClientStreaming(next, mtd, opt); + let manifestUrl = ""; + for (const item of response.result.tree) { + if (item.path === "versions-manifest.json") { + manifestUrl = item.url; + break; + } } - return tail(method, options); - } - if (kind == "duplex") { - let tail = (mtd, opt) => transport.duplex(mtd, opt); - for (const curr of ((_d = options.interceptors) !== null && _d !== void 0 ? _d : []).filter((i) => i.interceptDuplex).reverse()) { - const next = tail; - tail = (mtd, opt) => curr.interceptDuplex(next, mtd, opt); + headers["accept"] = "application/vnd.github.VERSION.raw"; + let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody(); + if (versionsRaw) { + versionsRaw = versionsRaw.replace(/^\uFEFF/, ""); + try { + releases = JSON.parse(versionsRaw); + } catch (_a) { + core12.debug("Invalid json"); + } } - return tail(method, options); - } - runtime_1.assertNever(kind); + return releases; + }); } - exports2.stackIntercept = stackIntercept; - function stackUnaryInterceptors(transport, method, input, options) { - return stackIntercept("unary", transport, method, options, input); + function findFromManifest(versionSpec_1, stable_1, manifest_1) { + return __awaiter2(this, arguments, void 0, function* (versionSpec, stable, manifest, archFilter = os2.arch()) { + const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter); + return match; + }); } - exports2.stackUnaryInterceptors = stackUnaryInterceptors; - function stackServerStreamingInterceptors(transport, method, input, options) { - return stackIntercept("serverStreaming", transport, method, options, input); + function _createExtractFolder(dest) { + return __awaiter2(this, void 0, void 0, function* () { + if (!dest) { + dest = path4.join(_getTempDirectory(), crypto2.randomUUID()); + } + yield io4.mkdirP(dest); + return dest; + }); } - exports2.stackServerStreamingInterceptors = stackServerStreamingInterceptors; - function stackClientStreamingInterceptors(transport, method, options) { - return stackIntercept("clientStreaming", transport, method, options); + function _createToolPath(tool, version, arch) { + return __awaiter2(this, void 0, void 0, function* () { + const folderPath = path4.join(_getCacheDirectory(), tool, semver6.clean(version) || version, arch || ""); + core12.debug(`destination ${folderPath}`); + const markerPath = `${folderPath}.complete`; + yield io4.rmRF(folderPath); + yield io4.rmRF(markerPath); + yield io4.mkdirP(folderPath); + return folderPath; + }); } - exports2.stackClientStreamingInterceptors = stackClientStreamingInterceptors; - function stackDuplexStreamingInterceptors(transport, method, options) { - return stackIntercept("duplex", transport, method, options); + function _completeToolPath(tool, version, arch) { + const folderPath = path4.join(_getCacheDirectory(), tool, semver6.clean(version) || version, arch || ""); + const markerPath = `${folderPath}.complete`; + 
fs2.writeFileSync(markerPath, ""); + core12.debug("finished caching tool"); } - exports2.stackDuplexStreamingInterceptors = stackDuplexStreamingInterceptors; - } -}); - -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-call-context.js -var require_server_call_context = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-call-context.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ServerCallContextController = void 0; - var ServerCallContextController = class { - constructor(method, headers, deadline, sendResponseHeadersFn, defaultStatus = { code: "OK", detail: "" }) { - this._cancelled = false; - this._listeners = []; - this.method = method; - this.headers = headers; - this.deadline = deadline; - this.trailers = {}; - this._sendRH = sendResponseHeadersFn; - this.status = defaultStatus; - } - /** - * Set the call cancelled. - * - * Invokes all callbacks registered with onCancel() and - * sets `cancelled = true`. - */ - notifyCancelled() { - if (!this._cancelled) { - this._cancelled = true; - for (let l of this._listeners) { - l(); - } - } - } - /** - * Send response headers. - */ - sendResponseHeaders(data) { - this._sendRH(data); - } - /** - * Is the call cancelled? - * - * When the client closes the connection before the server - * is done, the call is cancelled. - * - * If you want to cancel a request on the server, throw a - * RpcError with the CANCELLED status code. - */ - get cancelled() { - return this._cancelled; - } - /** - * Add a callback for cancellation. - */ - onCancel(callback) { - const l = this._listeners; - l.push(callback); - return () => { - let i = l.indexOf(callback); - if (i >= 0) - l.splice(i, 1); - }; - } - }; - exports2.ServerCallContextController = ServerCallContextController; - } -}); - -// node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js -var require_commonjs17 = __commonJS({ - "node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - var service_type_1 = require_service_type(); - Object.defineProperty(exports2, "ServiceType", { enumerable: true, get: function() { - return service_type_1.ServiceType; - } }); - var reflection_info_1 = require_reflection_info2(); - Object.defineProperty(exports2, "readMethodOptions", { enumerable: true, get: function() { - return reflection_info_1.readMethodOptions; - } }); - Object.defineProperty(exports2, "readMethodOption", { enumerable: true, get: function() { - return reflection_info_1.readMethodOption; - } }); - Object.defineProperty(exports2, "readServiceOption", { enumerable: true, get: function() { - return reflection_info_1.readServiceOption; - } }); - var rpc_error_1 = require_rpc_error(); - Object.defineProperty(exports2, "RpcError", { enumerable: true, get: function() { - return rpc_error_1.RpcError; - } }); - var rpc_options_1 = require_rpc_options(); - Object.defineProperty(exports2, "mergeRpcOptions", { enumerable: true, get: function() { - return rpc_options_1.mergeRpcOptions; - } }); - var rpc_output_stream_1 = require_rpc_output_stream(); - Object.defineProperty(exports2, "RpcOutputStreamController", { enumerable: true, get: function() { - return rpc_output_stream_1.RpcOutputStreamController; - } }); - var test_transport_1 = require_test_transport(); - Object.defineProperty(exports2, "TestTransport", { enumerable: true, get: function() { - return test_transport_1.TestTransport; - } }); - var deferred_1 
= require_deferred(); - Object.defineProperty(exports2, "Deferred", { enumerable: true, get: function() { - return deferred_1.Deferred; - } }); - Object.defineProperty(exports2, "DeferredState", { enumerable: true, get: function() { - return deferred_1.DeferredState; - } }); - var duplex_streaming_call_1 = require_duplex_streaming_call(); - Object.defineProperty(exports2, "DuplexStreamingCall", { enumerable: true, get: function() { - return duplex_streaming_call_1.DuplexStreamingCall; - } }); - var client_streaming_call_1 = require_client_streaming_call(); - Object.defineProperty(exports2, "ClientStreamingCall", { enumerable: true, get: function() { - return client_streaming_call_1.ClientStreamingCall; - } }); - var server_streaming_call_1 = require_server_streaming_call(); - Object.defineProperty(exports2, "ServerStreamingCall", { enumerable: true, get: function() { - return server_streaming_call_1.ServerStreamingCall; - } }); - var unary_call_1 = require_unary_call(); - Object.defineProperty(exports2, "UnaryCall", { enumerable: true, get: function() { - return unary_call_1.UnaryCall; - } }); - var rpc_interceptor_1 = require_rpc_interceptor(); - Object.defineProperty(exports2, "stackIntercept", { enumerable: true, get: function() { - return rpc_interceptor_1.stackIntercept; - } }); - Object.defineProperty(exports2, "stackDuplexStreamingInterceptors", { enumerable: true, get: function() { - return rpc_interceptor_1.stackDuplexStreamingInterceptors; - } }); - Object.defineProperty(exports2, "stackClientStreamingInterceptors", { enumerable: true, get: function() { - return rpc_interceptor_1.stackClientStreamingInterceptors; - } }); - Object.defineProperty(exports2, "stackServerStreamingInterceptors", { enumerable: true, get: function() { - return rpc_interceptor_1.stackServerStreamingInterceptors; - } }); - Object.defineProperty(exports2, "stackUnaryInterceptors", { enumerable: true, get: function() { - return rpc_interceptor_1.stackUnaryInterceptors; - } }); - var server_call_context_1 = require_server_call_context(); - Object.defineProperty(exports2, "ServerCallContextController", { enumerable: true, get: function() { - return server_call_context_1.ServerCallContextController; - } }); - } -}); - -// node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.js -var require_cachescope = __commonJS({ - "node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.CacheScope = void 0; - var runtime_1 = require_commonjs16(); - var runtime_2 = require_commonjs16(); - var runtime_3 = require_commonjs16(); - var runtime_4 = require_commonjs16(); - var runtime_5 = require_commonjs16(); - var CacheScope$Type = class extends runtime_5.MessageType { - constructor() { - super("github.actions.results.entities.v1.CacheScope", [ - { - no: 1, - name: "scope", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 2, - name: "permission", - kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - } - ]); - } - create(value) { - const message = { scope: "", permission: "0" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* string scope */ - 1: - message.scope = reader.string(); - break; - case /* int64 permission */ - 2: - message.permission = reader.int64().toString(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } + function isExplicitVersion(versionSpec) { + const c = semver6.clean(versionSpec) || ""; + core12.debug(`isExplicit: ${c}`); + const valid2 = semver6.valid(c) != null; + core12.debug(`explicit? ${valid2}`); + return valid2; + } + function evaluateVersions(versions, versionSpec) { + let version = ""; + core12.debug(`evaluating ${versions.length} versions`); + versions = versions.sort((a, b) => { + if (semver6.gt(a, b)) { + return 1; } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.scope !== "") - writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.scope); - if (message.permission !== "0") - writer.tag(2, runtime_1.WireType.Varint).int64(message.permission); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } - }; - exports2.CacheScope = new CacheScope$Type(); - } -}); - -// node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.js -var require_cachemetadata = __commonJS({ - "node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.CacheMetadata = void 0; - var runtime_1 = require_commonjs16(); - var runtime_2 = require_commonjs16(); - var runtime_3 = require_commonjs16(); - var runtime_4 = require_commonjs16(); - var runtime_5 = require_commonjs16(); - var cachescope_1 = require_cachescope(); - var CacheMetadata$Type = class extends runtime_5.MessageType { - constructor() { - super("github.actions.results.entities.v1.CacheMetadata", [ - { - no: 1, - name: "repository_id", - kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - }, - { no: 2, name: "scope", kind: "message", repeat: 1, T: () => cachescope_1.CacheScope } - ]); - } - create(value) { - const message = { repositoryId: "0", scope: [] }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* int64 repository_id */ - 1: - message.repositoryId = reader.int64().toString(); - break; - case /* repeated github.actions.results.entities.v1.CacheScope scope */ - 2: - message.scope.push(cachescope_1.CacheScope.internalBinaryRead(reader, reader.uint32(), options)); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } + return -1; + }); + for (let i = versions.length - 1; i >= 0; i--) { + const potential = versions[i]; + const satisfied = semver6.satisfies(potential, versionSpec); + if (satisfied) { + version = potential; + break; } - return message; } - internalBinaryWrite(message, writer, options) { - if (message.repositoryId !== "0") - writer.tag(1, runtime_1.WireType.Varint).int64(message.repositoryId); - for (let i = 0; i < message.scope.length; i++) - cachescope_1.CacheScope.internalBinaryWrite(message.scope[i], writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join(); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + if (version) { + core12.debug(`matched: ${version}`); + } else { + core12.debug("match not found"); } - }; - exports2.CacheMetadata = new CacheMetadata$Type(); + return version; + } + function _getCacheDirectory() { + const cacheDirectory = process.env["RUNNER_TOOL_CACHE"] || ""; + (0, assert_1.ok)(cacheDirectory, "Expected RUNNER_TOOL_CACHE to be defined"); + return cacheDirectory; + } + function _getTempDirectory() { + const tempDirectory = process.env["RUNNER_TEMP"] || ""; + (0, assert_1.ok)(tempDirectory, "Expected RUNNER_TEMP to be defined"); + return tempDirectory; + } + function _getGlobal(key, defaultValue) { + const value = global[key]; + return value !== void 0 ? 
value : defaultValue; + } + function _unique(values) { + return Array.from(new Set(values)); + } } }); -// node_modules/@actions/cache/lib/generated/results/api/v1/cache.js -var require_cache4 = __commonJS({ - "node_modules/@actions/cache/lib/generated/results/api/v1/cache.js"(exports2) { +// node_modules/jsonschema/lib/helpers.js +var require_helpers3 = __commonJS({ + "node_modules/jsonschema/lib/helpers.js"(exports2, module2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.CacheService = exports2.GetCacheEntryDownloadURLResponse = exports2.GetCacheEntryDownloadURLRequest = exports2.FinalizeCacheEntryUploadResponse = exports2.FinalizeCacheEntryUploadRequest = exports2.CreateCacheEntryResponse = exports2.CreateCacheEntryRequest = void 0; - var runtime_rpc_1 = require_commonjs17(); - var runtime_1 = require_commonjs16(); - var runtime_2 = require_commonjs16(); - var runtime_3 = require_commonjs16(); - var runtime_4 = require_commonjs16(); - var runtime_5 = require_commonjs16(); - var cachemetadata_1 = require_cachemetadata(); - var CreateCacheEntryRequest$Type = class extends runtime_5.MessageType { - constructor() { - super("github.actions.results.api.v1.CreateCacheEntryRequest", [ - { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, - { - no: 2, - name: "key", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 3, - name: "version", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - } - ]); + var uri = require("url"); + var ValidationError = exports2.ValidationError = function ValidationError2(message, instance, schema2, path4, name, argument) { + if (Array.isArray(path4)) { + this.path = path4; + this.property = path4.reduce(function(sum, item) { + return sum + makeSuffix(item); + }, "instance"); + } else if (path4 !== void 0) { + this.property = path4; } - create(value) { - const message = { key: "", version: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; + if (message) { + this.message = message; } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* github.actions.results.entities.v1.CacheMetadata metadata */ - 1: - message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); - break; - case /* string key */ - 2: - message.key = reader.string(); - break; - case /* string version */ - 3: - message.version = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; + if (schema2) { + var id = schema2.$id || schema2.id; + this.schema = id || schema2; } - internalBinaryWrite(message, writer, options) { - if (message.metadata) - cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); - if (message.key !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); - if (message.version !== "") - writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.version); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + if (instance !== void 0) { + this.instance = instance; } + this.name = name; + this.argument = argument; + this.stack = this.toString(); }; - exports2.CreateCacheEntryRequest = new CreateCacheEntryRequest$Type(); - var CreateCacheEntryResponse$Type = class extends runtime_5.MessageType { - constructor() { - super("github.actions.results.api.v1.CreateCacheEntryResponse", [ - { - no: 1, - name: "ok", - kind: "scalar", - T: 8 - /*ScalarType.BOOL*/ - }, - { - no: 2, - name: "signed_upload_url", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 3, - name: "message", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - } - ]); - } - create(value) { - const message = { ok: false, signedUploadUrl: "", message: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* bool ok */ - 1: - message.ok = reader.bool(); - break; - case /* string signed_upload_url */ - 2: - message.signedUploadUrl = reader.string(); - break; - case /* string message */ - 3: - message.message = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.ok !== false) - writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); - if (message.signedUploadUrl !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl); - if (message.message !== "") - writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.message); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } + ValidationError.prototype.toString = function toString2() { + return this.property + " " + this.message; }; - exports2.CreateCacheEntryResponse = new CreateCacheEntryResponse$Type(); - var FinalizeCacheEntryUploadRequest$Type = class extends runtime_5.MessageType { - constructor() { - super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [ - { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, - { - no: 2, - name: "key", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 3, - name: "size_bytes", - kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - }, - { - no: 4, - name: "version", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - } - ]); - } - create(value) { - const message = { key: "", sizeBytes: "0", version: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* github.actions.results.entities.v1.CacheMetadata metadata */ - 1: - message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); - break; - case /* string key */ - 2: - message.key = reader.string(); - break; - case /* int64 size_bytes */ - 3: - message.sizeBytes = reader.int64().toString(); - break; - case /* string version */ - 4: - message.version = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; + var ValidatorResult = exports2.ValidatorResult = function ValidatorResult2(instance, schema2, options, ctx) { + this.instance = instance; + this.schema = schema2; + this.options = options; + this.path = ctx.path; + this.propertyPath = ctx.propertyPath; + this.errors = []; + this.throwError = options && options.throwError; + this.throwFirst = options && options.throwFirst; + this.throwAll = options && options.throwAll; + this.disableFormat = options && options.disableFormat === true; + }; + ValidatorResult.prototype.addError = function addError(detail) { + var err; + if (typeof detail == "string") { + err = new ValidationError(detail, this.instance, this.schema, this.path); + } else { + if (!detail) throw new Error("Missing error detail"); + if (!detail.message) throw new Error("Missing error message"); + if (!detail.name) throw new Error("Missing validator type"); + err = new ValidationError(detail.message, this.instance, this.schema, this.path, detail.name, detail.argument); } - internalBinaryWrite(message, writer, options) { - if (message.metadata) - cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); - if (message.key !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); - if (message.sizeBytes !== "0") - writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes); - if (message.version !== "") - writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + this.errors.push(err); + if (this.throwFirst) { + throw new ValidatorResultError(this); + } else if (this.throwError) { + throw err; } + return err; }; - exports2.FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type(); - var FinalizeCacheEntryUploadResponse$Type = class extends runtime_5.MessageType { - constructor() { - super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [ - { - no: 1, - name: "ok", - kind: "scalar", - T: 8 - /*ScalarType.BOOL*/ - }, - { - no: 2, - name: "entry_id", - kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - }, - { - no: 3, - name: "message", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - } - ]); + ValidatorResult.prototype.importErrors = function importErrors(res) { + if (typeof res == "string" || res && res.validatorType) { + this.addError(res); + } else if (res && res.errors) { + this.errors = this.errors.concat(res.errors); } - create(value) { - const message = { ok: false, entryId: "0", message: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; + }; + function stringizer(v, i) { + return i + ": " + v.toString() + "\n"; + } + ValidatorResult.prototype.toString = function toString2(res) { + return this.errors.map(stringizer).join(""); + }; + Object.defineProperty(ValidatorResult.prototype, "valid", { get: function() { + return !this.errors.length; + } }); + module2.exports.ValidatorResultError = ValidatorResultError; + function ValidatorResultError(result) { + if (Error.captureStackTrace) { + Error.captureStackTrace(this, ValidatorResultError); } - internalBinaryRead(reader, length, 
options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* bool ok */ - 1: - message.ok = reader.bool(); - break; - case /* int64 entry_id */ - 2: - message.entryId = reader.int64().toString(); - break; - case /* string message */ - 3: - message.message = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; + this.instance = result.instance; + this.schema = result.schema; + this.options = result.options; + this.errors = result.errors; + } + ValidatorResultError.prototype = new Error(); + ValidatorResultError.prototype.constructor = ValidatorResultError; + ValidatorResultError.prototype.name = "Validation Error"; + var SchemaError = exports2.SchemaError = function SchemaError2(msg, schema2) { + this.message = msg; + this.schema = schema2; + Error.call(this, msg); + Error.captureStackTrace(this, SchemaError2); + }; + SchemaError.prototype = Object.create( + Error.prototype, + { + constructor: { value: SchemaError, enumerable: false }, + name: { value: "SchemaError", enumerable: false } } - internalBinaryWrite(message, writer, options) { - if (message.ok !== false) - writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); - if (message.entryId !== "0") - writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId); - if (message.message !== "") - writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.message); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + ); + var SchemaContext = exports2.SchemaContext = function SchemaContext2(schema2, options, path4, base, schemas) { + this.schema = schema2; + this.options = options; + if (Array.isArray(path4)) { + this.path = path4; + this.propertyPath = path4.reduce(function(sum, item) { + return sum + makeSuffix(item); + }, "instance"); + } else { + this.propertyPath = path4; } + this.base = base; + this.schemas = schemas; }; - exports2.FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type(); - var GetCacheEntryDownloadURLRequest$Type = class extends runtime_5.MessageType { - constructor() { - super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [ - { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, - { - no: 2, - name: "key", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 3, - name: "restore_keys", - kind: "scalar", - repeat: 2, - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 4, - name: "version", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - } - ]); - } - create(value) { - const message = { key: "", restoreKeys: [], version: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; + SchemaContext.prototype.resolve = function resolve2(target) { + return uri.resolve(this.base, target); + }; + SchemaContext.prototype.makeChild = function makeChild(schema2, propertyName) { + var path4 = propertyName === void 0 ? this.path : this.path.concat([propertyName]); + var id = schema2.$id || schema2.id; + var base = uri.resolve(this.base, id || ""); + var ctx = new SchemaContext(schema2, this.options, path4, base, Object.create(this.schemas)); + if (id && !ctx.schemas[base]) { + ctx.schemas[base] = schema2; } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* github.actions.results.entities.v1.CacheMetadata metadata */ - 1: - message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); - break; - case /* string key */ - 2: - message.key = reader.string(); - break; - case /* repeated string restore_keys */ - 3: - message.restoreKeys.push(reader.string()); - break; - case /* string version */ - 4: - message.version = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } + return ctx; + }; + var FORMAT_REGEXPS = exports2.FORMAT_REGEXPS = { + // 7.3.1. 
Dates, Times, and Duration + "date-time": /^\d{4}-(?:0[0-9]{1}|1[0-2]{1})-(3[01]|0[1-9]|[12][0-9])[tT ](2[0-4]|[01][0-9]):([0-5][0-9]):(60|[0-5][0-9])(\.\d+)?([zZ]|[+-]([0-5][0-9]):(60|[0-5][0-9]))$/, + "date": /^\d{4}-(?:0[0-9]{1}|1[0-2]{1})-(3[01]|0[1-9]|[12][0-9])$/, + "time": /^(2[0-4]|[01][0-9]):([0-5][0-9]):(60|[0-5][0-9])$/, + "duration": /P(T\d+(H(\d+M(\d+S)?)?|M(\d+S)?|S)|\d+(D|M(\d+D)?|Y(\d+M(\d+D)?)?)(T\d+(H(\d+M(\d+S)?)?|M(\d+S)?|S))?|\d+W)/i, + // 7.3.2. Email Addresses + // TODO: fix the email production + "email": /^(?:[\w\!\#\$\%\&\'\*\+\-\/\=\?\^\`\{\|\}\~]+\.)*[\w\!\#\$\%\&\'\*\+\-\/\=\?\^\`\{\|\}\~]+@(?:(?:(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-](?!\.)){0,61}[a-zA-Z0-9]?\.)+[a-zA-Z0-9](?:[a-zA-Z0-9\-](?!$)){0,61}[a-zA-Z0-9]?)|(?:\[(?:(?:[01]?\d{1,2}|2[0-4]\d|25[0-5])\.){3}(?:[01]?\d{1,2}|2[0-4]\d|25[0-5])\]))$/, + "idn-email": /^("(?:[!#-\[\]-\u{10FFFF}]|\\[\t -\u{10FFFF}])*"|[!#-'*+\-/-9=?A-Z\^-\u{10FFFF}](?:\.?[!#-'*+\-/-9=?A-Z\^-\u{10FFFF}])*)@([!#-'*+\-/-9=?A-Z\^-\u{10FFFF}](?:\.?[!#-'*+\-/-9=?A-Z\^-\u{10FFFF}])*|\[[!-Z\^-\u{10FFFF}]*\])$/u, + // 7.3.3. Hostnames + // 7.3.4. IP Addresses + "ip-address": /^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/, + // FIXME whitespace is invalid + "ipv6": /^\s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?\s*$/, + // 7.3.5. Resource Identifiers + // TODO: A more accurate regular expression for "uri" goes: + // [A-Za-z][+\-.0-9A-Za-z]*:((/(/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?)?)?#(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|(/(/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~])|/?%[0-9A-Fa-f]{2}|[!$&-.0-;=?-Z_a-z~])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*(#(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*)?|/(/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+(:\d*)?|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?:\d*|\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)?)? 
+ "uri": /^[a-zA-Z][a-zA-Z0-9+.-]*:[^\s]*$/, + "uri-reference": /^(((([A-Za-z][+\-.0-9A-Za-z]*(:%[0-9A-Fa-f]{2}|:[!$&-.0-;=?-Z_a-z~]|[/?])|\?)(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|([A-Za-z][+\-.0-9A-Za-z]*:?)?)|([A-Za-z][+\-.0-9A-Za-z]*:)?\/((%[0-9A-Fa-f]{2}|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|(\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?)?))#(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|(([A-Za-z][+\-.0-9A-Za-z]*)?%[0-9A-Fa-f]{2}|[!$&-.0-9;=@_~]|[A-Za-z][+\-.0-9A-Za-z]*[!$&-*,;=@_~])(%[0-9A-Fa-f]{2}|[!$&-.0-9;=@-Z_a-z~])*((([/?](%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*)?#|[/?])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*)?|([A-Za-z][+\-.0-9A-Za-z]*(:%[0-9A-Fa-f]{2}|:[!$&-.0-;=?-Z_a-z~]|[/?])|\?)(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|([A-Za-z][+\-.0-9A-Za-z]*:)?\/((%[0-9A-Fa-f]{2}|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~])*|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~])+(:\d*)?|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?:\d*|\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~]+)?|[.0-:A-Fa-f]+)\])?)?|[A-Za-z][+\-.0-9A-Za-z]*:?)?$/, + "iri": /^[a-zA-Z][a-zA-Z0-9+.-]*:[^\s]*$/, + "iri-reference": /^(((([A-Za-z][+\-.0-9A-Za-z]*(:%[0-9A-Fa-f]{2}|:[!$&-.0-;=?-Z_a-z~-\u{10FFFF}]|[/?])|\?)(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|([A-Za-z][+\-.0-9A-Za-z]*:?)?)|([A-Za-z][+\-.0-9A-Za-z]*:)?\/((%[0-9A-Fa-f]{2}|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~-\u{10FFFF}])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~-\u{10FFFF}])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|(\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~-\u{10FFFF}])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?)?))#(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|(([A-Za-z][+\-.0-9A-Za-z]*)?%[0-9A-Fa-f]{2}|[!$&-.0-9;=@_~-\u{10FFFF}]|[A-Za-z][+\-.0-9A-Za-z]*[!$&-*,;=@_~-\u{10FFFF}])(%[0-9A-Fa-f]{2}|[!$&-.0-9;=@-Z_a-z~-\u{10FFFF}])*((([/?](%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*)?#|[/?])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*)?|([A-Za-z][+\-.0-9A-Za-z]*(:%[0-9A-Fa-f]{2}|:[!$&-.0-;=?-Z_a-z~-\u{10FFFF}]|[/?])|\?)(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|([A-Za-z][+\-.0-9A-Za-z]*:)?\/((%[0-9A-Fa-f]{2}|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~-\u{10FFFF}])+|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?)(:\d*)?[/?]|[!$&-.0-;=?-Z_a-z~-\u{10FFFF}])(%[0-9A-Fa-f]{2}|[!$&-;=?-Z_a-z~-\u{10FFFF}])*|\/((%[0-9A-Fa-f]{2}|[!$&-.0-9;=A-Z_a-z~-\u{10FFFF}])+(:\d*)?|(\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?:\d*|\[(([Vv][0-9A-Fa-f]+\.[!$&-.0-;=A-Z_a-z~-\u{10FFFF}]+)?|[.0-:A-Fa-f]+)\])?)?|[A-Za-z][+\-.0-9A-Za-z]*:?)?$/u, + "uuid": /^[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}$/i, + // 7.3.6. uri-template + "uri-template": /(%[0-9a-f]{2}|[!#$&(-;=?@\[\]_a-z~]|\{[!#&+,./;=?@|]?(%[0-9a-f]{2}|[0-9_a-z])(\.?(%[0-9a-f]{2}|[0-9_a-z]))*(:[1-9]\d{0,3}|\*)?(,(%[0-9a-f]{2}|[0-9_a-z])(\.?(%[0-9a-f]{2}|[0-9_a-z]))*(:[1-9]\d{0,3}|\*)?)*\})*/iu, + // 7.3.7. 
JSON Pointers + "json-pointer": /^(\/([\x00-\x2e0-@\[-}\x7f]|~[01])*)*$/iu, + "relative-json-pointer": /^\d+(#|(\/([\x00-\x2e0-@\[-}\x7f]|~[01])*)*)$/iu, + // hostname regex from: http://stackoverflow.com/a/1420225/5628 + "hostname": /^(?=.{1,255}$)[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?(?:\.[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?)*\.?$/, + "host-name": /^(?=.{1,255}$)[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?(?:\.[0-9A-Za-z](?:(?:[0-9A-Za-z]|-){0,61}[0-9A-Za-z])?)*\.?$/, + "utc-millisec": function(input) { + return typeof input === "string" && parseFloat(input) === parseInt(input, 10) && !isNaN(input); + }, + // 7.3.8. regex + "regex": function(input) { + var result = true; + try { + new RegExp(input); + } catch (e) { + result = false; } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.metadata) - cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); - if (message.key !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); - for (let i = 0; i < message.restoreKeys.length; i++) - writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]); - if (message.version !== "") - writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } + return result; + }, + // Other definitions + // "style" was removed from JSON Schema in draft-4 and is deprecated + "style": /[\r\n\t ]*[^\r\n\t ][^:]*:[\r\n\t ]*[^\r\n\t ;]*[\r\n\t ]*;?/, + // "color" was removed from JSON Schema in draft-4 and is deprecated + "color": /^(#?([0-9A-Fa-f]{3}){1,2}\b|aqua|black|blue|fuchsia|gray|green|lime|maroon|navy|olive|orange|purple|red|silver|teal|white|yellow|(rgb\(\s*\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\b\s*,\s*\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\b\s*,\s*\b([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\b\s*\))|(rgb\(\s*(\d?\d%|100%)+\s*,\s*(\d?\d%|100%)+\s*,\s*(\d?\d%|100%)+\s*\)))$/, + "phone": /^\+(?:[0-9] ?){6,14}[0-9]$/, + "alpha": /^[a-zA-Z]+$/, + "alphanumeric": /^[a-zA-Z0-9]+$/ }; - exports2.GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type(); - var GetCacheEntryDownloadURLResponse$Type = class extends runtime_5.MessageType { - constructor() { - super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [ - { - no: 1, - name: "ok", - kind: "scalar", - T: 8 - /*ScalarType.BOOL*/ - }, - { - no: 2, - name: "signed_download_url", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 3, - name: "matched_key", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - } - ]); - } - create(value) { - const message = { ok: false, signedDownloadUrl: "", matchedKey: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* bool ok */ - 1: - message.ok = reader.bool(); - break; - case /* string signed_download_url */ - 2: - message.signedDownloadUrl = reader.string(); - break; - case /* string matched_key */ - 3: - message.matchedKey = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } + FORMAT_REGEXPS.regexp = FORMAT_REGEXPS.regex; + FORMAT_REGEXPS.pattern = FORMAT_REGEXPS.regex; + FORMAT_REGEXPS.ipv4 = FORMAT_REGEXPS["ip-address"]; + exports2.isFormat = function isFormat(input, format, validator) { + if (typeof input === "string" && FORMAT_REGEXPS[format] !== void 0) { + if (FORMAT_REGEXPS[format] instanceof RegExp) { + return FORMAT_REGEXPS[format].test(input); } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.ok !== false) - writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); - if (message.signedDownloadUrl !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedDownloadUrl); - if (message.matchedKey !== "") - writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.matchedKey); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + if (typeof FORMAT_REGEXPS[format] === "function") { + return FORMAT_REGEXPS[format](input); + } + } else if (validator && validator.customFormats && typeof validator.customFormats[format] === "function") { + return validator.customFormats[format](input); } + return true; }; - exports2.GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type(); - exports2.CacheService = new runtime_rpc_1.ServiceType("github.actions.results.api.v1.CacheService", [ - { name: "CreateCacheEntry", options: {}, I: exports2.CreateCacheEntryRequest, O: exports2.CreateCacheEntryResponse }, - { name: "FinalizeCacheEntryUpload", options: {}, I: exports2.FinalizeCacheEntryUploadRequest, O: exports2.FinalizeCacheEntryUploadResponse }, - { name: "GetCacheEntryDownloadURL", options: {}, I: exports2.GetCacheEntryDownloadURLRequest, O: exports2.GetCacheEntryDownloadURLResponse } - ]); - } -}); - -// node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp-client.js -var require_cache_twirp_client = __commonJS({ - "node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp-client.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.CacheServiceClientProtobuf = exports2.CacheServiceClientJSON = void 0; - var cache_1 = require_cache4(); - var CacheServiceClientJSON = class { - constructor(rpc) { - this.rpc = rpc; - this.CreateCacheEntry.bind(this); - this.FinalizeCacheEntryUpload.bind(this); - this.GetCacheEntryDownloadURL.bind(this); + var makeSuffix = exports2.makeSuffix = function makeSuffix2(key) { + key = key.toString(); + if (!key.match(/[.\s\[\]]/) && !key.match(/^[\d]/)) { + return "." 
+ key; } - CreateCacheEntry(request2) { - const data = cache_1.CreateCacheEntryRequest.toJson(request2, { - useProtoFieldName: true, - emitDefaultValues: false - }); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/json", data); - return promise.then((data2) => cache_1.CreateCacheEntryResponse.fromJson(data2, { - ignoreUnknownFields: true - })); + if (key.match(/^\d+$/)) { + return "[" + key + "]"; } - FinalizeCacheEntryUpload(request2) { - const data = cache_1.FinalizeCacheEntryUploadRequest.toJson(request2, { - useProtoFieldName: true, - emitDefaultValues: false + return "[" + JSON.stringify(key) + "]"; + }; + exports2.deepCompareStrict = function deepCompareStrict(a, b) { + if (typeof a !== typeof b) { + return false; + } + if (Array.isArray(a)) { + if (!Array.isArray(b)) { + return false; + } + if (a.length !== b.length) { + return false; + } + return a.every(function(v, i) { + return deepCompareStrict(a[i], b[i]); }); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/json", data); - return promise.then((data2) => cache_1.FinalizeCacheEntryUploadResponse.fromJson(data2, { - ignoreUnknownFields: true - })); } - GetCacheEntryDownloadURL(request2) { - const data = cache_1.GetCacheEntryDownloadURLRequest.toJson(request2, { - useProtoFieldName: true, - emitDefaultValues: false + if (typeof a === "object") { + if (!a || !b) { + return a === b; + } + var aKeys = Object.keys(a); + var bKeys = Object.keys(b); + if (aKeys.length !== bKeys.length) { + return false; + } + return aKeys.every(function(v) { + return deepCompareStrict(a[v], b[v]); }); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/json", data); - return promise.then((data2) => cache_1.GetCacheEntryDownloadURLResponse.fromJson(data2, { - ignoreUnknownFields: true - })); } + return a === b; }; - exports2.CacheServiceClientJSON = CacheServiceClientJSON; - var CacheServiceClientProtobuf = class { - constructor(rpc) { - this.rpc = rpc; - this.CreateCacheEntry.bind(this); - this.FinalizeCacheEntryUpload.bind(this); - this.GetCacheEntryDownloadURL.bind(this); + function deepMerger(target, dst, e, i) { + if (typeof e === "object") { + dst[i] = deepMerge(target[i], e); + } else { + if (target.indexOf(e) === -1) { + dst.push(e); + } } - CreateCacheEntry(request2) { - const data = cache_1.CreateCacheEntryRequest.toBinary(request2); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/protobuf", data); - return promise.then((data2) => cache_1.CreateCacheEntryResponse.fromBinary(data2)); + } + function copyist(src, dst, key) { + dst[key] = src[key]; + } + function copyistWithDeepMerge(target, src, dst, key) { + if (typeof src[key] !== "object" || !src[key]) { + dst[key] = src[key]; + } else { + if (!target[key]) { + dst[key] = src[key]; + } else { + dst[key] = deepMerge(target[key], src[key]); + } } - FinalizeCacheEntryUpload(request2) { - const data = cache_1.FinalizeCacheEntryUploadRequest.toBinary(request2); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/protobuf", data); - return promise.then((data2) => cache_1.FinalizeCacheEntryUploadResponse.fromBinary(data2)); + } + function deepMerge(target, src) { + var array = Array.isArray(src); + var dst = array && [] || {}; + if (array) { + target = target || 
[]; + dst = dst.concat(target); + src.forEach(deepMerger.bind(null, target, dst)); + } else { + if (target && typeof target === "object") { + Object.keys(target).forEach(copyist.bind(null, target, dst)); + } + Object.keys(src).forEach(copyistWithDeepMerge.bind(null, target, src, dst)); } - GetCacheEntryDownloadURL(request2) { - const data = cache_1.GetCacheEntryDownloadURLRequest.toBinary(request2); - const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/protobuf", data); - return promise.then((data2) => cache_1.GetCacheEntryDownloadURLResponse.fromBinary(data2)); + return dst; + } + module2.exports.deepMerge = deepMerge; + exports2.objectGetPath = function objectGetPath(o, s) { + var parts = s.split("/").slice(1); + var k; + while (typeof (k = parts.shift()) == "string") { + var n = decodeURIComponent(k.replace(/~0/, "~").replace(/~1/g, "/")); + if (!(n in o)) return; + o = o[n]; } + return o; }; - exports2.CacheServiceClientProtobuf = CacheServiceClientProtobuf; - } -}); - -// node_modules/@actions/cache/lib/internal/shared/util.js -var require_util19 = __commonJS({ - "node_modules/@actions/cache/lib/internal/shared/util.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.maskSigUrl = maskSigUrl; - exports2.maskSecretUrls = maskSecretUrls; - var core_1 = require_core(); - function maskSigUrl(url) { - if (!url) - return; - try { - const parsedUrl = new URL(url); - const signature = parsedUrl.searchParams.get("sig"); - if (signature) { - (0, core_1.setSecret)(signature); - (0, core_1.setSecret)(encodeURIComponent(signature)); - } - } catch (error3) { - (0, core_1.debug)(`Failed to parse URL: ${url} ${error3 instanceof Error ? error3.message : String(error3)}`); - } + function pathEncoder(v) { + return "/" + encodeURIComponent(v).replace(/~/g, "%7E"); } - function maskSecretUrls(body) { - if (typeof body !== "object" || body === null) { - (0, core_1.debug)("body is not an object or is null"); - return; + exports2.encodePath = function encodePointer(a) { + return a.map(pathEncoder).join(""); + }; + exports2.getDecimalPlaces = function getDecimalPlaces(number) { + var decimalPlaces = 0; + if (isNaN(number)) return decimalPlaces; + if (typeof number !== "number") { + number = Number(number); } - if ("signed_upload_url" in body && typeof body.signed_upload_url === "string") { - maskSigUrl(body.signed_upload_url); + var parts = number.toString().split("e"); + if (parts.length === 2) { + if (parts[1][0] !== "-") { + return decimalPlaces; + } else { + decimalPlaces = Number(parts[1].slice(1)); + } } - if ("signed_download_url" in body && typeof body.signed_download_url === "string") { - maskSigUrl(body.signed_download_url); + var decimalParts = parts[0].split("."); + if (decimalParts.length === 2) { + decimalPlaces += decimalParts[1].length; } - } + return decimalPlaces; + }; + exports2.isSchema = function isSchema(val) { + return typeof val === "object" && val || typeof val === "boolean"; + }; } }); -// node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.js -var require_cacheTwirpClient = __commonJS({ - "node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.js"(exports2) { +// node_modules/jsonschema/lib/attribute.js +var require_attribute = __commonJS({ + "node_modules/jsonschema/lib/attribute.js"(exports2, module2) { "use strict"; - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function 
adopt(value) { - return value instanceof P ? value : new P(function(resolve2) { - resolve2(value); - }); - } - return new (P || (P = Promise))(function(resolve2, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); - } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); + var helpers = require_helpers3(); + var ValidatorResult = helpers.ValidatorResult; + var SchemaError = helpers.SchemaError; + var attribute = {}; + attribute.ignoreProperties = { + // informative properties + "id": true, + "default": true, + "description": true, + "title": true, + // arguments to other properties + "additionalItems": true, + "then": true, + "else": true, + // special-handled properties + "$schema": true, + "$ref": true, + "extends": true }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.internalCacheTwirpClient = internalCacheTwirpClient; - var core_1 = require_core(); - var user_agent_1 = require_user_agent(); - var errors_1 = require_errors3(); - var config_1 = require_config(); - var cacheUtils_1 = require_cacheUtils(); - var auth_1 = require_auth(); - var http_client_1 = require_lib(); - var cache_twirp_client_1 = require_cache_twirp_client(); - var util_1 = require_util19(); - var CacheServiceClient = class { - constructor(userAgent2, maxAttempts, baseRetryIntervalMilliseconds, retryMultiplier) { - this.maxAttempts = 5; - this.baseRetryIntervalMilliseconds = 3e3; - this.retryMultiplier = 1.5; - const token = (0, cacheUtils_1.getRuntimeToken)(); - this.baseUrl = (0, config_1.getCacheServiceURL)(); - if (maxAttempts) { - this.maxAttempts = maxAttempts; - } - if (baseRetryIntervalMilliseconds) { - this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds; - } - if (retryMultiplier) { - this.retryMultiplier = retryMultiplier; - } - this.httpClient = new http_client_1.HttpClient(userAgent2, [ - new auth_1.BearerCredentialHandler(token) - ]); + var validators = attribute.validators = {}; + validators.type = function validateType(instance, schema2, options, ctx) { + if (instance === void 0) { + return null; } - // This function satisfies the Rpc interface. It is compatible with the JSON - // JSON generated client. - request(service, method, contentType, data) { - return __awaiter2(this, void 0, void 0, function* () { - const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href; - (0, core_1.debug)(`[Request] ${method} ${url}`); - const headers = { - "Content-Type": contentType - }; - try { - const { body } = yield this.retryableRequest(() => __awaiter2(this, void 0, void 0, function* () { - return this.httpClient.post(url, JSON.stringify(data), headers); - })); - return body; - } catch (error3) { - throw new Error(`Failed to ${method}: ${error3.message}`); - } + var result = new ValidatorResult(instance, schema2, options, ctx); + var types = Array.isArray(schema2.type) ? schema2.type : [schema2.type]; + if (!types.some(this.testType.bind(this, instance, schema2, options, ctx))) { + var list = types.map(function(v) { + if (!v) return; + var id = v.$id || v.id; + return id ? 
"<" + id + ">" : v + ""; }); - } - retryableRequest(operation) { - return __awaiter2(this, void 0, void 0, function* () { - let attempt = 0; - let errorMessage = ""; - let rawBody = ""; - while (attempt < this.maxAttempts) { - let isRetryable = false; - try { - const response = yield operation(); - const statusCode = response.message.statusCode; - rawBody = yield response.readBody(); - (0, core_1.debug)(`[Response] - ${response.message.statusCode}`); - (0, core_1.debug)(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`); - const body = JSON.parse(rawBody); - (0, util_1.maskSecretUrls)(body); - (0, core_1.debug)(`Body: ${JSON.stringify(body, null, 2)}`); - if (this.isSuccessStatusCode(statusCode)) { - return { response, body }; - } - isRetryable = this.isRetryableHttpStatusCode(statusCode); - errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`; - if (body.msg) { - if (errors_1.UsageError.isUsageErrorMessage(body.msg)) { - throw new errors_1.UsageError(); - } - errorMessage = `${errorMessage}: ${body.msg}`; - } - if (statusCode === http_client_1.HttpCodes.TooManyRequests) { - const retryAfterHeader = response.message.headers["retry-after"]; - if (retryAfterHeader) { - const parsedSeconds = parseInt(retryAfterHeader, 10); - if (!isNaN(parsedSeconds) && parsedSeconds > 0) { - (0, core_1.warning)(`You've hit a rate limit, your rate limit will reset in ${parsedSeconds} seconds`); - } - } - throw new errors_1.RateLimitError(`Rate limited: ${errorMessage}`); - } - } catch (error3) { - if (error3 instanceof SyntaxError) { - (0, core_1.debug)(`Raw Body: ${rawBody}`); - } - if (error3 instanceof errors_1.UsageError) { - throw error3; - } - if (error3 instanceof errors_1.RateLimitError) { - throw error3; - } - if (errors_1.NetworkError.isNetworkErrorCode(error3 === null || error3 === void 0 ? void 0 : error3.code)) { - throw new errors_1.NetworkError(error3 === null || error3 === void 0 ? void 0 : error3.code); - } - isRetryable = true; - errorMessage = error3.message; - } - if (!isRetryable) { - throw new Error(`Received non-retryable error: ${errorMessage}`); - } - if (attempt + 1 === this.maxAttempts) { - throw new Error(`Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`); - } - const retryTimeMilliseconds = this.getExponentialRetryTimeMilliseconds(attempt); - (0, core_1.info)(`Attempt ${attempt + 1} of ${this.maxAttempts} failed with error: ${errorMessage}. 
Retrying request in ${retryTimeMilliseconds} ms...`); - yield this.sleep(retryTimeMilliseconds); - attempt++; - } - throw new Error(`Request failed`); + result.addError({ + name: "type", + argument: list, + message: "is not of a type(s) " + list }); } - isSuccessStatusCode(statusCode) { - if (!statusCode) - return false; - return statusCode >= 200 && statusCode < 300; + return result; + }; + function testSchemaNoThrow(instance, options, ctx, callback, schema2) { + var throwError2 = options.throwError; + var throwAll = options.throwAll; + options.throwError = false; + options.throwAll = false; + var res = this.validateSchema(instance, schema2, options, ctx); + options.throwError = throwError2; + options.throwAll = throwAll; + if (!res.valid && callback instanceof Function) { + callback(res); + } + return res.valid; + } + validators.anyOf = function validateAnyOf(instance, schema2, options, ctx) { + if (instance === void 0) { + return null; } - isRetryableHttpStatusCode(statusCode) { - if (!statusCode) - return false; - const retryableStatusCodes = [ - http_client_1.HttpCodes.BadGateway, - http_client_1.HttpCodes.GatewayTimeout, - http_client_1.HttpCodes.InternalServerError, - http_client_1.HttpCodes.ServiceUnavailable - ]; - return retryableStatusCodes.includes(statusCode); + var result = new ValidatorResult(instance, schema2, options, ctx); + var inner = new ValidatorResult(instance, schema2, options, ctx); + if (!Array.isArray(schema2.anyOf)) { + throw new SchemaError("anyOf must be an array"); } - sleep(milliseconds) { - return __awaiter2(this, void 0, void 0, function* () { - return new Promise((resolve2) => setTimeout(resolve2, milliseconds)); + if (!schema2.anyOf.some( + testSchemaNoThrow.bind( + this, + instance, + options, + ctx, + function(res) { + inner.importErrors(res); + } + ) + )) { + var list = schema2.anyOf.map(function(v, i) { + var id = v.$id || v.id; + if (id) return "<" + id + ">"; + return v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]"; }); - } - getExponentialRetryTimeMilliseconds(attempt) { - if (attempt < 0) { - throw new Error("attempt should be a positive integer"); - } - if (attempt === 0) { - return this.baseRetryIntervalMilliseconds; + if (options.nestedErrors) { + result.importErrors(inner); } - const minTime = this.baseRetryIntervalMilliseconds * Math.pow(this.retryMultiplier, attempt); - const maxTime = minTime * this.retryMultiplier; - return Math.trunc(Math.random() * (maxTime - minTime) + minTime); + result.addError({ + name: "anyOf", + argument: list, + message: "is not any of " + list.join(",") + }); } + return result; }; - function internalCacheTwirpClient(options) { - const client = new CacheServiceClient((0, user_agent_1.getUserAgentString)(), options === null || options === void 0 ? void 0 : options.maxAttempts, options === null || options === void 0 ? void 0 : options.retryIntervalMs, options === null || options === void 0 ? void 0 : options.retryMultiplier); - return new cache_twirp_client_1.CacheServiceClientJSON(client); - } - } -}); - -// node_modules/@actions/cache/lib/internal/tar.js -var require_tar = __commonJS({ - "node_modules/@actions/cache/lib/internal/tar.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; + validators.allOf = function validateAllOf(instance, schema2, options, ctx) { + if (instance === void 0) { + return null; } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); - } - __setModuleDefault2(result, mod); - return result; - }; - })(); - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve2) { - resolve2(value); - }); + if (!Array.isArray(schema2.allOf)) { + throw new SchemaError("allOf must be an array"); } - return new (P || (P = Promise))(function(resolve2, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? 
resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + var result = new ValidatorResult(instance, schema2, options, ctx); + var self2 = this; + schema2.allOf.forEach(function(v, i) { + var valid2 = self2.validateSchema(instance, v, options, ctx); + if (!valid2.valid) { + var id = v.$id || v.id; + var msg = id || v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]"; + result.addError({ + name: "allOf", + argument: { id: msg, length: valid2.errors.length, valid: valid2 }, + message: "does not match allOf schema " + msg + " with " + valid2.errors.length + " error[s]:" + }); + result.importErrors(valid2); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); }); + return result; }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.listTar = listTar; - exports2.extractTar = extractTar2; - exports2.createTar = createTar; - var exec_1 = require_exec(); - var io4 = __importStar2(require_io()); - var fs_1 = require("fs"); - var path3 = __importStar2(require("path")); - var utils = __importStar2(require_cacheUtils()); - var constants_1 = require_constants12(); - var IS_WINDOWS = process.platform === "win32"; - function getTarPath() { - return __awaiter2(this, void 0, void 0, function* () { - switch (process.platform) { - case "win32": { - const gnuTar = yield utils.getGnuTarPathOnWindows(); - const systemTar = constants_1.SystemTarPathOnWindows; - if (gnuTar) { - return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; - } else if ((0, fs_1.existsSync)(systemTar)) { - return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; - } - break; - } - case "darwin": { - const gnuTar = yield io4.which("gtar", false); - if (gnuTar) { - return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; - } else { - return { - path: yield io4.which("tar", true), - type: constants_1.ArchiveToolType.BSD - }; - } + validators.oneOf = function validateOneOf(instance, schema2, options, ctx) { + if (instance === void 0) { + return null; + } + if (!Array.isArray(schema2.oneOf)) { + throw new SchemaError("oneOf must be an array"); + } + var result = new ValidatorResult(instance, schema2, options, ctx); + var inner = new ValidatorResult(instance, schema2, options, ctx); + var count = schema2.oneOf.filter( + testSchemaNoThrow.bind( + this, + instance, + options, + ctx, + function(res) { + inner.importErrors(res); } - default: - break; - } - return { - path: yield io4.which("tar", true), - type: constants_1.ArchiveToolType.GNU - }; + ) + ).length; + var list = schema2.oneOf.map(function(v, i) { + var id = v.$id || v.id; + return id || v.title && JSON.stringify(v.title) || v["$ref"] && "<" + v["$ref"] + ">" || "[subschema " + i + "]"; }); - } - function getTarArgs(tarPath_1, compressionMethod_1, type_1) { - return __awaiter2(this, arguments, void 0, function* (tarPath, compressionMethod, type2, archivePath = "") { - const args = [`"${tarPath.path}"`]; - const cacheFileName = utils.getCacheFileName(compressionMethod); - const tarFile = "cache.tar"; - const workingDirectory = getWorkingDirectory(); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; - switch (type2) { - case "create": - args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path3.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? 
tarFile : cacheFileName.replace(new RegExp(`\\${path3.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path3.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); - break; - case "extract": - args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path3.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path3.sep}`, "g"), "/")); - break; - case "list": - args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path3.sep}`, "g"), "/"), "-P"); - break; - } - if (tarPath.type === constants_1.ArchiveToolType.GNU) { - switch (process.platform) { - case "win32": - args.push("--force-local"); - break; - case "darwin": - args.push("--delay-directory-restore"); - break; - } + if (count !== 1) { + if (options.nestedErrors) { + result.importErrors(inner); } - return args; - }); + result.addError({ + name: "oneOf", + argument: list, + message: "is not exactly one from " + list.join(",") + }); + } + return result; + }; + validators.if = function validateIf(instance, schema2, options, ctx) { + if (instance === void 0) return null; + if (!helpers.isSchema(schema2.if)) throw new Error('Expected "if" keyword to be a schema'); + var ifValid = testSchemaNoThrow.call(this, instance, options, ctx, null, schema2.if); + var result = new ValidatorResult(instance, schema2, options, ctx); + var res; + if (ifValid) { + if (schema2.then === void 0) return; + if (!helpers.isSchema(schema2.then)) throw new Error('Expected "then" keyword to be a schema'); + res = this.validateSchema(instance, schema2.then, options, ctx.makeChild(schema2.then)); + result.importErrors(res); + } else { + if (schema2.else === void 0) return; + if (!helpers.isSchema(schema2.else)) throw new Error('Expected "else" keyword to be a schema'); + res = this.validateSchema(instance, schema2.else, options, ctx.makeChild(schema2.else)); + result.importErrors(res); + } + return result; + }; + function getEnumerableProperty(object, key) { + if (Object.hasOwnProperty.call(object, key)) return object[key]; + if (!(key in object)) return; + while (object = Object.getPrototypeOf(object)) { + if (Object.propertyIsEnumerable.call(object, key)) return object[key]; + } } - function getCommands(compressionMethod_1, type_1) { - return __awaiter2(this, arguments, void 0, function* (compressionMethod, type2, archivePath = "") { - let args; - const tarPath = yield getTarPath(); - const tarArgs = yield getTarArgs(tarPath, compressionMethod, type2, archivePath); - const compressionArgs = type2 !== "create" ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) : yield getCompressionProgram(tarPath, compressionMethod); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; - if (BSD_TAR_ZSTD && type2 !== "create") { - args = [[...compressionArgs].join(" "), [...tarArgs].join(" ")]; - } else { - args = [[...tarArgs].join(" "), [...compressionArgs].join(" ")]; + validators.propertyNames = function validatePropertyNames(instance, schema2, options, ctx) { + if (!this.types.object(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var subschema = schema2.propertyNames !== void 0 ? 
schema2.propertyNames : {}; + if (!helpers.isSchema(subschema)) throw new SchemaError('Expected "propertyNames" to be a schema (object or boolean)'); + for (var property in instance) { + if (getEnumerableProperty(instance, property) !== void 0) { + var res = this.validateSchema(property, subschema, options, ctx.makeChild(subschema)); + result.importErrors(res); } - if (BSD_TAR_ZSTD) { - return args; + } + return result; + }; + validators.properties = function validateProperties(instance, schema2, options, ctx) { + if (!this.types.object(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var properties = schema2.properties || {}; + for (var property in properties) { + var subschema = properties[property]; + if (subschema === void 0) { + continue; + } else if (subschema === null) { + throw new SchemaError('Unexpected null, expected schema in "properties"'); } - return [args.join(" ")]; - }); - } - function getWorkingDirectory() { - var _a; - return (_a = process.env["GITHUB_WORKSPACE"]) !== null && _a !== void 0 ? _a : process.cwd(); - } - function getDecompressionProgram(tarPath, compressionMethod, archivePath) { - return __awaiter2(this, void 0, void 0, function* () { - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return BSD_TAR_ZSTD ? [ - "zstd -d --long=30 --force -o", - constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path3.sep}`, "g"), "/") - ] : [ - "--use-compress-program", - IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return BSD_TAR_ZSTD ? [ - "zstd -d --force -o", - constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path3.sep}`, "g"), "/") - ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -d"' : "unzstd"]; - default: - return ["-z"]; + if (typeof options.preValidateProperty == "function") { + options.preValidateProperty(instance, property, subschema, options, ctx); } - }); - } - function getCompressionProgram(tarPath, compressionMethod) { - return __awaiter2(this, void 0, void 0, function* () { - const cacheFileName = utils.getCacheFileName(compressionMethod); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return BSD_TAR_ZSTD ? [ - "zstd -T0 --long=30 --force -o", - cacheFileName.replace(new RegExp(`\\${path3.sep}`, "g"), "/"), - constants_1.TarFilename - ] : [ - "--use-compress-program", - IS_WINDOWS ? '"zstd -T0 --long=30"' : "zstdmt --long=30" - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return BSD_TAR_ZSTD ? [ - "zstd -T0 --force -o", - cacheFileName.replace(new RegExp(`\\${path3.sep}`, "g"), "/"), - constants_1.TarFilename - ] : ["--use-compress-program", IS_WINDOWS ? 
'"zstd -T0"' : "zstdmt"]; - default: - return ["-z"]; + var prop = getEnumerableProperty(instance, property); + var res = this.validateSchema(prop, subschema, options, ctx.makeChild(subschema, property)); + if (res.instance !== result.instance[property]) result.instance[property] = res.instance; + result.importErrors(res); + } + return result; + }; + function testAdditionalProperty(instance, schema2, options, ctx, property, result) { + if (!this.types.object(instance)) return; + if (schema2.properties && schema2.properties[property] !== void 0) { + return; + } + if (schema2.additionalProperties === false) { + result.addError({ + name: "additionalProperties", + argument: property, + message: "is not allowed to have the additional property " + JSON.stringify(property) + }); + } else { + var additionalProperties = schema2.additionalProperties || {}; + if (typeof options.preValidateProperty == "function") { + options.preValidateProperty(instance, property, additionalProperties, options, ctx); } - }); + var res = this.validateSchema(instance[property], additionalProperties, options, ctx.makeChild(additionalProperties, property)); + if (res.instance !== result.instance[property]) result.instance[property] = res.instance; + result.importErrors(res); + } } - function execCommands(commands, cwd) { - return __awaiter2(this, void 0, void 0, function* () { - for (const command of commands) { + validators.patternProperties = function validatePatternProperties(instance, schema2, options, ctx) { + if (!this.types.object(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var patternProperties = schema2.patternProperties || {}; + for (var property in instance) { + var test = true; + for (var pattern in patternProperties) { + var subschema = patternProperties[pattern]; + if (subschema === void 0) { + continue; + } else if (subschema === null) { + throw new SchemaError('Unexpected null, expected schema in "patternProperties"'); + } try { - yield (0, exec_1.exec)(command, void 0, { - cwd, - env: Object.assign(Object.assign({}, process.env), { MSYS: "winsymlinks:nativestrict" }) - }); - } catch (error3) { - throw new Error(`${command.split(" ")[0]} failed with error: ${error3 === null || error3 === void 0 ? 
void 0 : error3.message}`); + var regexp = new RegExp(pattern, "u"); + } catch (_e) { + regexp = new RegExp(pattern); + } + if (!regexp.test(property)) { + continue; + } + test = false; + if (typeof options.preValidateProperty == "function") { + options.preValidateProperty(instance, property, subschema, options, ctx); } + var res = this.validateSchema(instance[property], subschema, options, ctx.makeChild(subschema, property)); + if (res.instance !== result.instance[property]) result.instance[property] = res.instance; + result.importErrors(res); } - }); - } - function listTar(archivePath, compressionMethod) { - return __awaiter2(this, void 0, void 0, function* () { - const commands = yield getCommands(compressionMethod, "list", archivePath); - yield execCommands(commands); - }); - } - function extractTar2(archivePath, compressionMethod) { - return __awaiter2(this, void 0, void 0, function* () { - const workingDirectory = getWorkingDirectory(); - yield io4.mkdirP(workingDirectory); - const commands = yield getCommands(compressionMethod, "extract", archivePath); - yield execCommands(commands); - }); - } - function createTar(archiveFolder, sourceDirectories, compressionMethod) { - return __awaiter2(this, void 0, void 0, function* () { - (0, fs_1.writeFileSync)(path3.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); - const commands = yield getCommands(compressionMethod, "create"); - yield execCommands(commands, archiveFolder); - }); - } - } -}); - -// node_modules/@actions/cache/lib/cache.js -var require_cache5 = __commonJS({ - "node_modules/@actions/cache/lib/cache.js"(exports2) { - "use strict"; - var __createBinding2 = exports2 && exports2.__createBinding || (Object.create ? (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { - return m[k]; - } }; - } - Object.defineProperty(o, k2, desc); - }) : (function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - })); - var __setModuleDefault2 = exports2 && exports2.__setModuleDefault || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }); - var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ (function() { - var ownKeys2 = function(o) { - ownKeys2 = Object.getOwnPropertyNames || function(o2) { - var ar = []; - for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; - return ar; - }; - return ownKeys2(o); - }; - return function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k = ownKeys2(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]); + if (test) { + testAdditionalProperty.call(this, instance, schema2, options, ctx, property, result); } - __setModuleDefault2(result, mod); - return result; - }; - })(); - var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? 
value : new P(function(resolve2) { - resolve2(value); + } + return result; + }; + validators.additionalProperties = function validateAdditionalProperties(instance, schema2, options, ctx) { + if (!this.types.object(instance)) return; + if (schema2.patternProperties) { + return null; + } + var result = new ValidatorResult(instance, schema2, options, ctx); + for (var property in instance) { + testAdditionalProperty.call(this, instance, schema2, options, ctx, property, result); + } + return result; + }; + validators.minProperties = function validateMinProperties(instance, schema2, options, ctx) { + if (!this.types.object(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var keys = Object.keys(instance); + if (!(keys.length >= schema2.minProperties)) { + result.addError({ + name: "minProperties", + argument: schema2.minProperties, + message: "does not meet minimum property length of " + schema2.minProperties }); } - return new (P || (P = Promise))(function(resolve2, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } + return result; + }; + validators.maxProperties = function validateMaxProperties(instance, schema2, options, ctx) { + if (!this.types.object(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var keys = Object.keys(instance); + if (!(keys.length <= schema2.maxProperties)) { + result.addError({ + name: "maxProperties", + argument: schema2.maxProperties, + message: "does not meet maximum property length of " + schema2.maxProperties + }); + } + return result; + }; + validators.items = function validateItems(instance, schema2, options, ctx) { + var self2 = this; + if (!this.types.array(instance)) return; + if (schema2.items === void 0) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + instance.every(function(value, i) { + if (Array.isArray(schema2.items)) { + var items = schema2.items[i] === void 0 ? schema2.additionalItems : schema2.items[i]; + } else { + var items = schema2.items; } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } + if (items === void 0) { + return true; } - function step(result) { - result.done ? 
resolve2(result.value) : adopt(result.value).then(fulfilled, rejected); + if (items === false) { + result.addError({ + name: "items", + message: "additionalItems not permitted" + }); + return false; } - step((generator = generator.apply(thisArg, _arguments || [])).next()); + var res = self2.validateSchema(value, items, options, ctx.makeChild(items, i)); + if (res.instance !== result.instance[i]) result.instance[i] = res.instance; + result.importErrors(res); + return true; }); + return result; }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.FinalizeCacheError = exports2.ReserveCacheError = exports2.ValidationError = void 0; - exports2.isFeatureAvailable = isFeatureAvailable; - exports2.restoreCache = restoreCache3; - exports2.saveCache = saveCache3; - var core12 = __importStar2(require_core()); - var path3 = __importStar2(require("path")); - var utils = __importStar2(require_cacheUtils()); - var cacheHttpClient = __importStar2(require_cacheHttpClient()); - var cacheTwirpClient = __importStar2(require_cacheTwirpClient()); - var config_1 = require_config(); - var tar_1 = require_tar(); - var http_client_1 = require_lib(); - var ValidationError = class _ValidationError extends Error { - constructor(message) { - super(message); - this.name = "ValidationError"; - Object.setPrototypeOf(this, _ValidationError.prototype); + validators.contains = function validateContains(instance, schema2, options, ctx) { + var self2 = this; + if (!this.types.array(instance)) return; + if (schema2.contains === void 0) return; + if (!helpers.isSchema(schema2.contains)) throw new Error('Expected "contains" keyword to be a schema'); + var result = new ValidatorResult(instance, schema2, options, ctx); + var count = instance.some(function(value, i) { + var res = self2.validateSchema(value, schema2.contains, options, ctx.makeChild(schema2.contains, i)); + return res.errors.length === 0; + }); + if (count === false) { + result.addError({ + name: "contains", + argument: schema2.contains, + message: "must contain an item matching given schema" + }); } + return result; }; - exports2.ValidationError = ValidationError; - var ReserveCacheError = class _ReserveCacheError extends Error { - constructor(message) { - super(message); - this.name = "ReserveCacheError"; - Object.setPrototypeOf(this, _ReserveCacheError.prototype); + validators.minimum = function validateMinimum(instance, schema2, options, ctx) { + if (!this.types.number(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + if (schema2.exclusiveMinimum && schema2.exclusiveMinimum === true) { + if (!(instance > schema2.minimum)) { + result.addError({ + name: "minimum", + argument: schema2.minimum, + message: "must be greater than " + schema2.minimum + }); + } + } else { + if (!(instance >= schema2.minimum)) { + result.addError({ + name: "minimum", + argument: schema2.minimum, + message: "must be greater than or equal to " + schema2.minimum + }); + } } + return result; }; - exports2.ReserveCacheError = ReserveCacheError; - var FinalizeCacheError = class _FinalizeCacheError extends Error { - constructor(message) { - super(message); - this.name = "FinalizeCacheError"; - Object.setPrototypeOf(this, _FinalizeCacheError.prototype); + validators.maximum = function validateMaximum(instance, schema2, options, ctx) { + if (!this.types.number(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + if (schema2.exclusiveMaximum && schema2.exclusiveMaximum === true) { + if (!(instance 
< schema2.maximum)) { + result.addError({ + name: "maximum", + argument: schema2.maximum, + message: "must be less than " + schema2.maximum + }); + } + } else { + if (!(instance <= schema2.maximum)) { + result.addError({ + name: "maximum", + argument: schema2.maximum, + message: "must be less than or equal to " + schema2.maximum + }); + } } + return result; }; - exports2.FinalizeCacheError = FinalizeCacheError; - function checkPaths(paths) { - if (!paths || paths.length === 0) { - throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); + validators.exclusiveMinimum = function validateExclusiveMinimum(instance, schema2, options, ctx) { + if (typeof schema2.exclusiveMinimum === "boolean") return; + if (!this.types.number(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var valid2 = instance > schema2.exclusiveMinimum; + if (!valid2) { + result.addError({ + name: "exclusiveMinimum", + argument: schema2.exclusiveMinimum, + message: "must be strictly greater than " + schema2.exclusiveMinimum + }); } - } - function checkKey(key) { - if (key.length > 512) { - throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); + return result; + }; + validators.exclusiveMaximum = function validateExclusiveMaximum(instance, schema2, options, ctx) { + if (typeof schema2.exclusiveMaximum === "boolean") return; + if (!this.types.number(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var valid2 = instance < schema2.exclusiveMaximum; + if (!valid2) { + result.addError({ + name: "exclusiveMaximum", + argument: schema2.exclusiveMaximum, + message: "must be strictly less than " + schema2.exclusiveMaximum + }); } - const regex = /^[^,]*$/; - if (!regex.test(key)) { - throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); + return result; + }; + var validateMultipleOfOrDivisbleBy = function validateMultipleOfOrDivisbleBy2(instance, schema2, options, ctx, validationType, errorMessage) { + if (!this.types.number(instance)) return; + var validationArgument = schema2[validationType]; + if (validationArgument == 0) { + throw new SchemaError(validationType + " cannot be zero"); } - } - function isFeatureAvailable() { - const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - switch (cacheServiceVersion) { - case "v2": - return !!process.env["ACTIONS_RESULTS_URL"]; - case "v1": - default: - return !!process.env["ACTIONS_CACHE_URL"]; + var result = new ValidatorResult(instance, schema2, options, ctx); + var instanceDecimals = helpers.getDecimalPlaces(instance); + var divisorDecimals = helpers.getDecimalPlaces(validationArgument); + var maxDecimals = Math.max(instanceDecimals, divisorDecimals); + var multiplier = Math.pow(10, maxDecimals); + if (Math.round(instance * multiplier) % Math.round(validationArgument * multiplier) !== 0) { + result.addError({ + name: validationType, + argument: validationArgument, + message: errorMessage + JSON.stringify(validationArgument) + }); } - } - function restoreCache3(paths_1, primaryKey_1, restoreKeys_1, options_1) { - return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { - const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core12.debug(`Cache service version: ${cacheServiceVersion}`); - checkPaths(paths); - switch (cacheServiceVersion) { - case "v2": - return yield restoreCacheV2(paths, 
primaryKey, restoreKeys, options, enableCrossOsArchive); - case "v1": - default: - return yield restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); - } - }); - } - function restoreCacheV1(paths_1, primaryKey_1, restoreKeys_1, options_1) { - return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { - restoreKeys = restoreKeys || []; - const keys = [primaryKey, ...restoreKeys]; - core12.debug("Resolved Keys:"); - core12.debug(JSON.stringify(keys)); - if (keys.length > 10) { - throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); - } - for (const key of keys) { - checkKey(key); - } - const compressionMethod = yield utils.getCompressionMethod(); - let archivePath = ""; - try { - const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod, - enableCrossOsArchive - }); - if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - return void 0; - } - if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core12.info("Lookup only - skipping download"); - return cacheEntry.cacheKey; - } - archivePath = path3.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive Path: ${archivePath}`); - yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core12.isDebug()) { - yield (0, tar_1.listTar)(archivePath, compressionMethod); - } - const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core12.info("Cache restored successfully"); - return cacheEntry.cacheKey; - } catch (error3) { - const typedError = error3; - if (typedError.name === ValidationError.name) { - throw error3; - } else { - if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to restore: ${error3.message}`); - } else { - core12.warning(`Failed to restore: ${error3.message}`); - } - } - } finally { - try { - yield utils.unlinkFile(archivePath); - } catch (error3) { - core12.debug(`Failed to delete archive: ${error3}`); + return result; + }; + validators.multipleOf = function validateMultipleOf(instance, schema2, options, ctx) { + return validateMultipleOfOrDivisbleBy.call(this, instance, schema2, options, ctx, "multipleOf", "is not a multiple of (divisible by) "); + }; + validators.divisibleBy = function validateDivisibleBy(instance, schema2, options, ctx) { + return validateMultipleOfOrDivisbleBy.call(this, instance, schema2, options, ctx, "divisibleBy", "is not divisible by (multiple of) "); + }; + validators.required = function validateRequired(instance, schema2, options, ctx) { + var result = new ValidatorResult(instance, schema2, options, ctx); + if (instance === void 0 && schema2.required === true) { + result.addError({ + name: "required", + message: "is required" + }); + } else if (this.types.object(instance) && Array.isArray(schema2.required)) { + schema2.required.forEach(function(n) { + if (getEnumerableProperty(instance, n) === void 0) { + result.addError({ + name: "required", + argument: n, + message: "requires property " + JSON.stringify(n) + }); } + }); + } + return result; + }; + validators.pattern = function validatePattern(instance, 
schema2, options, ctx) { + if (!this.types.string(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var pattern = schema2.pattern; + try { + var regexp = new RegExp(pattern, "u"); + } catch (_e) { + regexp = new RegExp(pattern); + } + if (!instance.match(regexp)) { + result.addError({ + name: "pattern", + argument: schema2.pattern, + message: "does not match pattern " + JSON.stringify(schema2.pattern.toString()) + }); + } + return result; + }; + validators.format = function validateFormat(instance, schema2, options, ctx) { + if (instance === void 0) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + if (!result.disableFormat && !helpers.isFormat(instance, schema2.format, this)) { + result.addError({ + name: "format", + argument: schema2.format, + message: "does not conform to the " + JSON.stringify(schema2.format) + " format" + }); + } + return result; + }; + validators.minLength = function validateMinLength(instance, schema2, options, ctx) { + if (!this.types.string(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var hsp = instance.match(/[\uDC00-\uDFFF]/g); + var length = instance.length - (hsp ? hsp.length : 0); + if (!(length >= schema2.minLength)) { + result.addError({ + name: "minLength", + argument: schema2.minLength, + message: "does not meet minimum length of " + schema2.minLength + }); + } + return result; + }; + validators.maxLength = function validateMaxLength(instance, schema2, options, ctx) { + if (!this.types.string(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + var hsp = instance.match(/[\uDC00-\uDFFF]/g); + var length = instance.length - (hsp ? hsp.length : 0); + if (!(length <= schema2.maxLength)) { + result.addError({ + name: "maxLength", + argument: schema2.maxLength, + message: "does not meet maximum length of " + schema2.maxLength + }); + } + return result; + }; + validators.minItems = function validateMinItems(instance, schema2, options, ctx) { + if (!this.types.array(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + if (!(instance.length >= schema2.minItems)) { + result.addError({ + name: "minItems", + argument: schema2.minItems, + message: "does not meet minimum length of " + schema2.minItems + }); + } + return result; + }; + validators.maxItems = function validateMaxItems(instance, schema2, options, ctx) { + if (!this.types.array(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + if (!(instance.length <= schema2.maxItems)) { + result.addError({ + name: "maxItems", + argument: schema2.maxItems, + message: "does not meet maximum length of " + schema2.maxItems + }); + } + return result; + }; + function testArrays(v, i, a) { + var j, len = a.length; + for (j = i + 1, len; j < len; j++) { + if (helpers.deepCompareStrict(v, a[j])) { + return false; } - return void 0; - }); + } + return true; } - function restoreCacheV2(paths_1, primaryKey_1, restoreKeys_1, options_1) { - return __awaiter2(this, arguments, void 0, function* (paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { - options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); - restoreKeys = restoreKeys || []; - const keys = [primaryKey, ...restoreKeys]; - core12.debug("Resolved Keys:"); - core12.debug(JSON.stringify(keys)); - if (keys.length > 10) { - throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); + 
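// Illustrative sketch (not part of this diff): testArrays above backs the
// uniqueItems validator that follows. Each element is compared against the
// rest of the array with helpers.deepCompareStrict, so duplicates are caught
// structurally rather than by reference. Assuming the library's public
// validate export, assembled further down in this bundle:
//   validate([1, 2, 3], { uniqueItems: true }).errors;              // []
//   validate([{ a: 1 }, { a: 1 }], { uniqueItems: true })
//     .errors.map(function (e) { return e.name; });                 // ["uniqueItems"]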
validators.uniqueItems = function validateUniqueItems(instance, schema2, options, ctx) { + if (schema2.uniqueItems !== true) return; + if (!this.types.array(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + if (!instance.every(testArrays)) { + result.addError({ + name: "uniqueItems", + message: "contains duplicate item" + }); + } + return result; + }; + validators.dependencies = function validateDependencies(instance, schema2, options, ctx) { + if (!this.types.object(instance)) return; + var result = new ValidatorResult(instance, schema2, options, ctx); + for (var property in schema2.dependencies) { + if (instance[property] === void 0) { + continue; } - for (const key of keys) { - checkKey(key); + var dep = schema2.dependencies[property]; + var childContext = ctx.makeChild(dep, property); + if (typeof dep == "string") { + dep = [dep]; } - let archivePath = ""; - try { - const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); - const compressionMethod = yield utils.getCompressionMethod(); - const request2 = { - key: primaryKey, - restoreKeys, - version: utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive) - }; - const response = yield twirpClient.GetCacheEntryDownloadURL(request2); - if (!response.ok) { - core12.debug(`Cache not found for version ${request2.version} of keys: ${keys.join(", ")}`); - return void 0; - } - const isRestoreKeyMatch = request2.key !== response.matchedKey; - if (isRestoreKeyMatch) { - core12.info(`Cache hit for restore-key: ${response.matchedKey}`); - } else { - core12.info(`Cache hit for: ${response.matchedKey}`); - } - if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core12.info("Lookup only - skipping download"); - return response.matchedKey; - } - archivePath = path3.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive path: ${archivePath}`); - core12.debug(`Starting download of archive to: ${archivePath}`); - yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); - const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - if (core12.isDebug()) { - yield (0, tar_1.listTar)(archivePath, compressionMethod); - } - yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core12.info("Cache restored successfully"); - return response.matchedKey; - } catch (error3) { - const typedError = error3; - if (typedError.name === ValidationError.name) { - throw error3; - } else { - if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to restore: ${error3.message}`); - } else { - core12.warning(`Failed to restore: ${error3.message}`); - } - } - } finally { - try { - if (archivePath) { - yield utils.unlinkFile(archivePath); + if (Array.isArray(dep)) { + dep.forEach(function(prop) { + if (instance[prop] === void 0) { + result.addError({ + // FIXME there's two different "dependencies" errors here with slightly different outputs + // Can we make these the same? Or should we create different error types? 
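// Illustrative sketch (not part of this diff): the two "dependencies" shapes
// the FIXME above refers to. The array form requires co-present properties;
// the schema form re-validates the whole instance against a subschema when
// the trigger property exists. Assuming the public validate export:
//   validate({ card: "x" }, { dependencies: { card: ["billing"] } })
//     // one "dependencies" error: property billing not found
//   validate({ card: "x" }, { dependencies: { card: { required: ["billing"] } } })
//     // one "dependencies" error, plus the subschema's errors via importErrors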
+ name: "dependencies", + argument: childContext.propertyPath, + message: "property " + prop + " not found, required by " + childContext.propertyPath + }); } - } catch (error3) { - core12.debug(`Failed to delete archive: ${error3}`); + }); + } else { + var res = this.validateSchema(instance, dep, options, childContext); + if (result.instance !== res.instance) result.instance = res.instance; + if (res && res.errors.length) { + result.addError({ + name: "dependencies", + argument: childContext.propertyPath, + message: "does not meet dependency required by " + childContext.propertyPath + }); + result.importErrors(res); } } - return void 0; - }); - } - function saveCache3(paths_1, key_1, options_1) { - return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { - const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); - core12.debug(`Cache service version: ${cacheServiceVersion}`); - checkPaths(paths); - checkKey(key); - switch (cacheServiceVersion) { - case "v2": - return yield saveCacheV2(paths, key, options, enableCrossOsArchive); - case "v1": - default: - return yield saveCacheV1(paths, key, options, enableCrossOsArchive); - } + } + return result; + }; + validators["enum"] = function validateEnum(instance, schema2, options, ctx) { + if (instance === void 0) { + return null; + } + if (!Array.isArray(schema2["enum"])) { + throw new SchemaError("enum expects an array", schema2); + } + var result = new ValidatorResult(instance, schema2, options, ctx); + if (!schema2["enum"].some(helpers.deepCompareStrict.bind(null, instance))) { + result.addError({ + name: "enum", + argument: schema2["enum"], + message: "is not one of enum values: " + schema2["enum"].map(String).join(",") + }); + } + return result; + }; + validators["const"] = function validateEnum(instance, schema2, options, ctx) { + if (instance === void 0) { + return null; + } + var result = new ValidatorResult(instance, schema2, options, ctx); + if (!helpers.deepCompareStrict(schema2["const"], instance)) { + result.addError({ + name: "const", + argument: schema2["const"], + message: "does not exactly match expected constant: " + schema2["const"] + }); + } + return result; + }; + validators.not = validators.disallow = function validateNot(instance, schema2, options, ctx) { + var self2 = this; + if (instance === void 0) return null; + var result = new ValidatorResult(instance, schema2, options, ctx); + var notTypes = schema2.not || schema2.disallow; + if (!notTypes) return null; + if (!Array.isArray(notTypes)) notTypes = [notTypes]; + notTypes.forEach(function(type2) { + if (self2.testType(instance, schema2, options, ctx, type2)) { + var id = type2 && (type2.$id || type2.id); + var schemaId = id || type2; + result.addError({ + name: "not", + argument: schemaId, + message: "is of prohibited type " + schemaId + }); + } }); + return result; + }; + module2.exports = attribute; + } +}); + +// node_modules/jsonschema/lib/scan.js +var require_scan = __commonJS({ + "node_modules/jsonschema/lib/scan.js"(exports2, module2) { + "use strict"; + var urilib = require("url"); + var helpers = require_helpers3(); + module2.exports.SchemaScanResult = SchemaScanResult; + function SchemaScanResult(found, ref) { + this.id = found; + this.ref = ref; } - function saveCacheV1(paths_1, key_1, options_1) { - return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { - var _a, _b, _c, _d, _e; - const compressionMethod = yield utils.getCompressionMethod(); - let 
cacheId = -1; - const cachePaths = yield utils.resolvePaths(paths); - core12.debug("Cache Paths:"); - core12.debug(`${JSON.stringify(cachePaths)}`); - if (cachePaths.length === 0) { - throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); + module2.exports.scan = function scan(base, schema2) { + function scanSchema(baseuri, schema3) { + if (!schema3 || typeof schema3 != "object") return; + if (schema3.$ref) { + var resolvedUri = urilib.resolve(baseuri, schema3.$ref); + ref[resolvedUri] = ref[resolvedUri] ? ref[resolvedUri] + 1 : 0; + return; } - const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path3.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive Path: ${archivePath}`); - try { - yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core12.isDebug()) { - yield (0, tar_1.listTar)(archivePath, compressionMethod); - } - const fileSizeLimit = 10 * 1024 * 1024 * 1024; - const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.debug(`File Size: ${archiveFileSize}`); - if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { - throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); - } - core12.debug("Reserving Cache"); - const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { - compressionMethod, - enableCrossOsArchive, - cacheSize: archiveFileSize - }); - if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { - cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId; - } else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) { - throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); - } else { - throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); - } - core12.debug(`Saving Cache (ID: ${cacheId})`); - yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); - } catch (error3) { - const typedError = error3; - if (typedError.name === ValidationError.name) { - throw error3; - } else if (typedError.name === ReserveCacheError.name) { - core12.info(`Failed to save: ${typedError.message}`); - } else { - if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to save: ${typedError.message}`); - } else { - core12.warning(`Failed to save: ${typedError.message}`); + var id = schema3.$id || schema3.id; + var ourBase = id ? 
urilib.resolve(baseuri, id) : baseuri; + if (ourBase) { + if (ourBase.indexOf("#") < 0) ourBase += "#"; + if (found[ourBase]) { + if (!helpers.deepCompareStrict(found[ourBase], schema3)) { + throw new Error("Schema <" + ourBase + "> already exists with different definition"); } + return found[ourBase]; } - } finally { - try { - yield utils.unlinkFile(archivePath); - } catch (error3) { - core12.debug(`Failed to delete archive: ${error3}`); + found[ourBase] = schema3; + if (ourBase[ourBase.length - 1] == "#") { + found[ourBase.substring(0, ourBase.length - 1)] = schema3; } } - return cacheId; + scanArray(ourBase + "/items", Array.isArray(schema3.items) ? schema3.items : [schema3.items]); + scanArray(ourBase + "/extends", Array.isArray(schema3.extends) ? schema3.extends : [schema3.extends]); + scanSchema(ourBase + "/additionalItems", schema3.additionalItems); + scanObject(ourBase + "/properties", schema3.properties); + scanSchema(ourBase + "/additionalProperties", schema3.additionalProperties); + scanObject(ourBase + "/definitions", schema3.definitions); + scanObject(ourBase + "/patternProperties", schema3.patternProperties); + scanObject(ourBase + "/dependencies", schema3.dependencies); + scanArray(ourBase + "/disallow", schema3.disallow); + scanArray(ourBase + "/allOf", schema3.allOf); + scanArray(ourBase + "/anyOf", schema3.anyOf); + scanArray(ourBase + "/oneOf", schema3.oneOf); + scanSchema(ourBase + "/not", schema3.not); + } + function scanArray(baseuri, schemas) { + if (!Array.isArray(schemas)) return; + for (var i = 0; i < schemas.length; i++) { + scanSchema(baseuri + "/" + i, schemas[i]); + } + } + function scanObject(baseuri, schemas) { + if (!schemas || typeof schemas != "object") return; + for (var p in schemas) { + scanSchema(baseuri + "/" + p, schemas[p]); + } + } + var found = {}; + var ref = {}; + scanSchema(base, schema2); + return new SchemaScanResult(found, ref); + }; + } +}); + +// node_modules/jsonschema/lib/validator.js +var require_validator = __commonJS({ + "node_modules/jsonschema/lib/validator.js"(exports2, module2) { + "use strict"; + var urilib = require("url"); + var attribute = require_attribute(); + var helpers = require_helpers3(); + var scanSchema = require_scan().scan; + var ValidatorResult = helpers.ValidatorResult; + var ValidatorResultError = helpers.ValidatorResultError; + var SchemaError = helpers.SchemaError; + var SchemaContext = helpers.SchemaContext; + var anonymousBase = "/"; + var Validator2 = function Validator3() { + this.customFormats = Object.create(Validator3.prototype.customFormats); + this.schemas = {}; + this.unresolvedRefs = []; + this.types = Object.create(types); + this.attributes = Object.create(attribute.validators); + }; + Validator2.prototype.customFormats = {}; + Validator2.prototype.schemas = null; + Validator2.prototype.types = null; + Validator2.prototype.attributes = null; + Validator2.prototype.unresolvedRefs = null; + Validator2.prototype.addSchema = function addSchema(schema2, base) { + var self2 = this; + if (!schema2) { + return null; + } + var scan = scanSchema(base || anonymousBase, schema2); + var ourUri = base || schema2.$id || schema2.id; + for (var uri in scan.id) { + this.schemas[uri] = scan.id[uri]; + } + for (var uri in scan.ref) { + this.unresolvedRefs.push(uri); + } + this.unresolvedRefs = this.unresolvedRefs.filter(function(uri2) { + return typeof self2.schemas[uri2] === "undefined"; }); + return this.schemas[ourUri]; + }; + Validator2.prototype.addSubSchemaArray = function addSubSchemaArray(baseuri, schemas) { + 
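// Illustrative sketch (not part of this diff): what scan() above records.
// For { $id: "http://example.com/a", items: { $ref: "b" } } it returns
//   found: "http://example.com/a#" (and the same URI without "#") -> the schema
//   ref:   "http://example.com/b" -> 0   (the $ref, resolved against the base)
// addSchema() above then registers every `found` URI in this.schemas and keeps
// "http://example.com/b" in unresolvedRefs until a schema with that $id is added.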
if (!Array.isArray(schemas)) return; + for (var i = 0; i < schemas.length; i++) { + this.addSubSchema(baseuri, schemas[i]); + } + }; + Validator2.prototype.addSubSchemaObject = function addSubSchemaArray(baseuri, schemas) { + if (!schemas || typeof schemas != "object") return; + for (var p in schemas) { + this.addSubSchema(baseuri, schemas[p]); + } + }; + Validator2.prototype.setSchemas = function setSchemas(schemas) { + this.schemas = schemas; + }; + Validator2.prototype.getSchema = function getSchema(urn) { + return this.schemas[urn]; + }; + Validator2.prototype.validate = function validate(instance, schema2, options, ctx) { + if (typeof schema2 !== "boolean" && typeof schema2 !== "object" || schema2 === null) { + throw new SchemaError("Expected `schema` to be an object or boolean"); + } + if (!options) { + options = {}; + } + var id = schema2.$id || schema2.id; + var base = urilib.resolve(options.base || anonymousBase, id || ""); + if (!ctx) { + ctx = new SchemaContext(schema2, options, [], base, Object.create(this.schemas)); + if (!ctx.schemas[base]) { + ctx.schemas[base] = schema2; + } + var found = scanSchema(base, schema2); + for (var n in found.id) { + var sch = found.id[n]; + ctx.schemas[n] = sch; + } + } + if (options.required && instance === void 0) { + var result = new ValidatorResult(instance, schema2, options, ctx); + result.addError("is required, but is undefined"); + return result; + } + var result = this.validateSchema(instance, schema2, options, ctx); + if (!result) { + throw new Error("Result undefined"); + } else if (options.throwAll && result.errors.length) { + throw new ValidatorResultError(result); + } + return result; + }; + function shouldResolve(schema2) { + var ref = typeof schema2 === "string" ? schema2 : schema2.$ref; + if (typeof ref == "string") return ref; + return false; } - function saveCacheV2(paths_1, key_1, options_1) { - return __awaiter2(this, arguments, void 0, function* (paths, key, options, enableCrossOsArchive = false) { - options = Object.assign(Object.assign({}, options), { uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8, useAzureSdk: true }); - const compressionMethod = yield utils.getCompressionMethod(); - const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); - let cacheId = -1; - const cachePaths = yield utils.resolvePaths(paths); - core12.debug("Cache Paths:"); - core12.debug(`${JSON.stringify(cachePaths)}`); - if (cachePaths.length === 0) { - throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); + Validator2.prototype.validateSchema = function validateSchema(instance, schema2, options, ctx) { + var result = new ValidatorResult(instance, schema2, options, ctx); + if (typeof schema2 === "boolean") { + if (schema2 === true) { + schema2 = {}; + } else if (schema2 === false) { + schema2 = { type: [] }; } - const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path3.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core12.debug(`Archive Path: ${archivePath}`); - try { - yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core12.isDebug()) { - yield (0, tar_1.listTar)(archivePath, compressionMethod); - } - const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core12.debug(`File Size: ${archiveFileSize}`); - options.archiveSizeBytes = archiveFileSize; - core12.debug("Reserving Cache"); - const version = utils.getCacheVersion(paths, compressionMethod, 
enableCrossOsArchive); - const request2 = { - key, - version - }; - let signedUploadUrl; - try { - const response = yield twirpClient.CreateCacheEntry(request2); - if (!response.ok) { - if (response.message) { - core12.warning(`Cache reservation failed: ${response.message}`); - } - throw new Error(response.message || "Response was not ok"); - } - signedUploadUrl = response.signedUploadUrl; - } catch (error3) { - core12.debug(`Failed to reserve cache: ${error3}`); - throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); - } - core12.debug(`Attempting to upload cache located at: ${archivePath}`); - yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); - const finalizeRequest = { - key, - version, - sizeBytes: `${archiveFileSize}` - }; - const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); - core12.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); - if (!finalizeResponse.ok) { - if (finalizeResponse.message) { - throw new FinalizeCacheError(finalizeResponse.message); - } - throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); - } - cacheId = parseInt(finalizeResponse.entryId); - } catch (error3) { - const typedError = error3; - if (typedError.name === ValidationError.name) { - throw error3; - } else if (typedError.name === ReserveCacheError.name) { - core12.info(`Failed to save: ${typedError.message}`); - } else if (typedError.name === FinalizeCacheError.name) { - core12.warning(typedError.message); - } else { - if (typedError instanceof http_client_1.HttpClientError && typeof typedError.statusCode === "number" && typedError.statusCode >= 500) { - core12.error(`Failed to save: ${typedError.message}`); - } else { - core12.warning(`Failed to save: ${typedError.message}`); - } + } else if (!schema2) { + throw new Error("schema is undefined"); + } + if (schema2["extends"]) { + if (Array.isArray(schema2["extends"])) { + var schemaobj = { schema: schema2, ctx }; + schema2["extends"].forEach(this.schemaTraverser.bind(this, schemaobj)); + schema2 = schemaobj.schema; + schemaobj.schema = null; + schemaobj.ctx = null; + schemaobj = null; + } else { + schema2 = helpers.deepMerge(schema2, this.superResolve(schema2["extends"], ctx)); + } + } + var switchSchema = shouldResolve(schema2); + if (switchSchema) { + var resolved = this.resolve(schema2, switchSchema, ctx); + var subctx = new SchemaContext(resolved.subschema, options, ctx.path, resolved.switchSchema, ctx.schemas); + return this.validateSchema(instance, resolved.subschema, options, subctx); + } + var skipAttributes = options && options.skipAttributes || []; + for (var key in schema2) { + if (!attribute.ignoreProperties[key] && skipAttributes.indexOf(key) < 0) { + var validatorErr = null; + var validator = this.attributes[key]; + if (validator) { + validatorErr = validator.call(this, instance, schema2, options, ctx); + } else if (options.allowUnknownAttributes === false) { + throw new SchemaError("Unsupported attribute: " + key, schema2); } - } finally { - try { - yield utils.unlinkFile(archivePath); - } catch (error3) { - core12.debug(`Failed to delete archive: ${error3}`); + if (validatorErr) { + result.importErrors(validatorErr); } } - return cacheId; - }); - } + } + if (typeof options.rewrite == "function") { + var value = options.rewrite.call(this, instance, schema2, options, ctx); + result.instance = value; + } + return result; + }; + 
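// Illustrative sketch (not part of this diff): validateSchema above is the
// engine behind the public API wired up in node_modules/jsonschema/lib/index.js
// below. Typical use of the library this bundles:
//   var { Validator } = require("jsonschema");
//   var v = new Validator();
//   v.validate(4, { type: "integer" }).errors.length;     // 0
//   v.validate("4", { type: "integer" }).errors.length;   // 1
// Boolean schemas short-circuit as handled above: `true` accepts any instance,
// while `false` is rewritten to { type: [] }, a schema no typed instance satisfies.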
Validator2.prototype.schemaTraverser = function schemaTraverser(schemaobj, s) { + schemaobj.schema = helpers.deepMerge(schemaobj.schema, this.superResolve(s, schemaobj.ctx)); + }; + Validator2.prototype.superResolve = function superResolve(schema2, ctx) { + var ref = shouldResolve(schema2); + if (ref) { + return this.resolve(schema2, ref, ctx).subschema; + } + return schema2; + }; + Validator2.prototype.resolve = function resolve2(schema2, switchSchema, ctx) { + switchSchema = ctx.resolve(switchSchema); + if (ctx.schemas[switchSchema]) { + return { subschema: ctx.schemas[switchSchema], switchSchema }; + } + var parsed = urilib.parse(switchSchema); + var fragment = parsed && parsed.hash; + var document2 = fragment && fragment.length && switchSchema.substr(0, switchSchema.length - fragment.length); + if (!document2 || !ctx.schemas[document2]) { + throw new SchemaError("no such schema <" + switchSchema + ">", schema2); + } + var subschema = helpers.objectGetPath(ctx.schemas[document2], fragment.substr(1)); + if (subschema === void 0) { + throw new SchemaError("no such schema " + fragment + " located in <" + document2 + ">", schema2); + } + return { subschema, switchSchema }; + }; + Validator2.prototype.testType = function validateType(instance, schema2, options, ctx, type2) { + if (type2 === void 0) { + return; + } else if (type2 === null) { + throw new SchemaError('Unexpected null in "type" keyword'); + } + if (typeof this.types[type2] == "function") { + return this.types[type2].call(this, instance); + } + if (type2 && typeof type2 == "object") { + var res = this.validateSchema(instance, type2, options, ctx); + return res === void 0 || !(res && res.errors.length); + } + return true; + }; + var types = Validator2.prototype.types = {}; + types.string = function testString(instance) { + return typeof instance == "string"; + }; + types.number = function testNumber(instance) { + return typeof instance == "number" && isFinite(instance); + }; + types.integer = function testInteger(instance) { + return typeof instance == "number" && instance % 1 === 0; + }; + types.boolean = function testBoolean(instance) { + return typeof instance == "boolean"; + }; + types.array = function testArray(instance) { + return Array.isArray(instance); + }; + types["null"] = function testNull(instance) { + return instance === null; + }; + types.date = function testDate(instance) { + return instance instanceof Date; + }; + types.any = function testAny(instance) { + return true; + }; + types.object = function testObject(instance) { + return instance && typeof instance === "object" && !Array.isArray(instance) && !(instance instanceof Date); + }; + module2.exports = Validator2; + } +}); + +// node_modules/jsonschema/lib/index.js +var require_lib3 = __commonJS({ + "node_modules/jsonschema/lib/index.js"(exports2, module2) { + "use strict"; + var Validator2 = module2.exports.Validator = require_validator(); + module2.exports.ValidatorResult = require_helpers3().ValidatorResult; + module2.exports.ValidatorResultError = require_helpers3().ValidatorResultError; + module2.exports.ValidationError = require_helpers3().ValidationError; + module2.exports.SchemaError = require_helpers3().SchemaError; + module2.exports.SchemaScanResult = require_scan().SchemaScanResult; + module2.exports.scan = require_scan().scan; + module2.exports.validate = function(instance, schema2, options) { + var v = new Validator2(); + return v.validate(instance, schema2, options); + }; } }); // src/start-proxy-action.ts var import_child_process = 
require("child_process"); -var path2 = __toESM(require("path")); +var path3 = __toESM(require("path")); var core11 = __toESM(require_core()); var import_node_forge = __toESM(require_lib2()); @@ -117450,21 +117450,21 @@ async function getFolderSize(itemPath, options) { getFolderSize.loose = async (itemPath, options) => await core(itemPath, options); getFolderSize.strict = async (itemPath, options) => await core(itemPath, options, { strict: true }); async function core(rootItemPath, options = {}, returnType = {}) { - const fs = options.fs || await import("node:fs/promises"); + const fs2 = options.fs || await import("node:fs/promises"); let folderSize = 0n; const foundInos = /* @__PURE__ */ new Set(); const errors = []; await processItem(rootItemPath); async function processItem(itemPath) { if (options.ignore?.test(itemPath)) return; - const stats = returnType.strict ? await fs.lstat(itemPath, { bigint: true }) : await fs.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3)); + const stats = returnType.strict ? await fs2.lstat(itemPath, { bigint: true }) : await fs2.lstat(itemPath, { bigint: true }).catch((error3) => errors.push(error3)); if (typeof stats !== "object") return; if (!foundInos.has(stats.ino)) { foundInos.add(stats.ino); folderSize += stats.size; } if (stats.isDirectory()) { - const directoryItems = returnType.strict ? await fs.readdir(itemPath) : await fs.readdir(itemPath).catch((error3) => errors.push(error3)); + const directoryItems = returnType.strict ? await fs2.readdir(itemPath) : await fs2.readdir(itemPath).catch((error3) => errors.push(error3)); if (typeof directoryItems !== "object") return; await Promise.all( directoryItems.map( @@ -120087,6 +120087,34 @@ var safeDump = renamed("safeDump", "dump"); var semver = __toESM(require_semver2()); var GITHUB_DOTCOM_URL = "https://github.com"; var MINIMUM_CGROUP_MEMORY_LIMIT_BYTES = 1024 * 1024; +function parseGitHubUrl(inputUrl) { + const originalUrl = inputUrl; + if (inputUrl.indexOf("://") === -1) { + inputUrl = `https://${inputUrl}`; + } + if (!inputUrl.startsWith("http://") && !inputUrl.startsWith("https://")) { + throw new ConfigurationError(`"${originalUrl}" is not a http or https URL`); + } + let url; + try { + url = new URL(inputUrl); + } catch { + throw new ConfigurationError(`"${originalUrl}" is not a valid URL`); + } + if (url.hostname === "github.com" || url.hostname === "api.github.com") { + return GITHUB_DOTCOM_URL; + } + if (url.pathname.indexOf("/api/v3") !== -1) { + url.pathname = url.pathname.substring(0, url.pathname.indexOf("/api/v3")); + } + if (url.hostname.startsWith("api.")) { + url.hostname = url.hostname.substring(4); + } + if (!url.pathname.endsWith("/")) { + url.pathname = `${url.pathname}/`; + } + return url.toString(); +} function getRequiredEnvParam(paramName) { const value = process.env[paramName]; if (value === void 0 || value.length === 0) { @@ -120122,6 +120150,9 @@ var cachedCodeQlVersion = void 0; function getCachedCodeQlVersion() { return cachedCodeQlVersion; } +async function codeQlVersionAtLeast(codeql, requiredVersion) { + return semver.gte((await codeql.getVersion()).version, requiredVersion); +} async function delay(milliseconds, opts) { const { allowProcessExit } = opts || {}; return new Promise((resolve2) => { @@ -120243,27 +120274,8 @@ var persistInputs = function() { core4.saveState(persistedInputsKey, JSON.stringify(inputEnvironmentVariables)); }; -// src/logging.ts -var core5 = __toESM(require_core()); -function getActionsLogger() { - return { - debug: 
core5.debug, - info: core5.info, - warning: core5.warning, - error: core5.error, - isDebug: core5.isDebug, - startGroup: core5.startGroup, - endGroup: core5.endGroup - }; -} - -// src/start-proxy.ts -var path = __toESM(require("path")); -var core10 = __toESM(require_core()); -var toolcache = __toESM(require_tool_cache()); - // src/api-client.ts -var core6 = __toESM(require_core()); +var core5 = __toESM(require_core()); var githubUtils = __toESM(require_utils5()); // node_modules/@octokit/plugin-retry/dist-bundle/index.js @@ -120362,6 +120374,7 @@ function parseRepositoryNwo(input) { } // src/api-client.ts +var GITHUB_ENTERPRISE_VERSION_HEADER = "x-github-enterprise-version"; function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) { const auth2 = allowExternal && apiDetails.externalRepoAuth || apiDetails.auth; const retryingOctokit = githubUtils.GitHub.plugin(retry); @@ -120370,10 +120383,10 @@ function createApiClientWithDetails(apiDetails, { allowExternal = false } = {}) baseUrl: apiDetails.apiURL, userAgent: `CodeQL-Action/${getActionVersion()}`, log: { - debug: core6.debug, - info: core6.info, - warn: core6.warning, - error: core6.error + debug: core5.debug, + info: core5.info, + warn: core5.warning, + error: core5.error } }) ); @@ -120396,6 +120409,30 @@ function getAuthorizationHeaderFor(logger, apiDetails, url) { logger.debug(`Not using an authorization token.`); return void 0; } +var cachedGitHubVersion = void 0; +async function getGitHubVersionFromApi(apiClient, apiDetails) { + if (parseGitHubUrl(apiDetails.url) === GITHUB_DOTCOM_URL) { + return { type: "GitHub.com" /* DOTCOM */ }; + } + const response = await apiClient.rest.meta.get(); + if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === void 0) { + return { type: "GitHub.com" /* DOTCOM */ }; + } + if (response.headers[GITHUB_ENTERPRISE_VERSION_HEADER] === "ghe.com") { + return { type: "GitHub Enterprise Cloud with data residency" /* GHEC_DR */ }; + } + const version = response.headers[GITHUB_ENTERPRISE_VERSION_HEADER]; + return { type: "GitHub Enterprise Server" /* GHES */, version }; +} +async function getGitHubVersion() { + if (cachedGitHubVersion === void 0) { + cachedGitHubVersion = await getGitHubVersionFromApi( + getApiClient(), + getApiDetails() + ); + } + return cachedGitHubVersion; +} async function getWorkflowRelativePath() { const repo_nwo = getRepositoryNwo(); const run_id = Number(getRequiredEnvParam("GITHUB_RUN_ID")); @@ -120424,112 +120461,34 @@ async function getAnalysisKey() { const workflowPath = await getWorkflowRelativePath(); const jobName = getRequiredEnvParam("GITHUB_JOB"); analysisKey = `${workflowPath}:${jobName}`; - core6.exportVariable("CODEQL_ACTION_ANALYSIS_KEY" /* ANALYSIS_KEY */, analysisKey); + core5.exportVariable("CODEQL_ACTION_ANALYSIS_KEY" /* ANALYSIS_KEY */, analysisKey); return analysisKey; } -// src/artifact-scanner.ts -var exec = __toESM(require_exec()); -var GITHUB_PAT_CLASSIC_PATTERN = { - type: "Personal Access Token (Classic)" /* PersonalAccessClassic */, - pattern: /\bghp_[a-zA-Z0-9]{36}\b/g -}; -var GITHUB_PAT_FINE_GRAINED_PATTERN = { - type: "Personal Access Token (Fine-grained)" /* PersonalAccessFineGrained */, - pattern: /\bgithub_pat_[a-zA-Z0-9_]+\b/g -}; -var GITHUB_TOKEN_PATTERNS = [ - GITHUB_PAT_CLASSIC_PATTERN, - GITHUB_PAT_FINE_GRAINED_PATTERN, - { - type: "OAuth Access Token" /* OAuth */, - pattern: /\bgho_[a-zA-Z0-9]{36}\b/g - }, - { - type: "User-to-Server Token" /* UserToServer */, - pattern: /\bghu_[a-zA-Z0-9]{36}\b/g - }, - { - type: 
"Server-to-Server Token" /* ServerToServer */, - pattern: /\bghs_[a-zA-Z0-9]{36}\b/g - }, - { - type: "Refresh Token" /* Refresh */, - pattern: /\bghr_[a-zA-Z0-9]{36}\b/g - }, - { - type: "App Installation Access Token" /* AppInstallationAccess */, - pattern: /\bghs_[a-zA-Z0-9]{255}\b/g - } -]; -function isAuthToken(value, patterns = GITHUB_TOKEN_PATTERNS) { - for (const { type: type2, pattern } of patterns) { - if (value.match(pattern)) { - return type2; - } - } - return void 0; -} +// src/feature-flags.ts +var fs = __toESM(require("fs")); +var path = __toESM(require("path")); +var semver4 = __toESM(require_semver2()); // src/defaults.json var bundleVersion = "codeql-bundle-v2.24.1"; var cliVersion = "2.24.1"; -// src/languages.ts -var KnownLanguage = /* @__PURE__ */ ((KnownLanguage2) => { - KnownLanguage2["actions"] = "actions"; - KnownLanguage2["cpp"] = "cpp"; - KnownLanguage2["csharp"] = "csharp"; - KnownLanguage2["go"] = "go"; - KnownLanguage2["java"] = "java"; - KnownLanguage2["javascript"] = "javascript"; - KnownLanguage2["python"] = "python"; - KnownLanguage2["ruby"] = "ruby"; - KnownLanguage2["rust"] = "rust"; - KnownLanguage2["swift"] = "swift"; - return KnownLanguage2; -})(KnownLanguage || {}); - -// src/status-report.ts -var os = __toESM(require("os")); -var core9 = __toESM(require_core()); - -// src/analyses.ts -var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => { - AnalysisKind2["CodeScanning"] = "code-scanning"; - AnalysisKind2["CodeQuality"] = "code-quality"; - return AnalysisKind2; -})(AnalysisKind || {}); -var supportedAnalysisKinds = new Set(Object.values(AnalysisKind)); - -// src/caching-utils.ts -var core7 = __toESM(require_core()); - -// src/config/db-config.ts -var jsonschema = __toESM(require_lib3()); -var semver2 = __toESM(require_semver2()); -var PACK_IDENTIFIER_PATTERN = (function() { - const alphaNumeric = "[a-z0-9]"; - const alphaNumericDash = "[a-z0-9-]"; - const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`; - return new RegExp(`^${component}/${component}$`); -})(); - -// src/feature-flags.ts -var semver5 = __toESM(require_semver2()); - // src/overlay-database-utils.ts var actionsCache = __toESM(require_cache5()); +// src/caching-utils.ts +var core6 = __toESM(require_core()); + // src/git-utils.ts -var core8 = __toESM(require_core()); +var core7 = __toESM(require_core()); var toolrunner2 = __toESM(require_toolrunner()); var io3 = __toESM(require_io()); -var semver3 = __toESM(require_semver2()); +var semver2 = __toESM(require_semver2()); var runGitCommand = async function(workingDirectory, args, customErrorMessage, options) { let stdout = ""; let stderr = ""; - core8.debug(`Running git command: git ${args.join(" ")}`); + core7.debug(`Running git command: git ${args.join(" ")}`); try { await new toolrunner2.ToolRunner(await io3.which("git", true), args, { silent: true, @@ -120550,7 +120509,7 @@ var runGitCommand = async function(workingDirectory, args, customErrorMessage, o if (stderr.includes("not a git repository")) { reason = "The checkout path provided to the action does not appear to be a git repository."; } - core8.info(`git call failed. ${customErrorMessage} Error: ${reason}`); + core7.info(`git call failed. 
${customErrorMessage} Error: ${reason}`); throw error3; } }; @@ -120606,7 +120565,7 @@ async function getRef() { ) !== head; if (hasChangedRef) { const newRef = ref.replace(pull_ref_regex, "refs/pull/$1/head"); - core8.debug( + core7.debug( `No longer on merge commit, rewriting ref from ${ref} to ${newRef}.` ); return newRef; @@ -120615,15 +120574,31 @@ async function getRef() { } } +// src/logging.ts +var core8 = __toESM(require_core()); +function getActionsLogger() { + return { + debug: core8.debug, + info: core8.info, + warning: core8.warning, + error: core8.error, + isDebug: core8.isDebug, + startGroup: core8.startGroup, + endGroup: core8.endGroup + }; +} + // src/overlay-database-utils.ts var CODEQL_OVERLAY_MINIMUM_VERSION = "2.23.8"; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB = 7500; var OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_BYTES = OVERLAY_BASE_DATABASE_MAX_UPLOAD_SIZE_MB * 1e6; // src/tools-features.ts -var semver4 = __toESM(require_semver2()); +var semver3 = __toESM(require_semver2()); // src/feature-flags.ts +var DEFAULT_VERSION_FEATURE_FLAG_PREFIX = "default_codeql_version_"; +var DEFAULT_VERSION_FEATURE_FLAG_SUFFIX = "_enabled"; var featureConfig = { ["allow_toolcache_input" /* AllowToolcacheInput */]: { defaultValue: false, @@ -120832,6 +120807,367 @@ var featureConfig = { minimumVersion: void 0 } }; +var FEATURE_FLAGS_FILE_NAME = "cached-feature-flags.json"; +var Features = class { + constructor(gitHubVersion, repositoryNwo, tempDir, logger) { + this.logger = logger; + this.gitHubFeatureFlags = new GitHubFeatureFlags( + gitHubVersion, + repositoryNwo, + path.join(tempDir, FEATURE_FLAGS_FILE_NAME), + logger + ); + } + gitHubFeatureFlags; + async getDefaultCliVersion(variant) { + return await this.gitHubFeatureFlags.getDefaultCliVersion(variant); + } + /** + * + * @param feature The feature to check. + * @param codeql An optional CodeQL object. If provided, and a `minimumVersion` is specified for the + * feature, the version of the CodeQL CLI will be checked against the minimum version. + * If the version is less than the minimum version, the feature will be considered + * disabled. If not provided, and a `minimumVersion` is specified for the feature, the + * this function will throw. + * @returns true if the feature is enabled, false otherwise. + * + * @throws if a `minimumVersion` is specified for the feature, and `codeql` is not provided. 
+ */ + async getValue(feature, codeql) { + const config = featureConfig[feature]; + if (!codeql && config.minimumVersion) { + throw new Error( + `Internal error: A minimum version is specified for feature ${feature}, but no instance of CodeQL was provided.` + ); + } + if (!codeql && config.toolsFeature) { + throw new Error( + `Internal error: A required tools feature is specified for feature ${feature}, but no instance of CodeQL was provided.` + ); + } + const envVar = (process.env[config.envVar] || "").toLocaleLowerCase(); + if (envVar === "false") { + this.logger.debug( + `Feature ${feature} is disabled via the environment variable ${config.envVar}.` + ); + return false; + } + const minimumVersion = config.minimumVersion; + if (codeql && minimumVersion) { + if (!await codeQlVersionAtLeast(codeql, minimumVersion)) { + this.logger.debug( + `Feature ${feature} is disabled because the CodeQL CLI version is older than the minimum version ${minimumVersion}.` + ); + return false; + } else { + this.logger.debug( + `CodeQL CLI version ${(await codeql.getVersion()).version} is newer than the minimum version ${minimumVersion} for feature ${feature}.` + ); + } + } + const toolsFeature = config.toolsFeature; + if (codeql && toolsFeature) { + if (!await codeql.supportsFeature(toolsFeature)) { + this.logger.debug( + `Feature ${feature} is disabled because the CodeQL CLI version does not support the required tools feature ${toolsFeature}.` + ); + return false; + } else { + this.logger.debug( + `CodeQL CLI version ${(await codeql.getVersion()).version} supports the required tools feature ${toolsFeature} for feature ${feature}.` + ); + } + } + if (envVar === "true") { + this.logger.debug( + `Feature ${feature} is enabled via the environment variable ${config.envVar}.` + ); + return true; + } + const apiValue = await this.gitHubFeatureFlags.getValue(feature); + if (apiValue !== void 0) { + this.logger.debug( + `Feature ${feature} is ${apiValue ? "enabled" : "disabled"} via the GitHub API.` + ); + return apiValue; + } + const defaultValue = config.defaultValue; + this.logger.debug( + `Feature ${feature} is ${defaultValue ? "enabled" : "disabled"} due to its default value.` + ); + return defaultValue; + } +}; +var GitHubFeatureFlags = class { + constructor(gitHubVersion, repositoryNwo, featureFlagsFile, logger) { + this.gitHubVersion = gitHubVersion; + this.repositoryNwo = repositoryNwo; + this.featureFlagsFile = featureFlagsFile; + this.logger = logger; + this.hasAccessedRemoteFeatureFlags = false; + } + cachedApiResponse; + // We cache whether the feature flags were accessed or not in order to accurately report whether flags were + // incorrectly configured vs. inaccessible in our telemetry. 
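// Worked example (not part of this diff): the resolution order implemented by
// Features.getValue above, for a hypothetical feature with envVar
// CODEQL_ACTION_EXAMPLE, minimumVersion "2.20.0" and defaultValue false:
//   1. CODEQL_ACTION_EXAMPLE=false         -> false  (explicit opt-out wins)
//   2. CLI version below 2.20.0            -> false  (version gate)
//   3. required tools feature unsupported  -> false  (tools-feature gate)
//   4. CODEQL_ACTION_EXAMPLE=true          -> true   (explicit opt-in)
//   5. value from the GitHub features API  -> as returned
//   6. otherwise                           -> defaultValue, i.e. false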
+ hasAccessedRemoteFeatureFlags; + getCliVersionFromFeatureFlag(f) { + if (!f.startsWith(DEFAULT_VERSION_FEATURE_FLAG_PREFIX) || !f.endsWith(DEFAULT_VERSION_FEATURE_FLAG_SUFFIX)) { + return void 0; + } + const version = f.substring( + DEFAULT_VERSION_FEATURE_FLAG_PREFIX.length, + f.length - DEFAULT_VERSION_FEATURE_FLAG_SUFFIX.length + ).replace(/_/g, "."); + if (!semver4.valid(version)) { + this.logger.warning( + `Ignoring feature flag ${f} as it does not specify a valid CodeQL version.` + ); + return void 0; + } + return version; + } + async getDefaultCliVersion(variant) { + if (supportsFeatureFlags(variant)) { + return await this.getDefaultCliVersionFromFlags(); + } + return { + cliVersion, + tagName: bundleVersion + }; + } + async getDefaultCliVersionFromFlags() { + const response = await this.getAllFeatures(); + const enabledFeatureFlagCliVersions = Object.entries(response).map( + ([f, isEnabled]) => isEnabled ? this.getCliVersionFromFeatureFlag(f) : void 0 + ).filter((f) => f !== void 0); + if (enabledFeatureFlagCliVersions.length === 0) { + this.logger.warning( + `Feature flags do not specify a default CLI version. Falling back to the CLI version shipped with the Action. This is ${cliVersion}.` + ); + const result = { + cliVersion, + tagName: bundleVersion + }; + if (this.hasAccessedRemoteFeatureFlags) { + result.toolsFeatureFlagsValid = false; + } + return result; + } + const maxCliVersion = enabledFeatureFlagCliVersions.reduce( + (maxVersion, currentVersion) => currentVersion > maxVersion ? currentVersion : maxVersion, + enabledFeatureFlagCliVersions[0] + ); + this.logger.debug( + `Derived default CLI version of ${maxCliVersion} from feature flags.` + ); + return { + cliVersion: maxCliVersion, + tagName: `codeql-bundle-v${maxCliVersion}`, + toolsFeatureFlagsValid: true + }; + } + async getValue(feature) { + const response = await this.getAllFeatures(); + if (response === void 0) { + this.logger.debug(`No feature flags API response for ${feature}.`); + return void 0; + } + const features = response[feature]; + if (features === void 0) { + this.logger.debug(`Feature '${feature}' undefined in API response.`); + return void 0; + } + return !!features; + } + async getAllFeatures() { + if (this.cachedApiResponse !== void 0) { + return this.cachedApiResponse; + } + const fileFlags = await this.readLocalFlags(); + if (fileFlags !== void 0) { + this.cachedApiResponse = fileFlags; + return fileFlags; + } + let remoteFlags = await this.loadApiResponse(); + if (remoteFlags === void 0) { + remoteFlags = {}; + } + this.cachedApiResponse = remoteFlags; + await this.writeLocalFlags(remoteFlags); + return remoteFlags; + } + async readLocalFlags() { + try { + if (fs.existsSync(this.featureFlagsFile)) { + this.logger.debug( + `Loading feature flags from ${this.featureFlagsFile}` + ); + return JSON.parse( + fs.readFileSync(this.featureFlagsFile, "utf8") + ); + } + } catch (e) { + this.logger.warning( + `Error reading cached feature flags file ${this.featureFlagsFile}: ${e}. Requesting from GitHub instead.` + ); + } + return void 0; + } + async writeLocalFlags(flags) { + try { + this.logger.debug(`Writing feature flags to ${this.featureFlagsFile}`); + fs.writeFileSync(this.featureFlagsFile, JSON.stringify(flags)); + } catch (e) { + this.logger.warning( + `Error writing cached feature flags file ${this.featureFlagsFile}: ${e}.` + ); + } + } + async loadApiResponse() { + if (!supportsFeatureFlags(this.gitHubVersion.type)) { + this.logger.debug( + "Not running against github.com. 
Disabling all toggleable features." + ); + this.hasAccessedRemoteFeatureFlags = false; + return {}; + } + try { + const featuresToRequest = Object.entries(featureConfig).filter( + ([, config]) => !config.legacyApi + ).map(([f]) => f); + const FEATURES_PER_REQUEST = 25; + const featureChunks = []; + while (featuresToRequest.length > 0) { + featureChunks.push(featuresToRequest.splice(0, FEATURES_PER_REQUEST)); + } + let remoteFlags = {}; + for (const chunk of featureChunks) { + const response = await getApiClient().request( + "GET /repos/:owner/:repo/code-scanning/codeql-action/features", + { + owner: this.repositoryNwo.owner, + repo: this.repositoryNwo.repo, + features: chunk.join(",") + } + ); + const chunkFlags = response.data; + remoteFlags = { ...remoteFlags, ...chunkFlags }; + } + this.logger.debug( + "Loaded the following default values for the feature flags from the CodeQL Action API:" + ); + for (const [feature, value] of Object.entries(remoteFlags).sort( + ([nameA], [nameB]) => nameA.localeCompare(nameB) + )) { + this.logger.debug(` ${feature}: ${value}`); + } + this.hasAccessedRemoteFeatureFlags = true; + return remoteFlags; + } catch (e) { + const httpError = asHTTPError(e); + if (httpError?.status === 403) { + this.logger.warning( + `This run of the CodeQL Action does not have permission to access the CodeQL Action API endpoints. As a result, it will not be opted into any experimental features. This could be because the Action is running on a pull request from a fork. If not, please ensure the workflow has at least the 'security-events: read' permission. Details: ${httpError.message}` + ); + this.hasAccessedRemoteFeatureFlags = false; + return {}; + } else { + throw new Error( + `Encountered an error while trying to determine feature enablement: ${e}` + ); + } + } + } +}; +function supportsFeatureFlags(githubVariant) { + return githubVariant === "GitHub.com" /* DOTCOM */ || githubVariant === "GitHub Enterprise Cloud with data residency" /* GHEC_DR */; +} + +// src/start-proxy.ts +var path2 = __toESM(require("path")); +var core10 = __toESM(require_core()); +var toolcache = __toESM(require_tool_cache()); + +// src/artifact-scanner.ts +var exec = __toESM(require_exec()); +var GITHUB_PAT_CLASSIC_PATTERN = { + type: "Personal Access Token (Classic)" /* PersonalAccessClassic */, + pattern: /\bghp_[a-zA-Z0-9]{36}\b/g +}; +var GITHUB_PAT_FINE_GRAINED_PATTERN = { + type: "Personal Access Token (Fine-grained)" /* PersonalAccessFineGrained */, + pattern: /\bgithub_pat_[a-zA-Z0-9_]+\b/g +}; +var GITHUB_TOKEN_PATTERNS = [ + GITHUB_PAT_CLASSIC_PATTERN, + GITHUB_PAT_FINE_GRAINED_PATTERN, + { + type: "OAuth Access Token" /* OAuth */, + pattern: /\bgho_[a-zA-Z0-9]{36}\b/g + }, + { + type: "User-to-Server Token" /* UserToServer */, + pattern: /\bghu_[a-zA-Z0-9]{36}\b/g + }, + { + type: "Server-to-Server Token" /* ServerToServer */, + pattern: /\bghs_[a-zA-Z0-9]{36}\b/g + }, + { + type: "Refresh Token" /* Refresh */, + pattern: /\bghr_[a-zA-Z0-9]{36}\b/g + }, + { + type: "App Installation Access Token" /* AppInstallationAccess */, + pattern: /\bghs_[a-zA-Z0-9]{255}\b/g + } +]; +function isAuthToken(value, patterns = GITHUB_TOKEN_PATTERNS) { + for (const { type: type2, pattern } of patterns) { + if (value.match(pattern)) { + return type2; + } + } + return void 0; +} + +// src/languages.ts +var KnownLanguage = /* @__PURE__ */ ((KnownLanguage2) => { + KnownLanguage2["actions"] = "actions"; + KnownLanguage2["cpp"] = "cpp"; + KnownLanguage2["csharp"] = "csharp"; + KnownLanguage2["go"] = "go"; + 
KnownLanguage2["java"] = "java"; + KnownLanguage2["javascript"] = "javascript"; + KnownLanguage2["python"] = "python"; + KnownLanguage2["ruby"] = "ruby"; + KnownLanguage2["rust"] = "rust"; + KnownLanguage2["swift"] = "swift"; + return KnownLanguage2; +})(KnownLanguage || {}); + +// src/status-report.ts +var os = __toESM(require("os")); +var core9 = __toESM(require_core()); + +// src/analyses.ts +var AnalysisKind = /* @__PURE__ */ ((AnalysisKind2) => { + AnalysisKind2["CodeScanning"] = "code-scanning"; + AnalysisKind2["CodeQuality"] = "code-quality"; + return AnalysisKind2; +})(AnalysisKind || {}); +var supportedAnalysisKinds = new Set(Object.values(AnalysisKind)); + +// src/config/db-config.ts +var jsonschema = __toESM(require_lib3()); +var semver5 = __toESM(require_semver2()); +var PACK_IDENTIFIER_PATTERN = (function() { + const alphaNumeric = "[a-z0-9]"; + const alphaNumericDash = "[a-z0-9-]"; + const component = `${alphaNumeric}(${alphaNumericDash}*${alphaNumeric})?`; + return new RegExp(`^${component}/${component}$`); +})(); // src/trap-caching.ts var actionsCache2 = __toESM(require_cache5()); @@ -121344,7 +121680,7 @@ async function getProxyBinaryPath(logger) { proxyInfo.version ); } - return path.join(proxyBin, proxyFileName); + return path2.join(proxyBin, proxyFileName); } // src/start-proxy-action.ts @@ -121396,12 +121732,21 @@ function generateCertificateAuthority() { } async function run(startedAt) { const logger = getActionsLogger(); + let features; let language; try { persistInputs(); const tempDir = getTemporaryDirectory(); - const proxyLogFilePath = path2.resolve(tempDir, "proxy.log"); + const proxyLogFilePath = path3.resolve(tempDir, "proxy.log"); core11.saveState("proxy-log-file", proxyLogFilePath); + const repositoryNwo = getRepositoryNwo(); + const gitHubVersion = await getGitHubVersion(); + features = new Features( + gitHubVersion, + repositoryNwo, + getTemporaryDirectory(), + logger + ); const languageInput = getOptionalInput("language"); language = languageInput ? parseLanguage(languageInput) : void 0; const credentials = getCredentials( diff --git a/src/start-proxy-action.ts b/src/start-proxy-action.ts index 195df354b5..a27782c985 100644 --- a/src/start-proxy-action.ts +++ b/src/start-proxy-action.ts @@ -5,8 +5,11 @@ import * as core from "@actions/core"; import { pki } from "node-forge"; import * as actionsUtil from "./actions-util"; +import { getGitHubVersion } from "./api-client"; +import { Features } from "./feature-flags"; import { KnownLanguage } from "./languages"; import { getActionsLogger, Logger } from "./logging"; +import { getRepositoryNwo } from "./repository"; import { Credential, credentialToStr, @@ -92,6 +95,7 @@ async function run(startedAt: Date) { // possible, and only use safe functions outside. const logger = getActionsLogger(); + let features: Features | undefined; let language: KnownLanguage | undefined; try { @@ -103,9 +107,22 @@ async function run(startedAt: Date) { const proxyLogFilePath = path.resolve(tempDir, "proxy.log"); core.saveState("proxy-log-file", proxyLogFilePath); - // Get the configuration options + // Initialise FFs, but only load them from disk if they are already available. + const repositoryNwo = getRepositoryNwo(); + const gitHubVersion = await getGitHubVersion(); + // eslint-disable-next-line @typescript-eslint/no-unused-vars + features = new Features( + gitHubVersion, + repositoryNwo, + actionsUtil.getTemporaryDirectory(), + logger, + ); + + // Get the language input. 
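// Illustrative note (not part of this change): parseLanguage is expected to
// map the raw input onto the KnownLanguage enum bundled above, e.g. "java" ->
// KnownLanguage.java, with unrecognised values yielding undefined; that is
// why `language` is declared as KnownLanguage | undefined.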
const languageInput = actionsUtil.getOptionalInput("language"); language = languageInput ? parseLanguage(languageInput) : undefined; + + // Get the registry configurations from one of the inputs. const credentials = getCredentials( logger, actionsUtil.getOptionalInput("registry_secrets"),