import { __commonJS, __require, __toESM, import_core, require_auth, require_core, require_exec, require_io, require_lib, require_semver } from "./semver-C43QPvfi.mjs"; import process$1 from "node:process"; //#region node_modules/.deno/@actions+glob@0.1.2/node_modules/@actions/glob/lib/internal-glob-options-helper.js var require_internal_glob_options_helper$1 = __commonJS({ "node_modules/.deno/@actions+glob@0.1.2/node_modules/@actions/glob/lib/internal-glob-options-helper.js"(exports) { var __createBinding$25 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m$1[k]; } }); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$24 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$25 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding$25(result, mod, k); } __setModuleDefault$24(result, mod); return result; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.getOptions = void 0; const core$11 = __importStar$25(require_core()); /** * Returns a copy with defaults filled in. */ function getOptions$1(copy$1) { const result = { followSymbolicLinks: true, implicitDescendants: true, omitBrokenSymbolicLinks: true }; if (copy$1) { if (typeof copy$1.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy$1.followSymbolicLinks; core$11.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy$1.implicitDescendants === "boolean") { result.implicitDescendants = copy$1.implicitDescendants; core$11.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy$1.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy$1.omitBrokenSymbolicLinks; core$11.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; } exports.getOptions = getOptions$1; } }); //#endregion //#region node_modules/.deno/@actions+glob@0.1.2/node_modules/@actions/glob/lib/internal-path-helper.js var require_internal_path_helper$1 = __commonJS({ "node_modules/.deno/@actions+glob@0.1.2/node_modules/@actions/glob/lib/internal-path-helper.js"(exports) { var __createBinding$24 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m$1[k]; } }); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$23 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$24 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding$24(result, mod, k); } __setModuleDefault$23(result, mod); return result; }; var __importDefault$9 = exports && exports.__importDefault || function(mod) { return mod && mod.__esModule ? 
mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0; const path$12 = __importStar$24(__require("path")); const assert_1$11 = __importDefault$9(__require("assert")); const IS_WINDOWS$10 = process.platform === "win32"; /** * Similar to path.dirname except that it normalizes the path separators and handles Windows UNC paths slightly better. * * For example, on Linux/macOS: * - `/ => /` * - `/hello => /` * * For example, on Windows: * - `C:\ => C:\` * - `C:\hello => C:\` * - `C: => C:` * - `C:hello => C:` * - `\ => \` * - `\hello => \` * - `\\hello => \\hello` * - `\\hello\world => \\hello\world` */ function dirname$1(p) { p = safeTrimTrailingSeparator$1(p); if (IS_WINDOWS$10 && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) return p; let result = path$12.dirname(p); if (IS_WINDOWS$10 && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) result = safeTrimTrailingSeparator$1(result); return result; } exports.dirname = dirname$1; /** * Roots the path if not already rooted. On Windows, relative roots like `\` * or `C:` are expanded based on the current working directory. */ function ensureAbsoluteRoot$1(root, itemPath) { assert_1$11.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); assert_1$11.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); if (hasAbsoluteRoot$1(itemPath)) return itemPath; if (IS_WINDOWS$10) { if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { let cwd = process.cwd(); assert_1$11.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) if (itemPath.length === 2) return `${itemPath[0]}:\\${cwd.substr(3)}`; else { if (!cwd.endsWith("\\")) cwd += "\\"; return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; } else return `${itemPath[0]}:\\${itemPath.substr(2)}`; } else if (normalizeSeparators$1(itemPath).match(/^\\$|^\\[^\\]/)) { const cwd = process.cwd(); assert_1$11.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); return `${cwd[0]}:\\${itemPath.substr(1)}`; } } assert_1$11.default(hasAbsoluteRoot$1(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS$10 && root.endsWith("\\")) {} else root += path$12.sep; return root + itemPath; } exports.ensureAbsoluteRoot = ensureAbsoluteRoot$1; /** * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: * `\\hello\share` and `C:\hello` (and using alternate separator). */ function hasAbsoluteRoot$1(itemPath) { assert_1$11.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); itemPath = normalizeSeparators$1(itemPath); if (IS_WINDOWS$10) return itemPath.startsWith("\\\\") || /^[A-Z]:\\/i.test(itemPath); return itemPath.startsWith("/"); } exports.hasAbsoluteRoot = hasAbsoluteRoot$1; /** * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). 
*/ function hasRoot$1(itemPath) { assert_1$11.default(itemPath, `hasRoot parameter 'itemPath' must not be empty`); itemPath = normalizeSeparators$1(itemPath); if (IS_WINDOWS$10) return itemPath.startsWith("\\") || /^[A-Z]:/i.test(itemPath); return itemPath.startsWith("/"); } exports.hasRoot = hasRoot$1; /** * Removes redundant slashes and converts `/` to `\` on Windows */ function normalizeSeparators$1(p) { p = p || ""; if (IS_WINDOWS$10) { p = p.replace(/\//g, "\\"); const isUnc = /^\\\\+[^\\]/.test(p); return (isUnc ? "\\" : "") + p.replace(/\\\\+/g, "\\"); } return p.replace(/\/\/+/g, "/"); } exports.normalizeSeparators = normalizeSeparators$1; /** * Normalizes the path separators and trims the trailing separator (when safe). * For example, `/foo/ => /foo` but `/ => /` */ function safeTrimTrailingSeparator$1(p) { if (!p) return ""; p = normalizeSeparators$1(p); if (!p.endsWith(path$12.sep)) return p; if (p === path$12.sep) return p; if (IS_WINDOWS$10 && /^[A-Z]:\\$/i.test(p)) return p; return p.substr(0, p.length - 1); } exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator$1; } }); //#endregion //#region node_modules/.deno/@actions+glob@0.1.2/node_modules/@actions/glob/lib/internal-match-kind.js var require_internal_match_kind$1 = __commonJS({ "node_modules/.deno/@actions+glob@0.1.2/node_modules/@actions/glob/lib/internal-match-kind.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.MatchKind = void 0; /** * Indicates whether a pattern matches a path */ var MatchKind$1; (function(MatchKind$2) { /** Not matched */ MatchKind$2[MatchKind$2["None"] = 0] = "None"; /** Matched if the path is a directory */ MatchKind$2[MatchKind$2["Directory"] = 1] = "Directory"; /** Matched if the path is a regular file */ MatchKind$2[MatchKind$2["File"] = 2] = "File"; /** Matched */ MatchKind$2[MatchKind$2["All"] = 3] = "All"; })(MatchKind$1 = exports.MatchKind || (exports.MatchKind = {})); } }); //#endregion //#region node_modules/.deno/@actions+glob@0.1.2/node_modules/@actions/glob/lib/internal-pattern-helper.js var require_internal_pattern_helper$1 = __commonJS({ "node_modules/.deno/@actions+glob@0.1.2/node_modules/@actions/glob/lib/internal-pattern-helper.js"(exports) { var __createBinding$23 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m$1[k]; } }); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$22 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$23 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding$23(result, mod, k); } __setModuleDefault$22(result, mod); return result; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.partialMatch = exports.match = exports.getSearchPaths = void 0; const pathHelper$5 = __importStar$23(require_internal_path_helper$1()); const internal_match_kind_1$5 = require_internal_match_kind$1(); const IS_WINDOWS$9 = process.platform === "win32"; /** * Given an array of patterns, returns an array of paths to search. * Duplicates and paths under other included paths are filtered out. 
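* For example, the search paths `/foo` and `/foo/bar` reduce to just `/foo`, since `/foo/bar` is traversed anyway while searching `/foo`.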
*/ function getSearchPaths$1(patterns) { patterns = patterns.filter((x) => !x.negate); const searchPathMap = {}; for (const pattern of patterns) { const key = IS_WINDOWS$9 ? pattern.searchPath.toUpperCase() : pattern.searchPath; searchPathMap[key] = "candidate"; } const result = []; for (const pattern of patterns) { const key = IS_WINDOWS$9 ? pattern.searchPath.toUpperCase() : pattern.searchPath; if (searchPathMap[key] === "included") continue; let foundAncestor = false; let tempKey = key; let parent = pathHelper$5.dirname(tempKey); while (parent !== tempKey) { if (searchPathMap[parent]) { foundAncestor = true; break; } tempKey = parent; parent = pathHelper$5.dirname(tempKey); } if (!foundAncestor) { result.push(pattern.searchPath); searchPathMap[key] = "included"; } } return result; } exports.getSearchPaths = getSearchPaths$1; /** * Matches the patterns against the path */ function match$1(patterns, itemPath) { let result = internal_match_kind_1$5.MatchKind.None; for (const pattern of patterns) if (pattern.negate) result &= ~pattern.match(itemPath); else result |= pattern.match(itemPath); return result; } exports.match = match$1; /** * Checks whether to descend further into the directory */ function partialMatch$1(patterns, itemPath) { return patterns.some((x) => !x.negate && x.partialMatch(itemPath)); } exports.partialMatch = partialMatch$1; } }); //#endregion //#region node_modules/.deno/concat-map@0.0.1/node_modules/concat-map/index.js var require_concat_map = __commonJS({ "node_modules/.deno/concat-map@0.0.1/node_modules/concat-map/index.js"(exports, module) { module.exports = function(xs, fn) { var res = []; for (var i = 0; i < xs.length; i++) { var x = fn(xs[i], i); if (isArray(x)) res.push.apply(res, x); else res.push(x); } return res; }; var isArray = Array.isArray || function(xs) { return Object.prototype.toString.call(xs) === "[object Array]"; }; } }); //#endregion //#region node_modules/.deno/balanced-match@1.0.2/node_modules/balanced-match/index.js var require_balanced_match = __commonJS({ "node_modules/.deno/balanced-match@1.0.2/node_modules/balanced-match/index.js"(exports, module) { module.exports = balanced$1; function balanced$1(a, b, str) { if (a instanceof RegExp) a = maybeMatch(a, str); if (b instanceof RegExp) b = maybeMatch(b, str); var r = range$1(a, b, str); return r && { start: r[0], end: r[1], pre: str.slice(0, r[0]), body: str.slice(r[0] + a.length, r[1]), post: str.slice(r[1] + b.length) }; } function maybeMatch(reg, str) { var m$1 = str.match(reg); return m$1 ? m$1[0] : null; } balanced$1.range = range$1; function range$1(a, b, str) { var begs, beg, left, right, result; var ai = str.indexOf(a); var bi = str.indexOf(b, ai + 1); var i = ai; if (ai >= 0 && bi > 0) { if (a === b) return [ai, bi]; begs = []; left = str.length; while (i >= 0 && !result) { if (i == ai) { begs.push(i); ai = str.indexOf(a, i + 1); } else if (begs.length == 1) result = [begs.pop(), bi]; else { beg = begs.pop(); if (beg < left) { left = beg; right = bi; } bi = str.indexOf(b, i + 1); } i = ai < bi && ai >= 0 ? 
ai : bi; } if (begs.length) result = [left, right]; } return result; } } }); //#endregion //#region node_modules/.deno/brace-expansion@1.1.11/node_modules/brace-expansion/index.js var require_brace_expansion = __commonJS({ "node_modules/.deno/brace-expansion@1.1.11/node_modules/brace-expansion/index.js"(exports, module) { var concatMap = require_concat_map(); var balanced = require_balanced_match(); module.exports = expandTop; var escSlash = "\0SLASH" + Math.random() + "\0"; var escOpen = "\0OPEN" + Math.random() + "\0"; var escClose = "\0CLOSE" + Math.random() + "\0"; var escComma = "\0COMMA" + Math.random() + "\0"; var escPeriod = "\0PERIOD" + Math.random() + "\0"; function numeric(str) { return parseInt(str, 10) == str ? parseInt(str, 10) : str.charCodeAt(0); } function escapeBraces(str) { return str.split("\\\\").join(escSlash).split("\\{").join(escOpen).split("\\}").join(escClose).split("\\,").join(escComma).split("\\.").join(escPeriod); } function unescapeBraces(str) { return str.split(escSlash).join("\\").split(escOpen).join("{").split(escClose).join("}").split(escComma).join(",").split(escPeriod).join("."); } function parseCommaParts(str) { if (!str) return [""]; var parts = []; var m$1 = balanced("{", "}", str); if (!m$1) return str.split(","); var pre = m$1.pre; var body$1 = m$1.body; var post = m$1.post; var p = pre.split(","); p[p.length - 1] += "{" + body$1 + "}"; var postParts = parseCommaParts(post); if (post.length) { p[p.length - 1] += postParts.shift(); p.push.apply(p, postParts); } parts.push.apply(parts, p); return parts; } function expandTop(str) { if (!str) return []; if (str.substr(0, 2) === "{}") str = "\\{\\}" + str.substr(2); return expand$1(escapeBraces(str), true).map(unescapeBraces); } function embrace(str) { return "{" + str + "}"; } function isPadded(el) { return /^-?0\d/.test(el); } function lte(i, y$1) { return i <= y$1; } function gte(i, y$1) { return i >= y$1; } function expand$1(str, isTop) { var expansions = []; var m$1 = balanced("{", "}", str); if (!m$1 || /\$$/.test(m$1.pre)) return [str]; var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m$1.body); var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m$1.body); var isSequence = isNumericSequence || isAlphaSequence; var isOptions = m$1.body.indexOf(",") >= 0; if (!isSequence && !isOptions) { if (m$1.post.match(/,.*\}/)) { str = m$1.pre + "{" + m$1.body + escClose + m$1.post; return expand$1(str); } return [str]; } var n; if (isSequence) n = m$1.body.split(/\.\./); else { n = parseCommaParts(m$1.body); if (n.length === 1) { n = expand$1(n[0], false).map(embrace); if (n.length === 1) { var post = m$1.post.length ? expand$1(m$1.post, false) : [""]; return post.map(function(p) { return m$1.pre + n[0] + p; }); } } } var pre = m$1.pre; var post = m$1.post.length ? expand$1(m$1.post, false) : [""]; var N; if (isSequence) { var x = numeric(n[0]); var y$1 = numeric(n[1]); var width = Math.max(n[0].length, n[1].length); var incr = n.length == 3 ? 
Math.abs(numeric(n[2])) : 1; var test = lte; var reverse = y$1 < x; if (reverse) { incr *= -1; test = gte; } var pad = n.some(isPadded); N = []; for (var i = x; test(i, y$1); i += incr) { var c; if (isAlphaSequence) { c = String.fromCharCode(i); if (c === "\\") c = ""; } else { c = String(i); if (pad) { var need = width - c.length; if (need > 0) { var z = new Array(need + 1).join("0"); if (i < 0) c = "-" + z + c.slice(1); else c = z + c; } } } N.push(c); } } else N = concatMap(n, function(el) { return expand$1(el, false); }); for (var j = 0; j < N.length; j++) for (var k = 0; k < post.length; k++) { var expansion = pre + N[j] + post[k]; if (!isTop || isSequence || expansion) expansions.push(expansion); } return expansions; } } }); //#endregion //#region node_modules/.deno/minimatch@3.1.2/node_modules/minimatch/minimatch.js var require_minimatch = __commonJS({ "node_modules/.deno/minimatch@3.1.2/node_modules/minimatch/minimatch.js"(exports, module) { module.exports = minimatch; minimatch.Minimatch = Minimatch; var path$11 = function() { try { return __require("path"); } catch (e) {} }() || { sep: "/" }; minimatch.sep = path$11.sep; var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}; var expand = require_brace_expansion(); var plTypes = { "!": { open: "(?:(?!(?:", close: "))[^/]*?)" }, "?": { open: "(?:", close: ")?" }, "+": { open: "(?:", close: ")+" }, "*": { open: "(?:", close: ")*" }, "@": { open: "(?:", close: ")" } }; var qmark = "[^/]"; var star = qmark + "*?"; var twoStarDot = "(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?"; var twoStarNoDot = "(?:(?!(?:\\/|^)\\.).)*?"; var reSpecials = charSet("().*{}+?[]^$\\!"); function charSet(s$1) { return s$1.split("").reduce(function(set, c) { set[c] = true; return set; }, {}); } var slashSplit = /\/+/; minimatch.filter = filter; function filter(pattern, options) { options = options || {}; return function(p, i, list) { return minimatch(p, pattern, options); }; } function ext(a, b) { b = b || {}; var t = {}; Object.keys(a).forEach(function(k) { t[k] = a[k]; }); Object.keys(b).forEach(function(k) { t[k] = b[k]; }); return t; } minimatch.defaults = function(def) { if (!def || typeof def !== "object" || !Object.keys(def).length) return minimatch; var orig = minimatch; var m$1 = function minimatch$1(p, pattern, options) { return orig(p, pattern, ext(def, options)); }; m$1.Minimatch = function Minimatch$1(pattern, options) { return new orig.Minimatch(pattern, ext(def, options)); }; m$1.Minimatch.defaults = function defaults(options) { return orig.defaults(ext(def, options)).Minimatch; }; m$1.filter = function filter$1(pattern, options) { return orig.filter(pattern, ext(def, options)); }; m$1.defaults = function defaults(options) { return orig.defaults(ext(def, options)); }; m$1.makeRe = function makeRe$1(pattern, options) { return orig.makeRe(pattern, ext(def, options)); }; m$1.braceExpand = function braceExpand$1(pattern, options) { return orig.braceExpand(pattern, ext(def, options)); }; m$1.match = function(list, pattern, options) { return orig.match(list, pattern, ext(def, options)); }; return m$1; }; Minimatch.defaults = function(def) { return minimatch.defaults(def).Minimatch; }; function minimatch(p, pattern, options) { assertValidPattern(pattern); if (!options) options = {}; if (!options.nocomment && pattern.charAt(0) === "#") return false; return new Minimatch(pattern, options).match(p); } function Minimatch(pattern, options) { if (!(this instanceof Minimatch)) return new Minimatch(pattern, options); assertValidPattern(pattern); if 
(!options) options = {}; pattern = pattern.trim(); if (!options.allowWindowsEscape && path$11.sep !== "/") pattern = pattern.split(path$11.sep).join("/"); this.options = options; this.set = []; this.pattern = pattern; this.regexp = null; this.negate = false; this.comment = false; this.empty = false; this.partial = !!options.partial; this.make(); } Minimatch.prototype.debug = function() {}; Minimatch.prototype.make = make; function make() { var pattern = this.pattern; var options = this.options; if (!options.nocomment && pattern.charAt(0) === "#") { this.comment = true; return; } if (!pattern) { this.empty = true; return; } this.parseNegate(); var set = this.globSet = this.braceExpand(); if (options.debug) this.debug = function debug$3() { console.error.apply(console, arguments); }; this.debug(this.pattern, set); set = this.globParts = set.map(function(s$1) { return s$1.split(slashSplit); }); this.debug(this.pattern, set); set = set.map(function(s$1, si, set$1) { return s$1.map(this.parse, this); }, this); this.debug(this.pattern, set); set = set.filter(function(s$1) { return s$1.indexOf(false) === -1; }); this.debug(this.pattern, set); this.set = set; } Minimatch.prototype.parseNegate = parseNegate; function parseNegate() { var pattern = this.pattern; var negate = false; var options = this.options; var negateOffset = 0; if (options.nonegate) return; for (var i = 0, l = pattern.length; i < l && pattern.charAt(i) === "!"; i++) { negate = !negate; negateOffset++; } if (negateOffset) this.pattern = pattern.substr(negateOffset); this.negate = negate; } minimatch.braceExpand = function(pattern, options) { return braceExpand(pattern, options); }; Minimatch.prototype.braceExpand = braceExpand; function braceExpand(pattern, options) { if (!options) if (this instanceof Minimatch) options = this.options; else options = {}; pattern = typeof pattern === "undefined" ? this.pattern : pattern; assertValidPattern(pattern); if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) return [pattern]; return expand(pattern); } var MAX_PATTERN_LENGTH = 1024 * 64; var assertValidPattern = function(pattern) { if (typeof pattern !== "string") throw new TypeError("invalid pattern"); if (pattern.length > MAX_PATTERN_LENGTH) throw new TypeError("pattern is too long"); }; Minimatch.prototype.parse = parse$2; var SUBPARSE = {}; function parse$2(pattern, isSub) { assertValidPattern(pattern); var options = this.options; if (pattern === "**") if (!options.noglobstar) return GLOBSTAR; else pattern = "*"; if (pattern === "") return ""; var re = ""; var hasMagic = !!options.nocase; var escaping = false; var patternListStack = []; var negativeLists = []; var stateChar; var inClass = false; var reClassStart = -1; var classStart = -1; var patternStart = pattern.charAt(0) === "." ? "" : options.dot ? 
"(?!(?:^|\\/)\\.{1,2}(?:$|\\/))" : "(?!\\.)"; var self$1 = this; function clearStateChar() { if (stateChar) { switch (stateChar) { case "*": re += star; hasMagic = true; break; case "?": re += qmark; hasMagic = true; break; default: re += "\\" + stateChar; break; } self$1.debug("clearStateChar %j %j", stateChar, re); stateChar = false; } } for (var i = 0, len = pattern.length, c; i < len && (c = pattern.charAt(i)); i++) { this.debug("%s %s %s %j", pattern, i, re, c); if (escaping && reSpecials[c]) { re += "\\" + c; escaping = false; continue; } switch (c) { case "/": return false; case "\\": clearStateChar(); escaping = true; continue; case "?": case "*": case "+": case "@": case "!": this.debug("%s %s %s %j <-- stateChar", pattern, i, re, c); if (inClass) { this.debug(" in class"); if (c === "!" && i === classStart + 1) c = "^"; re += c; continue; } self$1.debug("call clearStateChar %j", stateChar); clearStateChar(); stateChar = c; if (options.noext) clearStateChar(); continue; case "(": if (inClass) { re += "("; continue; } if (!stateChar) { re += "\\("; continue; } patternListStack.push({ type: stateChar, start: i - 1, reStart: re.length, open: plTypes[stateChar].open, close: plTypes[stateChar].close }); re += stateChar === "!" ? "(?:(?!(?:" : "(?:"; this.debug("plType %j %j", stateChar, re); stateChar = false; continue; case ")": if (inClass || !patternListStack.length) { re += "\\)"; continue; } clearStateChar(); hasMagic = true; var pl = patternListStack.pop(); re += pl.close; if (pl.type === "!") negativeLists.push(pl); pl.reEnd = re.length; continue; case "|": if (inClass || !patternListStack.length || escaping) { re += "\\|"; escaping = false; continue; } clearStateChar(); re += "|"; continue; case "[": clearStateChar(); if (inClass) { re += "\\" + c; continue; } inClass = true; classStart = i; reClassStart = re.length; re += c; continue; case "]": if (i === classStart + 1 || !inClass) { re += "\\" + c; escaping = false; continue; } var cs = pattern.substring(classStart + 1, i); try { RegExp("[" + cs + "]"); } catch (er) { var sp = this.parse(cs, SUBPARSE); re = re.substr(0, reClassStart) + "\\[" + sp[0] + "\\]"; hasMagic = hasMagic || sp[1]; inClass = false; continue; } hasMagic = true; inClass = false; re += c; continue; default: clearStateChar(); if (escaping) escaping = false; else if (reSpecials[c] && !(c === "^" && inClass)) re += "\\"; re += c; } } if (inClass) { cs = pattern.substr(classStart + 1); sp = this.parse(cs, SUBPARSE); re = re.substr(0, reClassStart) + "\\[" + sp[0]; hasMagic = hasMagic || sp[1]; } for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { var tail = re.slice(pl.reStart + pl.open.length); this.debug("setting tail", re, pl); tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function(_, $1, $2) { if (!$2) $2 = "\\"; return $1 + $1 + $2 + "|"; }); this.debug("tail=%j\n %s", tail, tail, pl, re); var t = pl.type === "*" ? star : pl.type === "?" ? 
qmark : "\\" + pl.type; hasMagic = true; re = re.slice(0, pl.reStart) + t + "\\(" + tail; } clearStateChar(); if (escaping) re += "\\\\"; var addPatternStart = false; switch (re.charAt(0)) { case "[": case ".": case "(": addPatternStart = true; } for (var n = negativeLists.length - 1; n > -1; n--) { var nl = negativeLists[n]; var nlBefore = re.slice(0, nl.reStart); var nlFirst = re.slice(nl.reStart, nl.reEnd - 8); var nlLast = re.slice(nl.reEnd - 8, nl.reEnd); var nlAfter = re.slice(nl.reEnd); nlLast += nlAfter; var openParensBefore = nlBefore.split("(").length - 1; var cleanAfter = nlAfter; for (i = 0; i < openParensBefore; i++) cleanAfter = cleanAfter.replace(/\)[+*?]?/, ""); nlAfter = cleanAfter; var dollar = ""; if (nlAfter === "" && isSub !== SUBPARSE) dollar = "$"; var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast; re = newRe; } if (re !== "" && hasMagic) re = "(?=.)" + re; if (addPatternStart) re = patternStart + re; if (isSub === SUBPARSE) return [re, hasMagic]; if (!hasMagic) return globUnescape(pattern); var flags = options.nocase ? "i" : ""; try { var regExp = new RegExp("^" + re + "$", flags); } catch (er) { return new RegExp("$."); } regExp._glob = pattern; regExp._src = re; return regExp; } minimatch.makeRe = function(pattern, options) { return new Minimatch(pattern, options || {}).makeRe(); }; Minimatch.prototype.makeRe = makeRe; function makeRe() { if (this.regexp || this.regexp === false) return this.regexp; var set = this.set; if (!set.length) { this.regexp = false; return this.regexp; } var options = this.options; var twoStar = options.noglobstar ? star : options.dot ? twoStarDot : twoStarNoDot; var flags = options.nocase ? "i" : ""; var re = set.map(function(pattern) { return pattern.map(function(p) { return p === GLOBSTAR ? twoStar : typeof p === "string" ? regExpEscape(p) : p._src; }).join("\\/"); }).join("|"); re = "^(?:" + re + ")$"; if (this.negate) re = "^(?!" 
+ re + ").*$"; try { this.regexp = new RegExp(re, flags); } catch (ex) { this.regexp = false; } return this.regexp; } minimatch.match = function(list, pattern, options) { options = options || {}; var mm = new Minimatch(pattern, options); list = list.filter(function(f) { return mm.match(f); }); if (mm.options.nonull && !list.length) list.push(pattern); return list; }; Minimatch.prototype.match = function match$2(f, partial) { if (typeof partial === "undefined") partial = this.partial; this.debug("match", f, this.pattern); if (this.comment) return false; if (this.empty) return f === ""; if (f === "/" && partial) return true; var options = this.options; if (path$11.sep !== "/") f = f.split(path$11.sep).join("/"); f = f.split(slashSplit); this.debug(this.pattern, "split", f); var set = this.set; this.debug(this.pattern, "set", set); var filename; var i; for (i = f.length - 1; i >= 0; i--) { filename = f[i]; if (filename) break; } for (i = 0; i < set.length; i++) { var pattern = set[i]; var file = f; if (options.matchBase && pattern.length === 1) file = [filename]; var hit = this.matchOne(file, pattern, partial); if (hit) { if (options.flipNegate) return true; return !this.negate; } } if (options.flipNegate) return false; return this.negate; }; Minimatch.prototype.matchOne = function(file, pattern, partial) { var options = this.options; this.debug("matchOne", { "this": this, file, pattern }); this.debug("matchOne", file.length, pattern.length); for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { this.debug("matchOne loop"); var p = pattern[pi]; var f = file[fi]; this.debug(pattern, p, f); /* istanbul ignore if */ if (p === false) return false; if (p === GLOBSTAR) { this.debug("GLOBSTAR", [ pattern, p, f ]); var fr = fi; var pr = pi + 1; if (pr === pl) { this.debug("** at the end"); for (; fi < fl; fi++) if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; return true; } while (fr < fl) { var swallowee = file[fr]; this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { this.debug("globstar found match!", fr, fl, swallowee); return true; } else { if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") { this.debug("dot detected!", file, fr, pattern, pr); break; } this.debug("globstar swallow a segment, and continue"); fr++; } } /* istanbul ignore if */ if (partial) { this.debug("\n>>> no match, partial?", file, fr, pattern, pr); if (fr === fl) return true; } return false; } var hit; if (typeof p === "string") { hit = f === p; this.debug("string match", p, f, hit); } else { hit = f.match(p); this.debug("pattern match", p, f, hit); } if (!hit) return false; } if (fi === fl && pi === pl) return true; else if (fi === fl) return partial; else if (pi === pl) return fi === fl - 1 && file[fi] === ""; /* istanbul ignore next */ throw new Error("wtf?"); }; function globUnescape(s$1) { return s$1.replace(/\\(.)/g, "$1"); } function regExpEscape(s$1) { return s$1.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); } } }); //#endregion //#region node_modules/.deno/@actions+glob@0.1.2/node_modules/@actions/glob/lib/internal-path.js var require_internal_path$1 = __commonJS({ "node_modules/.deno/@actions+glob@0.1.2/node_modules/@actions/glob/lib/internal-path.js"(exports) { var __createBinding$22 = exports && exports.__createBinding || (Object.create ? 
function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m$1[k]; } }); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$21 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$22 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding$22(result, mod, k); } __setModuleDefault$21(result, mod); return result; }; var __importDefault$8 = exports && exports.__importDefault || function(mod) { return mod && mod.__esModule ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.Path = void 0; const path$10 = __importStar$22(__require("path")); const pathHelper$4 = __importStar$22(require_internal_path_helper$1()); const assert_1$10 = __importDefault$8(__require("assert")); const IS_WINDOWS$8 = process.platform === "win32"; /** * Helper class for parsing paths into segments */ var Path$1 = class { /** * Constructs a Path * @param itemPath Path or array of segments */ constructor(itemPath) { this.segments = []; if (typeof itemPath === "string") { assert_1$10.default(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper$4.safeTrimTrailingSeparator(itemPath); if (!pathHelper$4.hasRoot(itemPath)) this.segments = itemPath.split(path$10.sep); else { let remaining = itemPath; let dir = pathHelper$4.dirname(remaining); while (dir !== remaining) { const basename = path$10.basename(remaining); this.segments.unshift(basename); remaining = dir; dir = pathHelper$4.dirname(remaining); } this.segments.unshift(remaining); } } else { assert_1$10.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); for (let i = 0; i < itemPath.length; i++) { let segment = itemPath[i]; assert_1$10.default(segment, `Parameter 'itemPath' must not contain any empty segments`); segment = pathHelper$4.normalizeSeparators(itemPath[i]); if (i === 0 && pathHelper$4.hasRoot(segment)) { segment = pathHelper$4.safeTrimTrailingSeparator(segment); assert_1$10.default(segment === pathHelper$4.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } else { assert_1$10.default(!segment.includes(path$10.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } } } /** * Converts the path to its string representation */ toString() { let result = this.segments[0]; let skipSlash = result.endsWith(path$10.sep) || IS_WINDOWS$8 && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) skipSlash = false; else result += path$10.sep; result += this.segments[i]; } return result; } }; exports.Path = Path$1; } }); //#endregion //#region node_modules/.deno/@actions+glob@0.1.2/node_modules/@actions/glob/lib/internal-pattern.js var require_internal_pattern$1 = __commonJS({ "node_modules/.deno/@actions+glob@0.1.2/node_modules/@actions/glob/lib/internal-pattern.js"(exports) { var __createBinding$21 = exports && exports.__createBinding || (Object.create ? 
function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m$1[k]; } }); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$20 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$21 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding$21(result, mod, k); } __setModuleDefault$20(result, mod); return result; }; var __importDefault$7 = exports && exports.__importDefault || function(mod) { return mod && mod.__esModule ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.Pattern = void 0; const os$3 = __importStar$21(__require("os")); const path$9 = __importStar$21(__require("path")); const pathHelper$3 = __importStar$21(require_internal_path_helper$1()); const assert_1$9 = __importDefault$7(__require("assert")); const minimatch_1$1 = require_minimatch(); const internal_match_kind_1$4 = require_internal_match_kind$1(); const internal_path_1$1 = require_internal_path$1(); const IS_WINDOWS$7 = process.platform === "win32"; var Pattern$1 = class Pattern$1 { constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { /** * Indicates whether matches should be excluded from the result set */ this.negate = false; let pattern; if (typeof patternOrNegate === "string") pattern = patternOrNegate.trim(); else { segments = segments || []; assert_1$9.default(segments.length, `Parameter 'segments' must not be empty`); const root = Pattern$1.getLiteral(segments[0]); assert_1$9.default(root && pathHelper$3.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); pattern = new internal_path_1$1.Path(segments).toString().trim(); if (patternOrNegate) pattern = `!${pattern}`; } while (pattern.startsWith("!")) { this.negate = !this.negate; pattern = pattern.substr(1).trim(); } pattern = Pattern$1.fixupPattern(pattern, homedir); this.segments = new internal_path_1$1.Path(pattern).segments; this.trailingSeparator = pathHelper$3.normalizeSeparators(pattern).endsWith(path$9.sep); pattern = pathHelper$3.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => Pattern$1.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); this.searchPath = new internal_path_1$1.Path(searchSegments).toString(); this.rootRegExp = new RegExp(Pattern$1.regExpEscape(searchSegments[0]), IS_WINDOWS$7 ? "i" : ""); this.isImplicitPattern = isImplicitPattern; const minimatchOptions = { dot: true, nobrace: true, nocase: IS_WINDOWS$7, nocomment: true, noext: true, nonegate: true }; pattern = IS_WINDOWS$7 ? pattern.replace(/\\/g, "/") : pattern; this.minimatch = new minimatch_1$1.Minimatch(pattern, minimatchOptions); } /** * Matches the pattern against the specified path */ match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = pathHelper$3.normalizeSeparators(itemPath); if (!itemPath.endsWith(path$9.sep) && this.isImplicitPattern === false) itemPath = `${itemPath}${path$9.sep}`; } else itemPath = pathHelper$3.safeTrimTrailingSeparator(itemPath); if (this.minimatch.match(itemPath)) return this.trailingSeparator ? 
internal_match_kind_1$4.MatchKind.Directory : internal_match_kind_1$4.MatchKind.All; return internal_match_kind_1$4.MatchKind.None; } /** * Indicates whether the pattern may match descendants of the specified path */ partialMatch(itemPath) { itemPath = pathHelper$3.safeTrimTrailingSeparator(itemPath); if (pathHelper$3.dirname(itemPath) === itemPath) return this.rootRegExp.test(itemPath); return this.minimatch.matchOne(itemPath.split(IS_WINDOWS$7 ? /\\+/ : /\/+/), this.minimatch.set[0], true); } /** * Escapes glob patterns within a path */ static globEscape(s$1) { return (IS_WINDOWS$7 ? s$1 : s$1.replace(/\\/g, "\\\\")).replace(/(\[)(?=[^/]+\])/g, "[[]").replace(/\?/g, "[?]").replace(/\*/g, "[*]"); } /** * Normalizes slashes and ensures absolute root */ static fixupPattern(pattern, homedir) { assert_1$9.default(pattern, "pattern cannot be empty"); const literalSegments = new internal_path_1$1.Path(pattern).segments.map((x) => Pattern$1.getLiteral(x)); assert_1$9.default(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); assert_1$9.default(!pathHelper$3.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper$3.normalizeSeparators(pattern); if (pattern === "." || pattern.startsWith(`.${path$9.sep}`)) pattern = Pattern$1.globEscape(process.cwd()) + pattern.substr(1); else if (pattern === "~" || pattern.startsWith(`~${path$9.sep}`)) { homedir = homedir || os$3.homedir(); assert_1$9.default(homedir, "Unable to determine HOME directory"); assert_1$9.default(pathHelper$3.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); pattern = Pattern$1.globEscape(homedir) + pattern.substr(1); } else if (IS_WINDOWS$7 && (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { let root = pathHelper$3.ensureAbsoluteRoot("C:\\dummy-root", pattern.substr(0, 2)); if (pattern.length > 2 && !root.endsWith("\\")) root += "\\"; pattern = Pattern$1.globEscape(root) + pattern.substr(2); } else if (IS_WINDOWS$7 && (pattern === "\\" || pattern.match(/^\\[^\\]/))) { let root = pathHelper$3.ensureAbsoluteRoot("C:\\dummy-root", "\\"); if (!root.endsWith("\\")) root += "\\"; pattern = Pattern$1.globEscape(root) + pattern.substr(1); } else pattern = pathHelper$3.ensureAbsoluteRoot(Pattern$1.globEscape(process.cwd()), pattern); return pathHelper$3.normalizeSeparators(pattern); } /** * Attempts to unescape a pattern segment to create a literal path segment. * Otherwise returns empty string. 
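* For example, on Linux/macOS the escaped segment `\*` unescapes to the literal `*`, while an unescaped `*` or `?` yields the empty string.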
*/ static getLiteral(segment) { let literal = ""; for (let i = 0; i < segment.length; i++) { const c = segment[i]; if (c === "\\" && !IS_WINDOWS$7 && i + 1 < segment.length) { literal += segment[++i]; continue; } else if (c === "*" || c === "?") return ""; else if (c === "[" && i + 1 < segment.length) { let set = ""; let closed = -1; for (let i2 = i + 1; i2 < segment.length; i2++) { const c2 = segment[i2]; if (c2 === "\\" && !IS_WINDOWS$7 && i2 + 1 < segment.length) { set += segment[++i2]; continue; } else if (c2 === "]") { closed = i2; break; } else set += c2; } if (closed >= 0) { if (set.length > 1) return ""; if (set) { literal += set; i = closed; continue; } } } literal += c; } return literal; } /** * Escapes regexp special characters * https://javascript.info/regexp-escaping */ static regExpEscape(s$1) { return s$1.replace(/[[\\^$.|?*+()]/g, "\\$&"); } }; exports.Pattern = Pattern$1; } }); //#endregion //#region node_modules/.deno/@actions+glob@0.1.2/node_modules/@actions/glob/lib/internal-search-state.js var require_internal_search_state$1 = __commonJS({ "node_modules/.deno/@actions+glob@0.1.2/node_modules/@actions/glob/lib/internal-search-state.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.SearchState = void 0; var SearchState$1 = class { constructor(path$13, level) { this.path = path$13; this.level = level; } }; exports.SearchState = SearchState$1; } }); //#endregion //#region node_modules/.deno/@actions+glob@0.1.2/node_modules/@actions/glob/lib/internal-globber.js var require_internal_globber$1 = __commonJS({ "node_modules/.deno/@actions+glob@0.1.2/node_modules/@actions/glob/lib/internal-globber.js"(exports) { var __createBinding$20 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m$1[k]; } }); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$19 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$20 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding$20(result, mod, k); } __setModuleDefault$19(result, mod); return result; }; var __awaiter$18 = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __asyncValues$4 = exports && exports.__asyncValues || function(o) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var m$1 = o[Symbol.asyncIterator], i; return m$1 ? m$1.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { return this; }, i); function verb(n) { i[n] = o[n] && function(v) { return new Promise(function(resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function settle(resolve, reject, d$1, v) { Promise.resolve(v).then(function(v$1) { resolve({ value: v$1, done: d$1 }); }, reject); } }; var __await$2 = exports && exports.__await || function(v) { return this instanceof __await$2 ? (this.v = v, this) : new __await$2(v); }; var __asyncGenerator$2 = exports && exports.__asyncGenerator || function(thisArg, _arguments, generator) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var g = generator.apply(thisArg, _arguments || []), i, q = []; return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { return this; }, i; function verb(n) { if (g[n]) i[n] = function(v) { return new Promise(function(a, b) { q.push([ n, v, a, b ]) > 1 || resume$1(n, v); }); }; } function resume$1(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } function step(r) { r.value instanceof __await$2 ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } function fulfill(value) { resume$1("next", value); } function reject(value) { resume$1("throw", value); } function settle(f, v) { if (f(v), q.shift(), q.length) resume$1(q[0][0], q[0][1]); } }; Object.defineProperty(exports, "__esModule", { value: true }); exports.DefaultGlobber = void 0; const core$10 = __importStar$20(require_core()); const fs$6 = __importStar$20(__require("fs")); const globOptionsHelper$1 = __importStar$20(require_internal_glob_options_helper$1()); const path$8 = __importStar$20(__require("path")); const patternHelper$1 = __importStar$20(require_internal_pattern_helper$1()); const internal_match_kind_1$3 = require_internal_match_kind$1(); const internal_pattern_1$1 = require_internal_pattern$1(); const internal_search_state_1$1 = require_internal_search_state$1(); const IS_WINDOWS$6 = process.platform === "win32"; var DefaultGlobber$1 = class DefaultGlobber$1 { constructor(options) { this.patterns = []; this.searchPaths = []; this.options = globOptionsHelper$1.getOptions(options); } getSearchPaths() { return this.searchPaths.slice(); } glob() { var e_1, _a$2; return __awaiter$18(this, void 0, void 0, function* () { const result = []; try { for (var _b$1 = __asyncValues$4(this.globGenerator()), _c$1; _c$1 = yield _b$1.next(), !_c$1.done;) { const itemPath = _c$1.value; result.push(itemPath); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (_c$1 && !_c$1.done && (_a$2 = _b$1.return)) yield _a$2.call(_b$1); } finally { if (e_1) throw e_1.error; } } return result; }); } globGenerator() { return __asyncGenerator$2(this, arguments, function* globGenerator_1() { const options = globOptionsHelper$1.getOptions(this.options); const patterns = []; for (const pattern of this.patterns) { patterns.push(pattern); if (options.implicitDescendants && (pattern.trailingSeparator || pattern.segments[pattern.segments.length - 1] !== "**")) patterns.push(new internal_pattern_1$1.Pattern(pattern.negate, true, pattern.segments.concat("**"))); } const stack = []; for (const searchPath of patternHelper$1.getSearchPaths(patterns)) { core$10.debug(`Search path '${searchPath}'`); try { yield __await$2(fs$6.promises.lstat(searchPath)); } catch (err) { if (err.code === "ENOENT") continue; throw err; } 
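// Seed the traversal stack with each existing search path at depth 1. Entries are unshifted here and popped (LIFO) below, so the original search-path order is preserved.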
stack.unshift(new internal_search_state_1$1.SearchState(searchPath, 1)); } const traversalChain = []; while (stack.length) { const item = stack.pop(); const match$2 = patternHelper$1.match(patterns, item.path); const partialMatch$2 = !!match$2 || patternHelper$1.partialMatch(patterns, item.path); if (!match$2 && !partialMatch$2) continue; const stats = yield __await$2( DefaultGlobber$1.stat(item, options, traversalChain) // Broken symlink, or symlink cycle detected, or no longer exists ); if (!stats) continue; if (stats.isDirectory()) { if (match$2 & internal_match_kind_1$3.MatchKind.Directory) yield yield __await$2(item.path); else if (!partialMatch$2) continue; const childLevel = item.level + 1; const childItems = (yield __await$2(fs$6.promises.readdir(item.path))).map((x) => new internal_search_state_1$1.SearchState(path$8.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match$2 & internal_match_kind_1$3.MatchKind.File) yield yield __await$2(item.path); } }); } /** * Constructs a DefaultGlobber */ static create(patterns, options) { return __awaiter$18(this, void 0, void 0, function* () { const result = new DefaultGlobber$1(options); if (IS_WINDOWS$6) { patterns = patterns.replace(/\r\n/g, "\n"); patterns = patterns.replace(/\r/g, "\n"); } const lines = patterns.split("\n").map((x) => x.trim()); for (const line of lines) if (!line || line.startsWith("#")) continue; else result.patterns.push(new internal_pattern_1$1.Pattern(line)); result.searchPaths.push(...patternHelper$1.getSearchPaths(result.patterns)); return result; }); } static stat(item, options, traversalChain) { return __awaiter$18(this, void 0, void 0, function* () { let stats; if (options.followSymbolicLinks) try { stats = yield fs$6.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { core$10.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); } throw err; } else stats = yield fs$6.promises.lstat(item.path); if (stats.isDirectory() && options.followSymbolicLinks) { const realPath = yield fs$6.promises.realpath(item.path); while (traversalChain.length >= item.level) traversalChain.pop(); if (traversalChain.some((x) => x === realPath)) { core$10.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); } return stats; }); } }; exports.DefaultGlobber = DefaultGlobber$1; } }); //#endregion //#region node_modules/.deno/@actions+glob@0.1.2/node_modules/@actions/glob/lib/glob.js var require_glob$1 = __commonJS({ "node_modules/.deno/@actions+glob@0.1.2/node_modules/@actions/glob/lib/glob.js"(exports) { var __awaiter$17 = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", { value: true }); exports.create = void 0; const internal_globber_1$1 = require_internal_globber$1(); /** * Constructs a globber * * @param patterns Patterns separated by newlines * @param options Glob options */ function create$1(patterns, options) { return __awaiter$17(this, void 0, void 0, function* () { return yield internal_globber_1$1.DefaultGlobber.create(patterns, options); }); } exports.create = create$1; } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/constants.js var require_constants$3 = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/constants.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.CacheFileSizeLimit = exports.ManifestFilename = exports.TarFilename = exports.SystemTarPathOnWindows = exports.GnuTarPathOnWindows = exports.SocketTimeout = exports.DefaultRetryDelay = exports.DefaultRetryAttempts = exports.ArchiveToolType = exports.CompressionMethod = exports.CacheFilename = void 0; var CacheFilename; (function(CacheFilename$1) { CacheFilename$1["Gzip"] = "cache.tgz"; CacheFilename$1["Zstd"] = "cache.tzst"; })(CacheFilename || (exports.CacheFilename = CacheFilename = {})); var CompressionMethod; (function(CompressionMethod$1) { CompressionMethod$1["Gzip"] = "gzip"; CompressionMethod$1["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod$1["Zstd"] = "zstd"; })(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {})); var ArchiveToolType; (function(ArchiveToolType$1) { ArchiveToolType$1["GNU"] = "gnu"; ArchiveToolType$1["BSD"] = "bsd"; })(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {})); exports.DefaultRetryAttempts = 2; exports.DefaultRetryDelay = 5e3; exports.SocketTimeout = 5e3; exports.GnuTarPathOnWindows = `${process.env["PROGRAMFILES"]}\\Git\\usr\\bin\\tar.exe`; exports.SystemTarPathOnWindows = `${process.env["SYSTEMDRIVE"]}\\Windows\\System32\\tar.exe`; exports.TarFilename = "cache.tar"; exports.ManifestFilename = "manifest.txt"; exports.CacheFileSizeLimit = 10 * Math.pow(1024, 3); } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/cacheUtils.js var require_cacheUtils = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/cacheUtils.js"(exports) { var __createBinding$19 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ? !m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$18 = exports && exports.__setModuleDefault || (Object.create ? 
function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$19 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$19(result, mod, k); } __setModuleDefault$18(result, mod); return result; }; var __awaiter$16 = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __asyncValues$3 = exports && exports.__asyncValues || function(o) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var m$1 = o[Symbol.asyncIterator], i; return m$1 ? m$1.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { return this; }, i); function verb(n) { i[n] = o[n] && function(v) { return new Promise(function(resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function settle(resolve, reject, d$1, v) { Promise.resolve(v).then(function(v$1) { resolve({ value: v$1, done: d$1 }); }, reject); } }; Object.defineProperty(exports, "__esModule", { value: true }); exports.getRuntimeToken = exports.getCacheVersion = exports.assertDefined = exports.getGnuTarPathOnWindows = exports.getCacheFileName = exports.getCompressionMethod = exports.unlinkFile = exports.resolvePaths = exports.getArchiveFileSizeInBytes = exports.createTempDirectory = void 0; const core$9 = __importStar$19(require_core()); const exec$1 = __importStar$19(require_exec()); const glob = __importStar$19(require_glob$1()); const io$1 = __importStar$19(require_io()); const crypto$2 = __importStar$19(__require("crypto")); const fs$5 = __importStar$19(__require("fs")); const path$7 = __importStar$19(__require("path")); const semver = __importStar$19(require_semver()); const util$4 = __importStar$19(__require("util")); const constants_1$4 = require_constants$3(); const versionSalt = "1.0"; function createTempDirectory() { return __awaiter$16(this, void 0, void 0, function* () { const IS_WINDOWS$11 = process.platform === "win32"; let tempDirectory = process.env["RUNNER_TEMP"] || ""; if (!tempDirectory) { let baseLocation; if (IS_WINDOWS$11) baseLocation = process.env["USERPROFILE"] || "C:\\"; else if (process.platform === "darwin") baseLocation = "/Users"; else baseLocation = "/home"; tempDirectory = path$7.join(baseLocation, "actions", "temp"); } const dest = path$7.join(tempDirectory, crypto$2.randomUUID()); yield io$1.mkdirP(dest); return dest; }); } exports.createTempDirectory = createTempDirectory; function getArchiveFileSizeInBytes(filePath) { return fs$5.statSync(filePath).size; } exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; function resolvePaths(patterns) { var _a$2, e_1, _b$1, _c$1; var _d$1; return __awaiter$16(this, void 0, void 0, 
function* () { const paths = []; const workspace = (_d$1 = process.env["GITHUB_WORKSPACE"]) !== null && _d$1 !== void 0 ? _d$1 : process.cwd(); const globber = yield glob.create(patterns.join("\n"), { implicitDescendants: false }); try { for (var _e = true, _f = __asyncValues$3(globber.globGenerator()), _g; _g = yield _f.next(), _a$2 = _g.done, !_a$2; _e = true) { _c$1 = _g.value; _e = false; const file = _c$1; const relativeFile = path$7.relative(workspace, file).replace(new RegExp(`\\${path$7.sep}`, "g"), "/"); core$9.debug(`Matched: ${relativeFile}`); if (relativeFile === "") paths.push("."); else paths.push(`${relativeFile}`); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (!_e && !_a$2 && (_b$1 = _f.return)) yield _b$1.call(_f); } finally { if (e_1) throw e_1.error; } } return paths; }); } exports.resolvePaths = resolvePaths; function unlinkFile(filePath) { return __awaiter$16(this, void 0, void 0, function* () { return util$4.promisify(fs$5.unlink)(filePath); }); } exports.unlinkFile = unlinkFile; function getVersion(app, additionalArgs = []) { return __awaiter$16(this, void 0, void 0, function* () { let versionOutput = ""; additionalArgs.push("--version"); core$9.debug(`Checking ${app} ${additionalArgs.join(" ")}`); try { yield exec$1.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, silent: true, listeners: { stdout: (data) => versionOutput += data.toString(), stderr: (data) => versionOutput += data.toString() } }); } catch (err) { core$9.debug(err.message); } versionOutput = versionOutput.trim(); core$9.debug(versionOutput); return versionOutput; }); } function getCompressionMethod() { return __awaiter$16(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); const version$1 = semver.clean(versionOutput); core$9.debug(`zstd version: ${version$1}`); if (versionOutput === "") return constants_1$4.CompressionMethod.Gzip; else return constants_1$4.CompressionMethod.ZstdWithoutLong; }); } exports.getCompressionMethod = getCompressionMethod; function getCacheFileName(compressionMethod) { return compressionMethod === constants_1$4.CompressionMethod.Gzip ? constants_1$4.CacheFilename.Gzip : constants_1$4.CacheFilename.Zstd; } exports.getCacheFileName = getCacheFileName; function getGnuTarPathOnWindows() { return __awaiter$16(this, void 0, void 0, function* () { if (fs$5.existsSync(constants_1$4.GnuTarPathOnWindows)) return constants_1$4.GnuTarPathOnWindows; const versionOutput = yield getVersion("tar"); return versionOutput.toLowerCase().includes("gnu tar") ? 
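/**
 * Editorial sketch of the compression probe above: getVersion runs `zstd --quiet --version`,
 * and getCompressionMethod only checks whether the probe produced any output at all.
 * ```ts
 * const method = await getCompressionMethod(); // "gzip" when zstd is absent, else "zstd-without-long"
 * getCacheFileName(method);                    // "cache.tgz" for gzip, "cache.tzst" otherwise
 * ```
 */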
io$1.which("tar") : ""; }); } exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; function assertDefined(name, value) { if (value === void 0) throw Error(`Expected ${name} but value was undefiend`); return value; } exports.assertDefined = assertDefined; function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { const components = paths.slice(); if (compressionMethod) components.push(compressionMethod); if (process.platform === "win32" && !enableCrossOsArchive) components.push("windows-only"); components.push(versionSalt); return crypto$2.createHash("sha256").update(components.join("|")).digest("hex"); } exports.getCacheVersion = getCacheVersion; function getRuntimeToken() { const token = process.env["ACTIONS_RUNTIME_TOKEN"]; if (!token) throw new Error("Unable to get the ACTIONS_RUNTIME_TOKEN env variable"); return token; } exports.getRuntimeToken = getRuntimeToken; } }); //#endregion //#region node_modules/.deno/tslib@2.8.1/node_modules/tslib/tslib.js var require_tslib = __commonJS({ "node_modules/.deno/tslib@2.8.1/node_modules/tslib/tslib.js"(exports, module) { /****************************************************************************** Copyright (c) Microsoft Corporation. Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted. THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. ***************************************************************************** */ var __extends; var __assign; var __rest; var __decorate; var __param; var __esDecorate; var __runInitializers; var __propKey; var __setFunctionName; var __metadata; var __awaiter$15; var __generator; var __exportStar$1; var __values$1; var __read; var __spread; var __spreadArrays; var __spreadArray; var __await$1; var __asyncGenerator$1; var __asyncDelegator; var __asyncValues$2; var __makeTemplateObject; var __importStar$18; var __importDefault$6; var __classPrivateFieldGet; var __classPrivateFieldSet; var __classPrivateFieldIn; var __createBinding$18; var __addDisposableResource; var __disposeResources; var __rewriteRelativeImportExtension; (function(factory) { var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; if (typeof define === "function" && define.amd) define("tslib", ["exports"], function(exports$1) { factory(createExporter(root, createExporter(exports$1))); }); else if (typeof module === "object" && typeof module.exports === "object") factory(createExporter(root, createExporter(module.exports))); else factory(createExporter(root)); function createExporter(exports$1, previous) { if (exports$1 !== root) if (typeof Object.create === "function") Object.defineProperty(exports$1, "__esModule", { value: true }); else exports$1.__esModule = true; return function(id, v) { return exports$1[id] = previous ? 
previous(id, v) : v; }; } })(function(exporter) { var extendStatics = Object.setPrototypeOf || { __proto__: [] } instanceof Array && function(d$1, b) { d$1.__proto__ = b; } || function(d$1, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d$1[p] = b[p]; }; __extends = function(d$1, b) { if (typeof b !== "function" && b !== null) throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); extendStatics(d$1, b); function __() { this.constructor = d$1; } d$1.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; __assign = Object.assign || function(t) { for (var s$1, i = 1, n = arguments.length; i < n; i++) { s$1 = arguments[i]; for (var p in s$1) if (Object.prototype.hasOwnProperty.call(s$1, p)) t[p] = s$1[p]; } return t; }; __rest = function(s$1, e) { var t = {}; for (var p in s$1) if (Object.prototype.hasOwnProperty.call(s$1, p) && e.indexOf(p) < 0) t[p] = s$1[p]; if (s$1 != null && typeof Object.getOwnPropertySymbols === "function") { for (var i = 0, p = Object.getOwnPropertySymbols(s$1); i < p.length; i++) if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s$1, p[i])) t[p[i]] = s$1[p[i]]; } return t; }; __decorate = function(decorators, target, key, desc) { var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d$1; if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); else for (var i = decorators.length - 1; i >= 0; i--) if (d$1 = decorators[i]) r = (c < 3 ? d$1(r) : c > 3 ? d$1(target, key, r) : d$1(target, key)) || r; return c > 3 && r && Object.defineProperty(target, key, r), r; }; __param = function(paramIndex, decorator) { return function(target, key) { decorator(target, key, paramIndex); }; }; __esDecorate = function(ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { function accept$1(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); var _, done = false; for (var i = decorators.length - 1; i >= 0; i--) { var context$2 = {}; for (var p in contextIn) context$2[p] = p === "access" ? {} : contextIn[p]; for (var p in contextIn.access) context$2.access[p] = contextIn.access[p]; context$2.addInitializer = function(f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept$1(f || null)); }; var result = (0, decorators[i])(kind === "accessor" ? 
{ get: descriptor.get, set: descriptor.set } : descriptor[key], context$2); if (kind === "accessor") { if (result === void 0) continue; if (result === null || typeof result !== "object") throw new TypeError("Object expected"); if (_ = accept$1(result.get)) descriptor.get = _; if (_ = accept$1(result.set)) descriptor.set = _; if (_ = accept$1(result.init)) initializers.unshift(_); } else if (_ = accept$1(result)) if (kind === "field") initializers.unshift(_); else descriptor[key] = _; } if (target) Object.defineProperty(target, contextIn.name, descriptor); done = true; }; __runInitializers = function(thisArg, initializers, value) { var useValue = arguments.length > 2; for (var i = 0; i < initializers.length; i++) value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); return useValue ? value : void 0; }; __propKey = function(x) { return typeof x === "symbol" ? x : "".concat(x); }; __setFunctionName = function(f, name, prefix$1) { if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; return Object.defineProperty(f, "name", { configurable: true, value: prefix$1 ? "".concat(prefix$1, " ", name) : name }); }; __metadata = function(metadataKey, metadataValue) { if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); }; __awaiter$15 = function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; __generator = function(thisArg, body$1) { var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y$1, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; function verb(n) { return function(v) { return step([n, v]); }; } function step(op) { if (f) throw new TypeError("Generator is already executing."); while (g && (g = 0, op[0] && (_ = 0)), _) try { if (f = 1, y$1 && (t = op[0] & 2 ? y$1["return"] : op[0] ? y$1["throw"] || ((t = y$1["return"]) && t.call(y$1), 0) : y$1.next) && !(t = t.call(y$1, op[1])).done) return t; if (y$1 = 0, t) op = [op[0] & 2, t.value]; switch (op[0]) { case 0: case 1: t = op; break; case 4: _.label++; return { value: op[1], done: false }; case 5: _.label++; y$1 = op[1]; op = [0]; continue; case 7: op = _.ops.pop(); _.trys.pop(); continue; default: if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } if (op[0] === 3 && (!t || op[1] > t[0] && op[1] < t[3])) { _.label = op[1]; break; } if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } if (t[2]) _.ops.pop(); _.trys.pop(); continue; } op = body$1.call(thisArg, _); } catch (e) { op = [6, e]; y$1 = 0; } finally { f = t = 0; } if (op[0] & 5) throw op[1]; return { value: op[0] ? 
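/**
 * Editorial note: `__awaiter` and `__generator` above are the standard tslib helpers that
 * down-level `async`/`await`. Roughly, TypeScript turns
 * ```ts
 * async function fetchText(url: string) { const res = await fetch(url); return res.text(); }
 * ```
 * into a state machine driven by these two helpers:
 * ```ts
 * function fetchText(url) {
 *   return __awaiter(this, void 0, void 0, function () {
 *     var res;
 *     return __generator(this, function (_a) {
 *       switch (_a.label) {
 *         case 0: return [4, fetch(url)];                  // opcode 4 = yield (await)
 *         case 1: res = _a.sent(); return [2, res.text()]; // opcode 2 = return
 *       }
 *     });
 *   });
 * }
 * ```
 */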
op[1] : void 0, done: true }; } }; __exportStar$1 = function(m$1, o) { for (var p in m$1) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding$18(o, m$1, p); }; __createBinding$18 = Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ? !m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }; __values$1 = function(o) { var s$1 = typeof Symbol === "function" && Symbol.iterator, m$1 = s$1 && o[s$1], i = 0; if (m$1) return m$1.call(o); if (o && typeof o.length === "number") return { next: function() { if (o && i >= o.length) o = void 0; return { value: o && o[i++], done: !o }; } }; throw new TypeError(s$1 ? "Object is not iterable." : "Symbol.iterator is not defined."); }; __read = function(o, n) { var m$1 = typeof Symbol === "function" && o[Symbol.iterator]; if (!m$1) return o; var i = m$1.call(o), r, ar = [], e; try { while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); } catch (error) { e = { error }; } finally { try { if (r && !r.done && (m$1 = i["return"])) m$1.call(i); } finally { if (e) throw e.error; } } return ar; }; /** @deprecated */ __spread = function() { for (var ar = [], i = 0; i < arguments.length; i++) ar = ar.concat(__read(arguments[i])); return ar; }; /** @deprecated */ __spreadArrays = function() { for (var s$1 = 0, i = 0, il = arguments.length; i < il; i++) s$1 += arguments[i].length; for (var r = Array(s$1), k = 0, i = 0; i < il; i++) for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) r[k] = a[j]; return r; }; __spreadArray = function(to, from, pack) { if (pack || arguments.length === 2) { for (var i = 0, l = from.length, ar; i < l; i++) if (ar || !(i in from)) { if (!ar) ar = Array.prototype.slice.call(from, 0, i); ar[i] = from[i]; } } return to.concat(ar || Array.prototype.slice.call(from)); }; __await$1 = function(v) { return this instanceof __await$1 ? (this.v = v, this) : new __await$1(v); }; __asyncGenerator$1 = function(thisArg, _arguments, generator) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var g = generator.apply(thisArg, _arguments || []), i, q = []; return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function() { return this; }, i; function awaitReturn(f) { return function(v) { return Promise.resolve(v).then(f, reject); }; } function verb(n, f) { if (g[n]) { i[n] = function(v) { return new Promise(function(a, b) { q.push([ n, v, a, b ]) > 1 || resume$1(n, v); }); }; if (f) i[n] = f(i[n]); } } function resume$1(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } function step(r) { r.value instanceof __await$1 ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } function fulfill(value) { resume$1("next", value); } function reject(value) { resume$1("throw", value); } function settle(f, v) { if (f(v), q.shift(), q.length) resume$1(q[0][0], q[0][1]); } }; __asyncDelegator = function(o) { var i, p; return i = {}, verb("next"), verb("throw", function(e) { throw e; }), verb("return"), i[Symbol.iterator] = function() { return this; }, i; function verb(n, f) { i[n] = o[n] ? function(v) { return (p = !p) ? 
{ value: __await$1(o[n](v)), done: false } : f ? f(v) : v; } : f; } }; __asyncValues$2 = function(o) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var m$1 = o[Symbol.asyncIterator], i; return m$1 ? m$1.call(o) : (o = typeof __values$1 === "function" ? __values$1(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { return this; }, i); function verb(n) { i[n] = o[n] && function(v) { return new Promise(function(resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function settle(resolve, reject, d$1, v) { Promise.resolve(v).then(function(v$1) { resolve({ value: v$1, done: d$1 }); }, reject); } }; __makeTemplateObject = function(cooked, raw) { if (Object.defineProperty) Object.defineProperty(cooked, "raw", { value: raw }); else cooked.raw = raw; return cooked; }; var __setModuleDefault$25 = Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }; var ownKeys = function(o) { ownKeys = Object.getOwnPropertyNames || function(o$1) { var ar = []; for (var k in o$1) if (Object.prototype.hasOwnProperty.call(o$1, k)) ar[ar.length] = k; return ar; }; return ownKeys(o); }; __importStar$18 = function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding$18(result, mod, k[i]); } __setModuleDefault$25(result, mod); return result; }; __importDefault$6 = function(mod) { return mod && mod.__esModule ? mod : { "default": mod }; }; __classPrivateFieldGet = function(receiver, state$1, kind, f) { if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); if (typeof state$1 === "function" ? receiver !== state$1 || !f : !state$1.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state$1.get(receiver); }; __classPrivateFieldSet = function(receiver, state$1, value, kind, f) { if (kind === "m") throw new TypeError("Private method is not writable"); if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); if (typeof state$1 === "function" ? receiver !== state$1 || !f : !state$1.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); return kind === "a" ? f.call(receiver, value) : f ? f.value = value : state$1.set(receiver, value), value; }; __classPrivateFieldIn = function(state$1, receiver) { if (receiver === null || typeof receiver !== "object" && typeof receiver !== "function") throw new TypeError("Cannot use 'in' operator on non-object"); return typeof state$1 === "function" ? 
receiver === state$1 : state$1.has(receiver); }; __addDisposableResource = function(env, value, async) { if (value !== null && value !== void 0) { if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); var dispose, inner; if (async) { if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); dispose = value[Symbol.asyncDispose]; } if (dispose === void 0) { if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); dispose = value[Symbol.dispose]; if (async) inner = dispose; } if (typeof dispose !== "function") throw new TypeError("Object not disposable."); if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } }; env.stack.push({ value, dispose, async }); } else if (async) env.stack.push({ async: true }); return value; }; var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function(error, suppressed, message) { var e = new Error(message); return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; }; __disposeResources = function(env) { function fail(e) { env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; env.hasError = true; } var r, s$1 = 0; function next() { while (r = env.stack.pop()) try { if (!r.async && s$1 === 1) return s$1 = 0, env.stack.push(r), Promise.resolve().then(next); if (r.dispose) { var result = r.dispose.call(r.value); if (r.async) return s$1 |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); } else s$1 |= 1; } catch (e) { fail(e); } if (s$1 === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve(); if (env.hasError) throw env.error; } return next(); }; __rewriteRelativeImportExtension = function(path$13, preserveJsx) { if (typeof path$13 === "string" && /^\.\.?\//.test(path$13)) return path$13.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function(m$1, tsx, d$1, ext$1, cm) { return tsx ? preserveJsx ? ".jsx" : ".js" : d$1 && (!ext$1 || !cm) ? m$1 : d$1 + ext$1 + "." 
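/**
 * Editorial examples for `__rewriteRelativeImportExtension`, the helper being defined at
 * this point (behavior derived from its regex):
 * ```ts
 * __rewriteRelativeImportExtension("./util.ts");    // "./util.js"
 * __rewriteRelativeImportExtension("./mod.mts");    // "./mod.mjs"
 * __rewriteRelativeImportExtension("./types.d.ts"); // "./types.d.ts" (declaration files untouched)
 * __rewriteRelativeImportExtension("pkg");          // "pkg" (only relative paths are rewritten)
 * ```
 */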
+ cm.toLowerCase() + "js"; }); return path$13; }; exporter("__extends", __extends); exporter("__assign", __assign); exporter("__rest", __rest); exporter("__decorate", __decorate); exporter("__param", __param); exporter("__esDecorate", __esDecorate); exporter("__runInitializers", __runInitializers); exporter("__propKey", __propKey); exporter("__setFunctionName", __setFunctionName); exporter("__metadata", __metadata); exporter("__awaiter", __awaiter$15); exporter("__generator", __generator); exporter("__exportStar", __exportStar$1); exporter("__createBinding", __createBinding$18); exporter("__values", __values$1); exporter("__read", __read); exporter("__spread", __spread); exporter("__spreadArrays", __spreadArrays); exporter("__spreadArray", __spreadArray); exporter("__await", __await$1); exporter("__asyncGenerator", __asyncGenerator$1); exporter("__asyncDelegator", __asyncDelegator); exporter("__asyncValues", __asyncValues$2); exporter("__makeTemplateObject", __makeTemplateObject); exporter("__importStar", __importStar$18); exporter("__importDefault", __importDefault$6); exporter("__classPrivateFieldGet", __classPrivateFieldGet); exporter("__classPrivateFieldSet", __classPrivateFieldSet); exporter("__classPrivateFieldIn", __classPrivateFieldIn); exporter("__addDisposableResource", __addDisposableResource); exporter("__disposeResources", __disposeResources); exporter("__rewriteRelativeImportExtension", __rewriteRelativeImportExtension); }); } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/abort-controller/AbortError.js var require_AbortError$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/abort-controller/AbortError.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.AbortError = void 0; /** * This error is thrown when an asynchronous operation has been aborted. * Check for this error by testing the `name` that the name property of the * error matches `"AbortError"`. * * @example * ```ts snippet:ReadmeSampleAbortError * import { AbortError } from "@typespec/ts-http-runtime"; * * async function doAsyncWork(options: { abortSignal: AbortSignal }): Promise { * if (options.abortSignal.aborted) { * throw new AbortError(); * } * * // do async work * } * * const controller = new AbortController(); * controller.abort(); * * try { * doAsyncWork({ abortSignal: controller.signal }); * } catch (e) { * if (e instanceof Error && e.name === "AbortError") { * // handle abort error here. 
* } * } * ``` */ var AbortError$2 = class extends Error { constructor(message) { super(message); this.name = "AbortError"; } }; exports.AbortError = AbortError$2; } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/log.js var require_log$3 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/log.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.log = log$1; const tslib_1$9 = require_tslib(); const node_os_1 = __require("node:os"); const node_util_1$1 = tslib_1$9.__importDefault(__require("node:util")); const process$4 = tslib_1$9.__importStar(__require("node:process")); function log$1(message, ...args) { process$4.stderr.write(`${node_util_1$1.default.format(message, ...args)}${node_os_1.EOL}`); } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/debug.js var require_debug = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/debug.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); const log_js_1$10 = require_log$3(); const debugEnvVariable = typeof process !== "undefined" && process.env && process.env.DEBUG || void 0; let enabledString; let enabledNamespaces = []; let skippedNamespaces = []; const debuggers = []; if (debugEnvVariable) enable(debugEnvVariable); const debugObj = Object.assign((namespace) => { return createDebugger(namespace); }, { enable, enabled, disable, log: log_js_1$10.log }); function enable(namespaces) { enabledString = namespaces; enabledNamespaces = []; skippedNamespaces = []; const wildcard = /\*/g; const namespaceList = namespaces.split(",").map((ns) => ns.trim().replace(wildcard, ".*?")); for (const ns of namespaceList) if (ns.startsWith("-")) skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`)); else enabledNamespaces.push(new RegExp(`^${ns}$`)); for (const instance of debuggers) instance.enabled = enabled(instance.namespace); } function enabled(namespace) { if (namespace.endsWith("*")) return true; for (const skipped of skippedNamespaces) if (skipped.test(namespace)) return false; for (const enabledNamespace of enabledNamespaces) if (enabledNamespace.test(namespace)) return true; return false; } function disable() { const result = enabledString || ""; enable(""); return result; } function createDebugger(namespace) { const newDebugger = Object.assign(debug$3, { enabled: enabled(namespace), destroy, log: debugObj.log, namespace, extend }); function debug$3(...args) { if (!newDebugger.enabled) return; if (args.length > 0) args[0] = `${namespace} ${args[0]}`; newDebugger.log(...args); } debuggers.push(newDebugger); return newDebugger; } function destroy() { const index = debuggers.indexOf(this); if (index >= 0) { debuggers.splice(index, 1); return true; } return false; } function extend(namespace) { const newDebugger = createDebugger(`${this.namespace}:${namespace}`); newDebugger.log = this.log; return newDebugger; } exports.default = debugObj; } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/logger.js var require_logger$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/logger.js"(exports) { 
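/**
 * Editorial sketch of the DEBUG-style namespace matching implemented in the debug module
 * above: comma-separated patterns, "*" as a wildcard, a leading "-" to skip a namespace.
 * ```ts
 * // e.g. DEBUG="typeSpecRuntime:*,-typeSpecRuntime:info"
 * enable("typeSpecRuntime:*,-typeSpecRuntime:info");
 * enabled("typeSpecRuntime:error"); // true — matched by the wildcard
 * enabled("typeSpecRuntime:info");  // false — explicitly skipped
 * ```
 */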
Object.defineProperty(exports, "__esModule", { value: true }); exports.TypeSpecRuntimeLogger = void 0; exports.createLoggerContext = createLoggerContext; exports.setLogLevel = setLogLevel$1; exports.getLogLevel = getLogLevel$1; exports.createClientLogger = createClientLogger$1; const tslib_1$8 = require_tslib(); const debug_js_1 = tslib_1$8.__importDefault(require_debug()); const TYPESPEC_RUNTIME_LOG_LEVELS = [ "verbose", "info", "warning", "error" ]; const levelMap = { verbose: 400, info: 300, warning: 200, error: 100 }; function patchLogMethod(parent, child) { child.log = (...args) => { parent.log(...args); }; } function isTypeSpecRuntimeLogLevel(level) { return TYPESPEC_RUNTIME_LOG_LEVELS.includes(level); } /** * Creates a logger context based on the provided options. * @param options - The options for creating a logger context. * @returns The logger context. */ function createLoggerContext(options) { const registeredLoggers = new Set(); const logLevelFromEnv = typeof process !== "undefined" && process.env && process.env[options.logLevelEnvVarName] || void 0; let logLevel; const clientLogger = (0, debug_js_1.default)(options.namespace); clientLogger.log = (...args) => { debug_js_1.default.log(...args); }; if (logLevelFromEnv) if (isTypeSpecRuntimeLogLevel(logLevelFromEnv)) setLogLevel$1(logLevelFromEnv); else console.error(`${options.logLevelEnvVarName} set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${TYPESPEC_RUNTIME_LOG_LEVELS.join(", ")}.`); function shouldEnable(logger$2) { return Boolean(logLevel && levelMap[logger$2.level] <= levelMap[logLevel]); } function createLogger(parent, level) { const logger$2 = Object.assign(parent.extend(level), { level }); patchLogMethod(parent, logger$2); if (shouldEnable(logger$2)) { const enabledNamespaces$1 = debug_js_1.default.disable(); debug_js_1.default.enable(enabledNamespaces$1 + "," + logger$2.namespace); } registeredLoggers.add(logger$2); return logger$2; } return { setLogLevel(level) { if (level && !isTypeSpecRuntimeLogLevel(level)) throw new Error(`Unknown log level '${level}'. Acceptable values: ${TYPESPEC_RUNTIME_LOG_LEVELS.join(",")}`); logLevel = level; const enabledNamespaces$1 = []; for (const logger$2 of registeredLoggers) if (shouldEnable(logger$2)) enabledNamespaces$1.push(logger$2.namespace); debug_js_1.default.enable(enabledNamespaces$1.join(",")); }, getLogLevel() { return logLevel; }, createClientLogger(namespace) { const clientRootLogger = clientLogger.extend(namespace); patchLogMethod(clientLogger, clientRootLogger); return { error: createLogger(clientRootLogger, "error"), warning: createLogger(clientRootLogger, "warning"), info: createLogger(clientRootLogger, "info"), verbose: createLogger(clientRootLogger, "verbose") }; }, logger: clientLogger }; } const context$1 = createLoggerContext({ logLevelEnvVarName: "TYPESPEC_RUNTIME_LOG_LEVEL", namespace: "typeSpecRuntime" }); /** * The default logger used by the TypeSpec runtime. Logs are written to stderr; * override its `log` method to redirect the output. */ exports.TypeSpecRuntimeLogger = context$1.logger; /** * Immediately enables logging at the specified log level. If no level is specified, logging is disabled. * @param logLevel - The log level to enable for logging. * Options from most verbose to least verbose are: * - verbose * - info * - warning * - error */ function setLogLevel$1(logLevel) { context$1.setLogLevel(logLevel); } /** * Retrieves the currently specified log level.
*/ function getLogLevel$1() { return context$1.getLogLevel(); } /** * Creates a logger for use by the SDKs that inherits from `TypeSpecRuntimeLogger`. * @param namespace - The name of the SDK package. * @hidden */ function createClientLogger$1(namespace) { return context$1.createClientLogger(namespace); } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/httpHeaders.js var require_httpHeaders$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/httpHeaders.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createHttpHeaders = createHttpHeaders$1; function normalizeName(name) { return name.toLowerCase(); } function* headerIterator(map) { for (const entry of map.values()) yield [entry.name, entry.value]; } var HttpHeadersImpl = class { constructor(rawHeaders) { this._headersMap = new Map(); if (rawHeaders) for (const headerName of Object.keys(rawHeaders)) this.set(headerName, rawHeaders[headerName]); } /** * Set a header in this collection with the provided name and value. The name is * case-insensitive. * @param name - The name of the header to set. This value is case-insensitive. * @param value - The value of the header to set. */ set(name, value) { this._headersMap.set(normalizeName(name), { name, value: String(value).trim() }); } /** * Get the header value for the provided header name, or undefined if no header exists in this * collection with the provided name. * @param name - The name of the header. This value is case-insensitive. */ get(name) { var _a$2; return (_a$2 = this._headersMap.get(normalizeName(name))) === null || _a$2 === void 0 ? void 0 : _a$2.value; } /** * Get whether or not this header collection contains a header entry for the provided header name. * @param name - The name of the header to set. This value is case-insensitive. */ has(name) { return this._headersMap.has(normalizeName(name)); } /** * Remove the header with the provided headerName. * @param name - The name of the header to remove. */ delete(name) { this._headersMap.delete(normalizeName(name)); } /** * Get the JSON object representation of this HTTP header collection. */ toJSON(options = {}) { const result = {}; if (options.preserveCase) for (const entry of this._headersMap.values()) result[entry.name] = entry.value; else for (const [normalizedName, entry] of this._headersMap) result[normalizedName] = entry.value; return result; } /** * Get the string representation of this HTTP header collection. */ toString() { return JSON.stringify(this.toJSON({ preserveCase: true })); } /** * Iterate over tuples of header [name, value] pairs. */ [Symbol.iterator]() { return headerIterator(this._headersMap); } }; /** * Creates an object that satisfies the `HttpHeaders` interface. 
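 *
 * @example (editorial sketch, not from the original source — lookups are case-insensitive and values are coerced to trimmed strings)
 * ```ts
 * const headers = createHttpHeaders({ "Content-Type": "application/json" });
 * headers.get("content-type"); // "application/json"
 * headers.set("X-Request-Id", 42);
 * headers.get("x-request-id"); // "42"
 * ```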
* @param rawHeaders - A simple object representing initial headers */ function createHttpHeaders$1(rawHeaders) { return new HttpHeadersImpl(rawHeaders); } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/schemes.js var require_schemes = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/schemes.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/oauth2Flows.js var require_oauth2Flows = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/oauth2Flows.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/uuidUtils.js var require_uuidUtils = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/uuidUtils.js"(exports) { var _a$1; Object.defineProperty(exports, "__esModule", { value: true }); exports.randomUUID = randomUUID$1; const node_crypto_1$1 = __require("node:crypto"); const uuidFunction = typeof ((_a$1 = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a$1 === void 0 ? void 0 : _a$1.randomUUID) === "function" ? globalThis.crypto.randomUUID.bind(globalThis.crypto) : node_crypto_1$1.randomUUID; /** * Generates a Universally Unique Identifier. * * @returns RFC4122 v4 UUID. */ function randomUUID$1() { return uuidFunction(); } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/pipelineRequest.js var require_pipelineRequest$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/pipelineRequest.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createPipelineRequest = createPipelineRequest$1; const httpHeaders_js_1$7 = require_httpHeaders$1(); const uuidUtils_js_1$2 = require_uuidUtils(); var PipelineRequestImpl = class { constructor(options) { var _a$2, _b$1, _c$1, _d$1, _e, _f, _g; this.url = options.url; this.body = options.body; this.headers = (_a$2 = options.headers) !== null && _a$2 !== void 0 ? _a$2 : (0, httpHeaders_js_1$7.createHttpHeaders)(); this.method = (_b$1 = options.method) !== null && _b$1 !== void 0 ? _b$1 : "GET"; this.timeout = (_c$1 = options.timeout) !== null && _c$1 !== void 0 ? _c$1 : 0; this.multipartBody = options.multipartBody; this.formData = options.formData; this.disableKeepAlive = (_d$1 = options.disableKeepAlive) !== null && _d$1 !== void 0 ? _d$1 : false; this.proxySettings = options.proxySettings; this.streamResponseStatusCodes = options.streamResponseStatusCodes; this.withCredentials = (_e = options.withCredentials) !== null && _e !== void 0 ? _e : false; this.abortSignal = options.abortSignal; this.onUploadProgress = options.onUploadProgress; this.onDownloadProgress = options.onDownloadProgress; this.requestId = options.requestId || (0, uuidUtils_js_1$2.randomUUID)(); this.allowInsecureConnection = (_f = options.allowInsecureConnection) !== null && _f !== void 0 ?
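/**
 * Editorial usage sketch for `createPipelineRequest` (defined just below); the defaults
 * shown match this constructor.
 * ```ts
 * const request = createPipelineRequest({ url: "https://example.com" });
 * request.method;  // "GET"
 * request.timeout; // 0 — disabled unless specified
 * request.headers; // a fresh case-insensitive header collection
 * ```
 */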
_f : false; this.enableBrowserStreams = (_g = options.enableBrowserStreams) !== null && _g !== void 0 ? _g : false; this.requestOverrides = options.requestOverrides; } }; /** * Creates a new pipeline request with the given options. * This helper makes it easy to fill in default values; using it is not required. * @param options - The options to create the request with. */ function createPipelineRequest$1(options) { return new PipelineRequestImpl(options); } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/pipeline.js var require_pipeline$2 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/pipeline.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createEmptyPipeline = createEmptyPipeline$1; const ValidPhaseNames = new Set([ "Deserialize", "Serialize", "Retry", "Sign" ]); /** * A private implementation of Pipeline. * Do not export this class from the package. * @internal */ var HttpPipeline = class HttpPipeline { constructor(policies) { var _a$2; this._policies = []; this._policies = (_a$2 = policies === null || policies === void 0 ? void 0 : policies.slice(0)) !== null && _a$2 !== void 0 ? _a$2 : []; this._orderedPolicies = void 0; } addPolicy(policy, options = {}) { if (options.phase && options.afterPhase) throw new Error("Policies inside a phase cannot specify afterPhase."); if (options.phase && !ValidPhaseNames.has(options.phase)) throw new Error(`Invalid phase name: ${options.phase}`); if (options.afterPhase && !ValidPhaseNames.has(options.afterPhase)) throw new Error(`Invalid afterPhase name: ${options.afterPhase}`); this._policies.push({ policy, options }); this._orderedPolicies = void 0; } removePolicy(options) { const removedPolicies = []; this._policies = this._policies.filter((policyDescriptor) => { if (options.name && policyDescriptor.policy.name === options.name || options.phase && policyDescriptor.options.phase === options.phase) { removedPolicies.push(policyDescriptor.policy); return false; } else return true; }); this._orderedPolicies = void 0; return removedPolicies; } sendRequest(httpClient, request) { const policies = this.getOrderedPolicies(); const pipeline = policies.reduceRight((next, policy) => { return (req$1) => { return policy.sendRequest(req$1, next); }; }, (req$1) => httpClient.sendRequest(req$1)); return pipeline(request); } getOrderedPolicies() { if (!this._orderedPolicies) this._orderedPolicies = this.orderPolicies(); return this._orderedPolicies; } clone() { return new HttpPipeline(this._policies); } static create() { return new HttpPipeline(); } orderPolicies() { /** * The goal of this method is to reliably order pipeline policies * based on their declared requirements when they were added. * * Order is first determined by phase: * * 1. Serialize Phase * 2. Policies not in a phase * 3. Deserialize Phase * 4. Retry Phase * 5. Sign Phase * * Within each phase, policies are executed in the order * they were added unless they were specified to execute * before/after other policies or after a particular phase. * * To determine the final order, we will walk the policy list * in phase order multiple times until all dependencies are * satisfied. * * `afterPolicies` are the set of policies that must be * executed before a given policy. This requirement is * considered satisfied when each of the listed policies * has been scheduled. * * `beforePolicies` are the set of policies that must be * executed after a given policy. Since this dependency * can be expressed by converting it into equivalent * `afterPolicies` declarations, they are normalized * into that form for simplicity. * * An `afterPhase` dependency is considered satisfied when all * policies in that phase have been scheduled. * */ const result = []; const policyMap = new Map(); function createPhase(name) { return { name, policies: new Set(), hasRun: false, hasAfterPolicies: false }; } const serializePhase = createPhase("Serialize"); const noPhase = createPhase("None"); const deserializePhase = createPhase("Deserialize"); const retryPhase = createPhase("Retry"); const signPhase = createPhase("Sign"); const orderedPhases = [ serializePhase, noPhase, deserializePhase, retryPhase, signPhase ]; function getPhase(phase) { if (phase === "Retry") return retryPhase; else if (phase === "Serialize") return serializePhase; else if (phase === "Deserialize") return deserializePhase; else if (phase === "Sign") return signPhase; else return noPhase; } for (const descriptor of this._policies) { const policy = descriptor.policy; const options = descriptor.options; const policyName = policy.name; if (policyMap.has(policyName)) throw new Error("Duplicate policy names not allowed in pipeline"); const node = { policy, dependsOn: new Set(), dependants: new Set() }; if (options.afterPhase) { node.afterPhase = getPhase(options.afterPhase); node.afterPhase.hasAfterPolicies = true; } policyMap.set(policyName, node); const phase = getPhase(options.phase); phase.policies.add(node); } for (const descriptor of this._policies) { const { policy, options } = descriptor; const policyName = policy.name; const node = policyMap.get(policyName); if (!node) throw new Error(`Missing node for policy ${policyName}`); if (options.afterPolicies) for (const afterPolicyName of options.afterPolicies) { const afterNode = policyMap.get(afterPolicyName); if (afterNode) { node.dependsOn.add(afterNode); afterNode.dependants.add(node); } } if (options.beforePolicies) for (const beforePolicyName of options.beforePolicies) { const beforeNode = policyMap.get(beforePolicyName); if (beforeNode) { beforeNode.dependsOn.add(node); node.dependants.add(beforeNode); } } } function walkPhase(phase) { phase.hasRun = true; for (const node of phase.policies) { if (node.afterPhase && (!node.afterPhase.hasRun || node.afterPhase.policies.size)) continue; if (node.dependsOn.size === 0) { result.push(node.policy); for (const dependant of node.dependants) dependant.dependsOn.delete(node); policyMap.delete(node.policy.name); phase.policies.delete(node); } } } function walkPhases() { for (const phase of orderedPhases) { walkPhase(phase); if (phase.policies.size > 0 && phase !== noPhase) { if (!noPhase.hasRun) walkPhase(noPhase); return; } if (phase.hasAfterPolicies) walkPhase(noPhase); } } let iteration = 0; while (policyMap.size > 0) { iteration++; const initialResultLength = result.length; walkPhases(); if (result.length <= initialResultLength && iteration > 1) throw new Error("Cannot satisfy policy dependencies due to requirements cycle."); } return result; } }; /** * Creates a totally empty pipeline. * Useful for testing or creating a custom one.
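 *
 * @example (editorial sketch; the policy objects here are hypothetical `{ name, sendRequest }` pairs)
 * ```ts
 * const pipeline = createEmptyPipeline();
 * pipeline.addPolicy(retryPolicy, { phase: "Retry" });
 * pipeline.addPolicy(authPolicy, { phase: "Sign" });
 * pipeline.addPolicy(tracingPolicy, { afterPolicies: [retryPolicy.name] });
 * // phases execute in order: Serialize, no-phase, Deserialize, Retry, Sign
 * ```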
*/ function createEmptyPipeline$1() { return HttpPipeline.create(); } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/object.js var require_object = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/object.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.isObject = isObject$1; /** * Helper to determine when an input is a generic JS object. * @returns true when input is an object type that is not null, Array, RegExp, or Date. */ function isObject$1(input) { return typeof input === "object" && input !== null && !Array.isArray(input) && !(input instanceof RegExp) && !(input instanceof Date); } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/error.js var require_error$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/error.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.isError = isError$1; const object_js_1$2 = require_object(); /** * Typeguard for an error object shape (has name and message) * @param e - Something caught by a catch clause. */ function isError$1(e) { if ((0, object_js_1$2.isObject)(e)) { const hasName = typeof e.name === "string"; const hasMessage = typeof e.message === "string"; return hasName && hasMessage; } return false; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/inspect.js var require_inspect = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/inspect.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.custom = void 0; const node_util_1 = __require("node:util"); exports.custom = node_util_1.inspect.custom; } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/sanitizer.js var require_sanitizer = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/sanitizer.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.Sanitizer = void 0; const object_js_1$1 = require_object(); const RedactedString = "REDACTED"; const defaultAllowedHeaderNames = [ "x-ms-client-request-id", "x-ms-return-client-request-id", "x-ms-useragent", "x-ms-correlation-request-id", "x-ms-request-id", "client-request-id", "ms-cv", "return-client-request-id", "traceparent", "Access-Control-Allow-Credentials", "Access-Control-Allow-Headers", "Access-Control-Allow-Methods", "Access-Control-Allow-Origin", "Access-Control-Expose-Headers", "Access-Control-Max-Age", "Access-Control-Request-Headers", "Access-Control-Request-Method", "Origin", "Accept", "Accept-Encoding", "Cache-Control", "Connection", "Content-Length", "Content-Type", "Date", "ETag", "Expires", "If-Match", "If-Modified-Since", "If-None-Match", "If-Unmodified-Since", "Last-Modified", "Pragma", "Request-Id", "Retry-After", "Server", "Transfer-Encoding", "User-Agent", "WWW-Authenticate" ]; const defaultAllowedQueryParameters = ["api-version"]; /** * A utility class to sanitize objects for logging. 
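 *
 * @example (editorial sketch; "token" stands in for any query parameter outside the allow-list)
 * ```ts
 * const sanitizer = new Sanitizer();
 * sanitizer.sanitizeUrl("https://example.com/?api-version=1&token=secret");
 * // => "https://example.com/?api-version=1&token=REDACTED"
 * ```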
*/ var Sanitizer = class { constructor({ additionalAllowedHeaderNames: allowedHeaderNames = [], additionalAllowedQueryParameters: allowedQueryParameters = [] } = {}) { allowedHeaderNames = defaultAllowedHeaderNames.concat(allowedHeaderNames); allowedQueryParameters = defaultAllowedQueryParameters.concat(allowedQueryParameters); this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); } /** * Sanitizes an object for logging. * @param obj - The object to sanitize * @returns - The sanitized object as a string */ sanitize(obj) { const seen = new Set(); return JSON.stringify(obj, (key, value) => { if (value instanceof Error) return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); if (key === "headers") return this.sanitizeHeaders(value); else if (key === "url") return this.sanitizeUrl(value); else if (key === "query") return this.sanitizeQuery(value); else if (key === "body") return void 0; else if (key === "response") return void 0; else if (key === "operationSpec") return void 0; else if (Array.isArray(value) || (0, object_js_1$1.isObject)(value)) { if (seen.has(value)) return "[Circular]"; seen.add(value); } return value; }, 2); } /** * Sanitizes a URL for logging. * @param value - The URL to sanitize * @returns - The sanitized URL as a string */ sanitizeUrl(value) { if (typeof value !== "string" || value === null || value === "") return value; const url$1 = new URL(value); if (!url$1.search) return value; for (const [key] of url$1.searchParams) if (!this.allowedQueryParameters.has(key.toLowerCase())) url$1.searchParams.set(key, RedactedString); return url$1.toString(); } sanitizeHeaders(obj) { const sanitized = {}; for (const key of Object.keys(obj)) if (this.allowedHeaderNames.has(key.toLowerCase())) sanitized[key] = obj[key]; else sanitized[key] = RedactedString; return sanitized; } sanitizeQuery(value) { if (typeof value !== "object" || value === null) return value; const sanitized = {}; for (const k of Object.keys(value)) if (this.allowedQueryParameters.has(k.toLowerCase())) sanitized[k] = value[k]; else sanitized[k] = RedactedString; return sanitized; } }; exports.Sanitizer = Sanitizer; } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/restError.js var require_restError$2 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/restError.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.RestError = void 0; exports.isRestError = isRestError$1; const error_js_1$2 = require_error$1(); const inspect_js_1 = require_inspect(); const sanitizer_js_1$3 = require_sanitizer(); const errorSanitizer = new sanitizer_js_1$3.Sanitizer(); /** * A custom error type for failed pipeline requests. 
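 *
 * @example (editorial sketch of the usual catch-side check via the isRestError typeguard below)
 * ```ts
 * try {
 *   await pipeline.sendRequest(httpClient, request);
 * } catch (e) {
 *   if (isRestError(e)) console.error(e.code, e.statusCode);
 * }
 * ```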
*/ var RestError$1 = class RestError$1 extends Error { constructor(message, options = {}) { super(message); this.name = "RestError"; this.code = options.code; this.statusCode = options.statusCode; Object.defineProperty(this, "request", { value: options.request, enumerable: false }); Object.defineProperty(this, "response", { value: options.response, enumerable: false }); Object.defineProperty(this, inspect_js_1.custom, { value: () => { return `RestError: ${this.message} \n ${errorSanitizer.sanitize(Object.assign(Object.assign({}, this), { request: this.request, response: this.response }))}`; }, enumerable: false }); Object.setPrototypeOf(this, RestError$1.prototype); } }; exports.RestError = RestError$1; /** * Something went wrong when making the request. * This means the actual request failed for some reason, * such as a DNS issue or the connection being lost. */ RestError$1.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; /** * This means that parsing the response from the server failed. * It may have been malformed. */ RestError$1.PARSE_ERROR = "PARSE_ERROR"; /** * Typeguard for RestError * @param e - Something caught by a catch clause. */ function isRestError$1(e) { if (e instanceof RestError$1) return true; return (0, error_js_1$2.isError)(e) && e.name === "RestError"; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/bytesEncoding.js var require_bytesEncoding = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/bytesEncoding.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.uint8ArrayToString = uint8ArrayToString$1; exports.stringToUint8Array = stringToUint8Array$1; /** * Helper that transforms bytes with a specific character encoding into a string. * @param bytes - the uint8array bytes * @param format - the format used to encode the bytes * @returns the encoded string */ function uint8ArrayToString$1(bytes, format) { return Buffer.from(bytes).toString(format); } /** * Helper that transforms a string into a byte array using the specified character encoding.
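 *
 * @example (editorial round-trip sketch; both helpers are Buffer-backed as implemented here)
 * ```ts
 * const bytes = stringToUint8Array("aGVsbG8=", "base64");
 * uint8ArrayToString(bytes, "utf-8"); // "hello"
 * ```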
* @param value - the string to be converted * @param format - the format we use to decode the value * @returns a uint8array */ function stringToUint8Array$1(value, format) { return Buffer.from(value, format); } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/log.js var require_log$2 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/log.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.logger = void 0; const logger_js_1$6 = require_logger$1(); exports.logger = (0, logger_js_1$6.createClientLogger)("ts-http-runtime"); } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/nodeHttpClient.js var require_nodeHttpClient = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/nodeHttpClient.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.getBodyLength = getBodyLength; exports.createNodeHttpClient = createNodeHttpClient; const tslib_1$7 = require_tslib(); const http$2 = tslib_1$7.__importStar(__require("node:http")); const https$1 = tslib_1$7.__importStar(__require("node:https")); const zlib = tslib_1$7.__importStar(__require("node:zlib")); const node_stream_1 = __require("node:stream"); const AbortError_js_1$4 = require_AbortError$1(); const httpHeaders_js_1$6 = require_httpHeaders$1(); const restError_js_1$7 = require_restError$2(); const log_js_1$9 = require_log$2(); const sanitizer_js_1$2 = require_sanitizer(); const DEFAULT_TLS_SETTINGS = {}; function isReadableStream$1(body$1) { return body$1 && typeof body$1.pipe === "function"; } function isStreamComplete(stream$3) { if (stream$3.readable === false) return Promise.resolve(); return new Promise((resolve) => { const handler = () => { resolve(); stream$3.removeListener("close", handler); stream$3.removeListener("end", handler); stream$3.removeListener("error", handler); }; stream$3.on("close", handler); stream$3.on("end", handler); stream$3.on("error", handler); }); } function isArrayBuffer(body$1) { return body$1 && typeof body$1.byteLength === "number"; } var ReportTransform = class extends node_stream_1.Transform { _transform(chunk, _encoding, callback) { this.push(chunk); this.loadedBytes += chunk.length; try { this.progressCallback({ loadedBytes: this.loadedBytes }); callback(); } catch (e) { callback(e); } } constructor(progressCallback) { super(); this.loadedBytes = 0; this.progressCallback = progressCallback; } }; /** * A HttpClient implementation that uses Node's "https" module to send HTTPS requests. * @internal */ var NodeHttpClient = class { constructor() { this.cachedHttpsAgents = new WeakMap(); } /** * Makes a request over an underlying transport layer and returns the response. * @param request - The request to be made. */ async sendRequest(request) { var _a$2, _b$1, _c$1; const abortController$1 = new AbortController(); let abortListener; if (request.abortSignal) { if (request.abortSignal.aborted) throw new AbortError_js_1$4.AbortError("The operation was aborted. 
Request has already been canceled."); abortListener = (event) => { if (event.type === "abort") abortController$1.abort(); }; request.abortSignal.addEventListener("abort", abortListener); } let timeoutId; if (request.timeout > 0) timeoutId = setTimeout(() => { const sanitizer = new sanitizer_js_1$2.Sanitizer(); log_js_1$9.logger.info(`request to '${sanitizer.sanitizeUrl(request.url)}' timed out. canceling...`); abortController$1.abort(); }, request.timeout); const acceptEncoding = request.headers.get("Accept-Encoding"); const shouldDecompress = (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("gzip")) || (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("deflate")); let body$1 = typeof request.body === "function" ? request.body() : request.body; if (body$1 && !request.headers.has("Content-Length")) { const bodyLength = getBodyLength(body$1); if (bodyLength !== null) request.headers.set("Content-Length", bodyLength); } let responseStream; try { if (body$1 && request.onUploadProgress) { const onUploadProgress = request.onUploadProgress; const uploadReportStream = new ReportTransform(onUploadProgress); uploadReportStream.on("error", (e) => { log_js_1$9.logger.error("Error in upload progress", e); }); if (isReadableStream$1(body$1)) body$1.pipe(uploadReportStream); else uploadReportStream.end(body$1); body$1 = uploadReportStream; } const res = await this.makeRequest(request, abortController$1, body$1); if (timeoutId !== void 0) clearTimeout(timeoutId); const headers = getResponseHeaders(res); const status = (_a$2 = res.statusCode) !== null && _a$2 !== void 0 ? _a$2 : 0; const response = { status, headers, request }; if (request.method === "HEAD") { res.resume(); return response; } responseStream = shouldDecompress ? getDecodedResponseStream(res, headers) : res; const onDownloadProgress = request.onDownloadProgress; if (onDownloadProgress) { const downloadReportStream = new ReportTransform(onDownloadProgress); downloadReportStream.on("error", (e) => { log_js_1$9.logger.error("Error in download progress", e); }); responseStream.pipe(downloadReportStream); responseStream = downloadReportStream; } if (((_b$1 = request.streamResponseStatusCodes) === null || _b$1 === void 0 ? void 0 : _b$1.has(Number.POSITIVE_INFINITY)) || ((_c$1 = request.streamResponseStatusCodes) === null || _c$1 === void 0 ? void 0 : _c$1.has(response.status))) response.readableStreamBody = responseStream; else response.bodyAsText = await streamToText(responseStream); return response; } finally { if (request.abortSignal && abortListener) { let uploadStreamDone = Promise.resolve(); if (isReadableStream$1(body$1)) uploadStreamDone = isStreamComplete(body$1); let downloadStreamDone = Promise.resolve(); if (isReadableStream$1(responseStream)) downloadStreamDone = isStreamComplete(responseStream); Promise.all([uploadStreamDone, downloadStreamDone]).then(() => { var _a$3; if (abortListener) (_a$3 = request.abortSignal) === null || _a$3 === void 0 || _a$3.removeEventListener("abort", abortListener); }).catch((e) => { log_js_1$9.logger.warning("Error when cleaning up abortListener on httpRequest", e); }); } } } makeRequest(request, abortController$1, body$1) { var _a$2; const url$1 = new URL(request.url); const isInsecure = url$1.protocol !== "https:"; if (isInsecure && !request.allowInsecureConnection) throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`); const agent = (_a$2 = request.agent) !== null && _a$2 !== void 0 ? 
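/**
 * Editorial note on the timeout path above: a positive `request.timeout` schedules an
 * AbortController.abort(), and makeRequest (below) then rejects with an AbortError.
 * ```ts
 * const request = createPipelineRequest({ url: "https://example.com", timeout: 5000 });
 * // If no response arrives within 5s the send rejects with err.name === "AbortError".
 * ```
 */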
_a$2 : this.getOrCreateAgent(request, isInsecure); const options = Object.assign({ agent, hostname: url$1.hostname, path: `${url$1.pathname}${url$1.search}`, port: url$1.port, method: request.method, headers: request.headers.toJSON({ preserveCase: true }) }, request.requestOverrides); return new Promise((resolve, reject) => { const req$1 = isInsecure ? http$2.request(options, resolve) : https$1.request(options, resolve); req$1.once("error", (err) => { var _a$3; reject(new restError_js_1$7.RestError(err.message, { code: (_a$3 = err.code) !== null && _a$3 !== void 0 ? _a$3 : restError_js_1$7.RestError.REQUEST_SEND_ERROR, request })); }); abortController$1.signal.addEventListener("abort", () => { const abortError = new AbortError_js_1$4.AbortError("The operation was aborted. Rejecting from abort signal callback while making request."); req$1.destroy(abortError); reject(abortError); }); if (body$1 && isReadableStream$1(body$1)) body$1.pipe(req$1); else if (body$1) if (typeof body$1 === "string" || Buffer.isBuffer(body$1)) req$1.end(body$1); else if (isArrayBuffer(body$1)) req$1.end(ArrayBuffer.isView(body$1) ? Buffer.from(body$1.buffer) : Buffer.from(body$1)); else { log_js_1$9.logger.error("Unrecognized body type", body$1); reject(new restError_js_1$7.RestError("Unrecognized body type")); } else req$1.end(); }); } getOrCreateAgent(request, isInsecure) { var _a$2; const disableKeepAlive = request.disableKeepAlive; if (isInsecure) { if (disableKeepAlive) return http$2.globalAgent; if (!this.cachedHttpAgent) this.cachedHttpAgent = new http$2.Agent({ keepAlive: true }); return this.cachedHttpAgent; } else { if (disableKeepAlive && !request.tlsSettings) return https$1.globalAgent; const tlsSettings = (_a$2 = request.tlsSettings) !== null && _a$2 !== void 0 ? _a$2 : DEFAULT_TLS_SETTINGS; let agent = this.cachedHttpsAgents.get(tlsSettings); if (agent && agent.options.keepAlive === !disableKeepAlive) return agent; log_js_1$9.logger.info("No cached TLS Agent exist, creating a new Agent"); agent = new https$1.Agent(Object.assign({ keepAlive: !disableKeepAlive }, tlsSettings)); this.cachedHttpsAgents.set(tlsSettings, agent); return agent; } } }; function getResponseHeaders(res) { const headers = (0, httpHeaders_js_1$6.createHttpHeaders)(); for (const header of Object.keys(res.headers)) { const value = res.headers[header]; if (Array.isArray(value)) { if (value.length > 0) headers.set(header, value[0]); } else if (value) headers.set(header, value); } return headers; } function getDecodedResponseStream(stream$3, headers) { const contentEncoding = headers.get("Content-Encoding"); if (contentEncoding === "gzip") { const unzip = zlib.createGunzip(); stream$3.pipe(unzip); return unzip; } else if (contentEncoding === "deflate") { const inflate = zlib.createInflate(); stream$3.pipe(inflate); return inflate; } return stream$3; } function streamToText(stream$3) { return new Promise((resolve, reject) => { const buffer$2 = []; stream$3.on("data", (chunk) => { if (Buffer.isBuffer(chunk)) buffer$2.push(chunk); else buffer$2.push(Buffer.from(chunk)); }); stream$3.on("end", () => { resolve(Buffer.concat(buffer$2).toString("utf8")); }); stream$3.on("error", (e) => { if (e && (e === null || e === void 0 ? 
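// An AbortError from the stream is rethrown as-is so cancellation stays
// distinguishable; any other read failure is wrapped as a RestError with
// code PARSE_ERROR.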
void 0 : e.name) === "AbortError") reject(e); else reject(new restError_js_1$7.RestError(`Error reading response as text: ${e.message}`, { code: restError_js_1$7.RestError.PARSE_ERROR })); }); }); } /** @internal */ function getBodyLength(body$1) { if (!body$1) return 0; else if (Buffer.isBuffer(body$1)) return body$1.length; else if (isReadableStream$1(body$1)) return null; else if (isArrayBuffer(body$1)) return body$1.byteLength; else if (typeof body$1 === "string") return Buffer.from(body$1).length; else return null; } /** * Create a new HttpClient instance for the NodeJS environment. * @internal */ function createNodeHttpClient() { return new NodeHttpClient(); } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/defaultHttpClient.js var require_defaultHttpClient$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/defaultHttpClient.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createDefaultHttpClient = createDefaultHttpClient$1; const nodeHttpClient_js_1 = require_nodeHttpClient(); /** * Create the correct HttpClient for the current environment. */ function createDefaultHttpClient$1() { return (0, nodeHttpClient_js_1.createNodeHttpClient)(); } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/logPolicy.js var require_logPolicy$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/logPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.logPolicyName = void 0; exports.logPolicy = logPolicy$1; const log_js_1$8 = require_log$2(); const sanitizer_js_1$1 = require_sanitizer(); /** * The programmatic identifier of the logPolicy. */ exports.logPolicyName = "logPolicy"; /** * A policy that logs all requests and responses. * @param options - Options to configure logPolicy. */ function logPolicy$1(options = {}) { var _a$2; const logger$2 = (_a$2 = options.logger) !== null && _a$2 !== void 0 ? _a$2 : log_js_1$8.logger.info; const sanitizer = new sanitizer_js_1$1.Sanitizer({ additionalAllowedHeaderNames: options.additionalAllowedHeaderNames, additionalAllowedQueryParameters: options.additionalAllowedQueryParameters }); return { name: exports.logPolicyName, async sendRequest(request, next) { if (!logger$2.enabled) return next(request); logger$2(`Request: ${sanitizer.sanitize(request)}`); const response = await next(request); logger$2(`Response status code: ${response.status}`); logger$2(`Headers: ${sanitizer.sanitize(response.headers)}`); return response; } }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/redirectPolicy.js var require_redirectPolicy$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/redirectPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.redirectPolicyName = void 0; exports.redirectPolicy = redirectPolicy$1; /** * The programmatic identifier of the redirectPolicy. 
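 *
 * Usage sketch (hypothetical pipeline object, assuming a removePolicy helper as in
 * pipeline implementations built on this runtime):
 * pipeline.removePolicy({ name: redirectPolicyName });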
*/ exports.redirectPolicyName = "redirectPolicy"; /** * Methods that are allowed to follow redirects 301 and 302 */ const allowedRedirect = ["GET", "HEAD"]; /** * A policy to follow Location headers from the server in order * to support server-side redirection. * In the browser, this policy is not used. * @param options - Options to control policy behavior. */ function redirectPolicy$1(options = {}) { const { maxRetries = 20 } = options; return { name: exports.redirectPolicyName, async sendRequest(request, next) { const response = await next(request); return handleRedirect(next, response, maxRetries); } }; } async function handleRedirect(next, response, maxRetries, currentRetries = 0) { const { request, status, headers } = response; const locationHeader = headers.get("location"); if (locationHeader && (status === 300 || status === 301 && allowedRedirect.includes(request.method) || status === 302 && allowedRedirect.includes(request.method) || status === 303 && request.method === "POST" || status === 307) && currentRetries < maxRetries) { const url$1 = new URL(locationHeader, request.url); request.url = url$1.toString(); if (status === 303) { request.method = "GET"; request.headers.delete("Content-Length"); delete request.body; } request.headers.delete("Authorization"); const res = await next(request); return handleRedirect(next, res, maxRetries, currentRetries + 1); } return response; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgentPlatform.js var require_userAgentPlatform$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgentPlatform.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.getHeaderName = getHeaderName$1; exports.setPlatformSpecificData = setPlatformSpecificData$1; const tslib_1$6 = require_tslib(); const os$2 = tslib_1$6.__importStar(__require("node:os")); const process$3 = tslib_1$6.__importStar(__require("node:process")); /** * @internal */ function getHeaderName$1() { return "User-Agent"; } /** * @internal */ async function setPlatformSpecificData$1(map) { if (process$3 && process$3.versions) { const versions = process$3.versions; if (versions.bun) map.set("Bun", versions.bun); else if (versions.deno) map.set("Deno", versions.deno); else if (versions.node) map.set("Node", versions.node); } map.set("OS", `(${os$2.arch()}-${os$2.type()}-${os$2.release()})`); } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/constants.js var require_constants$2 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/constants.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.DEFAULT_RETRY_POLICY_COUNT = exports.SDK_VERSION = void 0; exports.SDK_VERSION = "0.2.2"; exports.DEFAULT_RETRY_POLICY_COUNT = 3; } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgent.js var require_userAgent$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/userAgent.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.getUserAgentHeaderName = getUserAgentHeaderName$1; exports.getUserAgentValue = getUserAgentValue$1; const 
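// getUserAgentString below joins the telemetry map into space-separated "key/value"
// tokens, e.g. "ts-http-runtime/0.2.2 Node/20.11.1 OS/(x64-Linux-6.5.0)"
// (illustrative version numbers).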
userAgentPlatform_js_1$1 = require_userAgentPlatform$1(); const constants_js_1$11 = require_constants$2(); function getUserAgentString$2(telemetryInfo) { const parts = []; for (const [key, value] of telemetryInfo) { const token = value ? `${key}/${value}` : key; parts.push(token); } return parts.join(" "); } /** * @internal */ function getUserAgentHeaderName$1() { return (0, userAgentPlatform_js_1$1.getHeaderName)(); } /** * @internal */ async function getUserAgentValue$1(prefix$1) { const runtimeInfo = new Map(); runtimeInfo.set("ts-http-runtime", constants_js_1$11.SDK_VERSION); await (0, userAgentPlatform_js_1$1.setPlatformSpecificData)(runtimeInfo); const defaultAgent = getUserAgentString$2(runtimeInfo); const userAgentValue = prefix$1 ? `${prefix$1} ${defaultAgent}` : defaultAgent; return userAgentValue; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/userAgentPolicy.js var require_userAgentPolicy$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/userAgentPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.userAgentPolicyName = void 0; exports.userAgentPolicy = userAgentPolicy$1; const userAgent_js_1$2 = require_userAgent$1(); const UserAgentHeaderName$1 = (0, userAgent_js_1$2.getUserAgentHeaderName)(); /** * The programmatic identifier of the userAgentPolicy. */ exports.userAgentPolicyName = "userAgentPolicy"; /** * A policy that sets the User-Agent header (or equivalent) to reflect * the library version. * @param options - Options to customize the user agent value. */ function userAgentPolicy$1(options = {}) { const userAgentValue = (0, userAgent_js_1$2.getUserAgentValue)(options.userAgentPrefix); return { name: exports.userAgentPolicyName, async sendRequest(request, next) { if (!request.headers.has(UserAgentHeaderName$1)) request.headers.set(UserAgentHeaderName$1, await userAgentValue); return next(request); } }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/decompressResponsePolicy.js var require_decompressResponsePolicy$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/decompressResponsePolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.decompressResponsePolicyName = void 0; exports.decompressResponsePolicy = decompressResponsePolicy$1; /** * The programmatic identifier of the decompressResponsePolicy. 
*/ exports.decompressResponsePolicyName = "decompressResponsePolicy"; /** * A policy to enable response decompression according to Accept-Encoding header * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding */ function decompressResponsePolicy$1() { return { name: exports.decompressResponsePolicyName, async sendRequest(request, next) { if (request.method !== "HEAD") request.headers.set("Accept-Encoding", "gzip,deflate"); return next(request); } }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/random.js var require_random = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/random.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.getRandomIntegerInclusive = getRandomIntegerInclusive$1; /** * Returns a random integer value between a lower and upper bound, * inclusive of both bounds. * Note that this uses Math.random and isn't secure. If you need to use * this for any kind of security purpose, find a better source of random. * @param min - The smallest integer value allowed. * @param max - The largest integer value allowed. */ function getRandomIntegerInclusive$1(min, max) { min = Math.ceil(min); max = Math.floor(max); const offset = Math.floor(Math.random() * (max - min + 1)); return offset + min; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/delay.js var require_delay$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/delay.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.calculateRetryDelay = calculateRetryDelay$2; const random_js_1$1 = require_random(); /** * Calculates the delay interval for retry attempts using exponential delay with jitter. * @param retryAttempt - The current retry attempt number. * @param config - The exponential retry configuration. * @returns An object containing the calculated retry delay. */ function calculateRetryDelay$2(retryAttempt, config) { const exponentialDelay = config.retryDelayInMs * Math.pow(2, retryAttempt); const clampedDelay = Math.min(config.maxRetryDelayInMs, exponentialDelay); const retryAfterInMs = clampedDelay / 2 + (0, random_js_1$1.getRandomIntegerInclusive)(0, clampedDelay / 2); return { retryAfterInMs }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/helpers.js var require_helpers$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/helpers.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.delay = delay$3; exports.parseHeaderValueAsNumber = parseHeaderValueAsNumber; const AbortError_js_1$3 = require_AbortError$1(); const StandardAbortMessage$1 = "The operation was aborted."; /** * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. * @param delayInMs - The number of milliseconds to be delayed. * @param value - The value to be resolved with after a timeout of t milliseconds. * @param options - The options for delay - currently abort options * - abortSignal - The abortSignal associated with containing operation. * - abortErrorMsg - The abort error message associated with containing operation. 
* @returns Resolved promise */ function delay$3(delayInMs, value, options) { return new Promise((resolve, reject) => { let timer = void 0; let onAborted = void 0; const rejectOnAbort = () => { return reject(new AbortError_js_1$3.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage$1)); }; const removeListeners = () => { if ((options === null || options === void 0 ? void 0 : options.abortSignal) && onAborted) options.abortSignal.removeEventListener("abort", onAborted); }; onAborted = () => { if (timer) clearTimeout(timer); removeListeners(); return rejectOnAbort(); }; if ((options === null || options === void 0 ? void 0 : options.abortSignal) && options.abortSignal.aborted) return rejectOnAbort(); timer = setTimeout(() => { removeListeners(); resolve(value); }, delayInMs); if (options === null || options === void 0 ? void 0 : options.abortSignal) options.abortSignal.addEventListener("abort", onAborted); }); } /** * @internal * @returns the parsed value or undefined if the parsed value is invalid. */ function parseHeaderValueAsNumber(response, headerName) { const value = response.headers.get(headerName); if (!value) return; const valueAsNum = Number(value); if (Number.isNaN(valueAsNum)) return; return valueAsNum; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/throttlingRetryStrategy.js var require_throttlingRetryStrategy = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/throttlingRetryStrategy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.isThrottlingRetryResponse = isThrottlingRetryResponse; exports.throttlingRetryStrategy = throttlingRetryStrategy; const helpers_js_1$1 = require_helpers$1(); /** * The header that comes back from services representing * the amount of time (minimum) to wait to retry (in seconds or timestamp after which we can retry). */ const RetryAfterHeader = "Retry-After"; /** * The headers that come back from services representing * the amount of time (minimum) to wait to retry. * * "retry-after-ms", "x-ms-retry-after-ms" : milliseconds * "Retry-After" : seconds or timestamp */ const AllRetryAfterHeaders = [ "retry-after-ms", "x-ms-retry-after-ms", RetryAfterHeader ]; /** * A response is a throttling retry response if it has a throttling status code (429 or 503), * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. * * Returns the `retryAfterInMs` value if the response is a throttling retry response. * If not throttling retry response, returns `undefined`. * * @internal */ function getRetryAfterInMs(response) { if (!(response && [429, 503].includes(response.status))) return void 0; try { for (const header of AllRetryAfterHeaders) { const retryAfterValue = (0, helpers_js_1$1.parseHeaderValueAsNumber)(response, header); if (retryAfterValue === 0 || retryAfterValue) { const multiplyingFactor = header === RetryAfterHeader ? 1e3 : 1; return retryAfterValue * multiplyingFactor; } } const retryAfterHeader = response.headers.get(RetryAfterHeader); if (!retryAfterHeader) return; const date = Date.parse(retryAfterHeader); const diff = date - Date.now(); return Number.isFinite(diff) ? 
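// Fallback for a non-numeric Retry-After: treat it as an HTTP-date and wait the
// distance from now, clamped at zero. Plain numeric values were already converted
// above (seconds for Retry-After, milliseconds for the *-ms headers).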
Math.max(0, diff) : void 0; } catch (_a$2) { return void 0; } } /** * A response is a retry response if it has a throttling status code (429 or 503), * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. */ function isThrottlingRetryResponse(response) { return Number.isFinite(getRetryAfterInMs(response)); } function throttlingRetryStrategy() { return { name: "throttlingRetryStrategy", retry({ response }) { const retryAfterInMs = getRetryAfterInMs(response); if (!Number.isFinite(retryAfterInMs)) return { skipStrategy: true }; return { retryAfterInMs }; } }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/exponentialRetryStrategy.js var require_exponentialRetryStrategy = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/retryStrategies/exponentialRetryStrategy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.exponentialRetryStrategy = exponentialRetryStrategy; exports.isExponentialRetryResponse = isExponentialRetryResponse; exports.isSystemError = isSystemError; const delay_js_1$2 = require_delay$1(); const throttlingRetryStrategy_js_1$2 = require_throttlingRetryStrategy(); const DEFAULT_CLIENT_RETRY_INTERVAL = 1e3; const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1e3 * 64; /** * A retry strategy that retries with an exponentially increasing delay in these two cases: * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). * - Or otherwise if the outgoing request fails (408, greater or equal than 500, except for 501 and 505). */ function exponentialRetryStrategy(options = {}) { var _a$2, _b$1; const retryInterval = (_a$2 = options.retryDelayInMs) !== null && _a$2 !== void 0 ? _a$2 : DEFAULT_CLIENT_RETRY_INTERVAL; const maxRetryInterval = (_b$1 = options.maxRetryDelayInMs) !== null && _b$1 !== void 0 ? _b$1 : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; return { name: "exponentialRetryStrategy", retry({ retryCount, response, responseError }) { const matchedSystemError = isSystemError(responseError); const ignoreSystemErrors = matchedSystemError && options.ignoreSystemErrors; const isExponential = isExponentialRetryResponse(response); const ignoreExponentialResponse = isExponential && options.ignoreHttpStatusCodes; const unknownResponse = response && ((0, throttlingRetryStrategy_js_1$2.isThrottlingRetryResponse)(response) || !isExponential); if (unknownResponse || ignoreExponentialResponse || ignoreSystemErrors) return { skipStrategy: true }; if (responseError && !matchedSystemError && !isExponential) return { errorToThrow: responseError }; return (0, delay_js_1$2.calculateRetryDelay)(retryCount, { retryDelayInMs: retryInterval, maxRetryDelayInMs: maxRetryInterval }); } }; } /** * A response is a retry response if it has status codes: * - 408, or * - Greater or equal than 500, except for 501 and 505. */ function isExponentialRetryResponse(response) { return Boolean(response && response.status !== void 0 && (response.status >= 500 || response.status === 408) && response.status !== 501 && response.status !== 505); } /** * Determines whether an error from a pipeline response was triggered in the network layer. 
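 *
 * For example, a refused TCP connection surfaces as err.code === "ECONNREFUSED" and
 * is treated as retryable by the exponential strategy above; see the code list in the
 * function body (ETIMEDOUT, ESOCKETTIMEDOUT, ECONNREFUSED, ECONNRESET, ENOENT, ENOTFOUND).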
*/ function isSystemError(err) { if (!err) return false; return err.code === "ETIMEDOUT" || err.code === "ESOCKETTIMEDOUT" || err.code === "ECONNREFUSED" || err.code === "ECONNRESET" || err.code === "ENOENT" || err.code === "ENOTFOUND"; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/retryPolicy.js var require_retryPolicy$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/retryPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.retryPolicy = retryPolicy$1; const helpers_js_1 = require_helpers$1(); const AbortError_js_1$2 = require_AbortError$1(); const logger_js_1$5 = require_logger$1(); const constants_js_1$10 = require_constants$2(); const retryPolicyLogger$1 = (0, logger_js_1$5.createClientLogger)("ts-http-runtime retryPolicy"); /** * The programmatic identifier of the retryPolicy. */ const retryPolicyName = "retryPolicy"; /** * retryPolicy is a generic policy to enable retrying requests when certain conditions are met */ function retryPolicy$1(strategies, options = { maxRetries: constants_js_1$10.DEFAULT_RETRY_POLICY_COUNT }) { const logger$2 = options.logger || retryPolicyLogger$1; return { name: retryPolicyName, async sendRequest(request, next) { var _a$2, _b$1; let response; let responseError; let retryCount = -1; retryRequest: while (true) { retryCount += 1; response = void 0; responseError = void 0; try { logger$2.info(`Retry ${retryCount}: Attempting to send request`, request.requestId); response = await next(request); logger$2.info(`Retry ${retryCount}: Received a response from request`, request.requestId); } catch (e) { logger$2.error(`Retry ${retryCount}: Received an error from request`, request.requestId); responseError = e; if (!e || responseError.name !== "RestError") throw e; response = responseError.response; } if ((_a$2 = request.abortSignal) === null || _a$2 === void 0 ? void 0 : _a$2.aborted) { logger$2.error(`Retry ${retryCount}: Request aborted.`); const abortError = new AbortError_js_1$2.AbortError(); throw abortError; } if (retryCount >= ((_b$1 = options.maxRetries) !== null && _b$1 !== void 0 ? _b$1 : constants_js_1$10.DEFAULT_RETRY_POLICY_COUNT)) { logger$2.info(`Retry ${retryCount}: Maximum retries reached. 
Returning the last received response, or throwing the last received error.`); if (responseError) throw responseError; else if (response) return response; else throw new Error("Maximum retries reached with no response or error to throw"); } logger$2.info(`Retry ${retryCount}: Processing ${strategies.length} retry strategies.`); strategiesLoop: for (const strategy of strategies) { const strategyLogger = strategy.logger || logger$2; strategyLogger.info(`Retry ${retryCount}: Processing retry strategy ${strategy.name}.`); const modifiers = strategy.retry({ retryCount, response, responseError }); if (modifiers.skipStrategy) { strategyLogger.info(`Retry ${retryCount}: Skipped.`); continue strategiesLoop; } const { errorToThrow, retryAfterInMs, redirectTo } = modifiers; if (errorToThrow) { strategyLogger.error(`Retry ${retryCount}: Retry strategy ${strategy.name} throws error:`, errorToThrow); throw errorToThrow; } if (retryAfterInMs || retryAfterInMs === 0) { strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} retries after ${retryAfterInMs}`); await (0, helpers_js_1.delay)(retryAfterInMs, void 0, { abortSignal: request.abortSignal }); continue retryRequest; } if (redirectTo) { strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} redirects to ${redirectTo}`); request.url = redirectTo; continue retryRequest; } } if (responseError) { logger$2.info(`None of the retry strategies could work with the received error. Throwing it.`); throw responseError; } if (response) { logger$2.info(`None of the retry strategies could work with the received response. Returning it.`); return response; } } } }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/defaultRetryPolicy.js var require_defaultRetryPolicy$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/defaultRetryPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.defaultRetryPolicyName = void 0; exports.defaultRetryPolicy = defaultRetryPolicy$1; const exponentialRetryStrategy_js_1$2 = require_exponentialRetryStrategy(); const throttlingRetryStrategy_js_1$1 = require_throttlingRetryStrategy(); const retryPolicy_js_1$5 = require_retryPolicy$1(); const constants_js_1$9 = require_constants$2(); /** * Name of the {@link defaultRetryPolicy} */ exports.defaultRetryPolicyName = "defaultRetryPolicy"; /** * A policy that retries according to three strategies: * - When the server sends a 429 response with a Retry-After header. * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). * - Or otherwise if the outgoing request fails, it will retry with an exponentially increasing delay. */ function defaultRetryPolicy$1(options = {}) { var _a$2; return { name: exports.defaultRetryPolicyName, sendRequest: (0, retryPolicy_js_1$5.retryPolicy)([(0, throttlingRetryStrategy_js_1$1.throttlingRetryStrategy)(), (0, exponentialRetryStrategy_js_1$2.exponentialRetryStrategy)(options)], { maxRetries: (_a$2 = options.maxRetries) !== null && _a$2 !== void 0 ? 
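// Strategy order matters: throttlingRetryStrategy runs first so a server-provided
// Retry-After wins, and exponentialRetryStrategy only applies when throttling skips.
// Usage sketch (hypothetical pipeline with an addPolicy helper):
//   pipeline.addPolicy(defaultRetryPolicy({ maxRetries: 5 }));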
_a$2 : constants_js_1$9.DEFAULT_RETRY_POLICY_COUNT }).sendRequest }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/checkEnvironment.js var require_checkEnvironment = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/checkEnvironment.js"(exports) { var _a, _b, _c, _d; Object.defineProperty(exports, "__esModule", { value: true }); exports.isReactNative = exports.isNodeRuntime = exports.isNodeLike = exports.isBun = exports.isDeno = exports.isWebWorker = exports.isBrowser = void 0; /** * A constant that indicates whether the environment the code is running is a Web Browser. */ exports.isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined"; /** * A constant that indicates whether the environment the code is running is a Web Worker. */ exports.isWebWorker = typeof self === "object" && typeof (self === null || self === void 0 ? void 0 : self.importScripts) === "function" && (((_a = self.constructor) === null || _a === void 0 ? void 0 : _a.name) === "DedicatedWorkerGlobalScope" || ((_b = self.constructor) === null || _b === void 0 ? void 0 : _b.name) === "ServiceWorkerGlobalScope" || ((_c = self.constructor) === null || _c === void 0 ? void 0 : _c.name) === "SharedWorkerGlobalScope"); /** * A constant that indicates whether the environment the code is running is Deno. */ exports.isDeno = typeof Deno !== "undefined" && typeof Deno.version !== "undefined" && typeof Deno.version.deno !== "undefined"; /** * A constant that indicates whether the environment the code is running is Bun.sh. */ exports.isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; /** * A constant that indicates whether the environment the code is running is a Node.js compatible environment. */ exports.isNodeLike = typeof globalThis.process !== "undefined" && Boolean(globalThis.process.version) && Boolean((_d = globalThis.process.versions) === null || _d === void 0 ? void 0 : _d.node); /** * A constant that indicates whether the environment the code is running is Node.JS. */ exports.isNodeRuntime = exports.isNodeLike && !exports.isBun && !exports.isDeno; /** * A constant that indicates whether the environment the code is running is in React-Native. */ exports.isReactNative = typeof navigator !== "undefined" && (navigator === null || navigator === void 0 ? void 0 : navigator.product) === "ReactNative"; } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/formDataPolicy.js var require_formDataPolicy$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/formDataPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.formDataPolicyName = void 0; exports.formDataPolicy = formDataPolicy$1; const bytesEncoding_js_1$5 = require_bytesEncoding(); const checkEnvironment_js_1$3 = require_checkEnvironment(); const httpHeaders_js_1$5 = require_httpHeaders$1(); /** * The programmatic identifier of the formDataPolicy. 
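 *
 * Illustrative behavior: with a Content-Type of "application/x-www-form-urlencoded",
 * form data { a: ["1"], b: ["2"] } is encoded into the body "a=1&b=2"; otherwise the
 * fields become multipart parts with per-field Content-Disposition headers (see
 * prepareFormData below).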
*/ exports.formDataPolicyName = "formDataPolicy"; function formDataToFormDataMap(formData) { var _a$2; const formDataMap = {}; for (const [key, value] of formData.entries()) { (_a$2 = formDataMap[key]) !== null && _a$2 !== void 0 || (formDataMap[key] = []); formDataMap[key].push(value); } return formDataMap; } /** * A policy that encodes FormData on the request into the body. */ function formDataPolicy$1() { return { name: exports.formDataPolicyName, async sendRequest(request, next) { if (checkEnvironment_js_1$3.isNodeLike && typeof FormData !== "undefined" && request.body instanceof FormData) { request.formData = formDataToFormDataMap(request.body); request.body = void 0; } if (request.formData) { const contentType$1 = request.headers.get("Content-Type"); if (contentType$1 && contentType$1.indexOf("application/x-www-form-urlencoded") !== -1) request.body = wwwFormUrlEncode(request.formData); else await prepareFormData(request.formData, request); request.formData = void 0; } return next(request); } }; } function wwwFormUrlEncode(formData) { const urlSearchParams = new URLSearchParams(); for (const [key, value] of Object.entries(formData)) if (Array.isArray(value)) for (const subValue of value) urlSearchParams.append(key, subValue.toString()); else urlSearchParams.append(key, value.toString()); return urlSearchParams.toString(); } async function prepareFormData(formData, request) { const contentType$1 = request.headers.get("Content-Type"); if (contentType$1 && !contentType$1.startsWith("multipart/form-data")) return; request.headers.set("Content-Type", contentType$1 !== null && contentType$1 !== void 0 ? contentType$1 : "multipart/form-data"); const parts = []; for (const [fieldName, values] of Object.entries(formData)) for (const value of Array.isArray(values) ? values : [values]) if (typeof value === "string") parts.push({ headers: (0, httpHeaders_js_1$5.createHttpHeaders)({ "Content-Disposition": `form-data; name="${fieldName}"` }), body: (0, bytesEncoding_js_1$5.stringToUint8Array)(value, "utf-8") }); else if (value === void 0 || value === null || typeof value !== "object") throw new Error(`Unexpected value for key ${fieldName}: ${value}. Value should be serialized to string first.`); else { const fileName = value.name || "blob"; const headers = (0, httpHeaders_js_1$5.createHttpHeaders)(); headers.set("Content-Disposition", `form-data; name="${fieldName}"; filename="${fileName}"`); headers.set("Content-Type", value.type || "application/octet-stream"); parts.push({ headers, body: value }); } request.multipartBody = { parts }; } } }); //#endregion //#region node_modules/.deno/ms@2.1.3/node_modules/ms/index.js var require_ms = __commonJS({ "node_modules/.deno/ms@2.1.3/node_modules/ms/index.js"(exports, module) { /** * Helpers. */ var s = 1e3; var m = s * 60; var h = m * 60; var d = h * 24; var w = d * 7; var y = d * 365.25; /** * Parse or format the given `val`. * * Options: * * - `long` verbose formatting [false] * * @param {String|Number} val * @param {Object} [options] * @throws {Error} throw an error if val is not a non-empty string or a number * @return {String|Number} * @api public */ module.exports = function(val, options) { options = options || {}; var type = typeof val; if (type === "string" && val.length > 0) return parse$1(val); else if (type === "number" && isFinite(val)) return options.long ? fmtLong(val) : fmtShort(val); throw new Error("val is not a non-empty string or a valid number. val=" + JSON.stringify(val)); }; /** * Parse the given `str` and return milliseconds. 
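 * For example parse("2h") === 7200000, parse("1.5m") === 90000 and parse("100") === 100
 * (a bare number defaults to milliseconds); strings longer than 100 characters or with
 * an unrecognized unit yield undefined.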
* * @param {String} str * @return {Number} * @api private */ function parse$1(str) { str = String(str); if (str.length > 100) return; var match$2 = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(str); if (!match$2) return; var n = parseFloat(match$2[1]); var type = (match$2[2] || "ms").toLowerCase(); switch (type) { case "years": case "year": case "yrs": case "yr": case "y": return n * y; case "weeks": case "week": case "w": return n * w; case "days": case "day": case "d": return n * d; case "hours": case "hour": case "hrs": case "hr": case "h": return n * h; case "minutes": case "minute": case "mins": case "min": case "m": return n * m; case "seconds": case "second": case "secs": case "sec": case "s": return n * s; case "milliseconds": case "millisecond": case "msecs": case "msec": case "ms": return n; default: return void 0; } } /** * Short format for `ms`. * * @param {Number} ms * @return {String} * @api private */ function fmtShort(ms) { var msAbs = Math.abs(ms); if (msAbs >= d) return Math.round(ms / d) + "d"; if (msAbs >= h) return Math.round(ms / h) + "h"; if (msAbs >= m) return Math.round(ms / m) + "m"; if (msAbs >= s) return Math.round(ms / s) + "s"; return ms + "ms"; } /** * Long format for `ms`. * * @param {Number} ms * @return {String} * @api private */ function fmtLong(ms) { var msAbs = Math.abs(ms); if (msAbs >= d) return plural(ms, msAbs, d, "day"); if (msAbs >= h) return plural(ms, msAbs, h, "hour"); if (msAbs >= m) return plural(ms, msAbs, m, "minute"); if (msAbs >= s) return plural(ms, msAbs, s, "second"); return ms + " ms"; } /** * Pluralization helper. */ function plural(ms, msAbs, n, name) { var isPlural = msAbs >= n * 1.5; return Math.round(ms / n) + " " + name + (isPlural ? "s" : ""); } } }); //#endregion //#region node_modules/.deno/debug@4.4.1/node_modules/debug/src/common.js var require_common = __commonJS({ "node_modules/.deno/debug@4.4.1/node_modules/debug/src/common.js"(exports, module) { /** * This is the common logic for both the Node.js and web browser * implementations of `debug()`. */ function setup(env) { createDebug.debug = createDebug; createDebug.default = createDebug; createDebug.coerce = coerce; createDebug.disable = disable$1; createDebug.enable = enable$1; createDebug.enabled = enabled$1; createDebug.humanize = require_ms(); createDebug.destroy = destroy$1; Object.keys(env).forEach((key) => { createDebug[key] = env[key]; }); /** * The currently active debug mode names, and names to skip. */ createDebug.names = []; createDebug.skips = []; /** * Map of special "%n" handling functions, for the debug "format" argument. * * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". */ createDebug.formatters = {}; /** * Selects a color for a debug namespace * @param {String} namespace The namespace string for the debug instance to be colored * @return {Number|String} An ANSI color code for the given namespace * @api private */ function selectColor(namespace) { let hash = 0; for (let i = 0; i < namespace.length; i++) { hash = (hash << 5) - hash + namespace.charCodeAt(i); hash |= 0; } return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; } createDebug.selectColor = selectColor; /** * Create a debugger with the given `namespace`. 
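 * For example (sketch): const log = createDebug("app:http"); log("listening on %d", 8080);
 * output is gated on the namespace being enabled, and %-tokens are expanded through
 * createDebug.formatters.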
* * @param {String} namespace * @return {Function} * @api public */ function createDebug(namespace) { let prevTime; let enableOverride = null; let namespacesCache; let enabledCache; function debug$3(...args) { if (!debug$3.enabled) return; const self$1 = debug$3; const curr = Number(new Date()); const ms = curr - (prevTime || curr); self$1.diff = ms; self$1.prev = prevTime; self$1.curr = curr; prevTime = curr; args[0] = createDebug.coerce(args[0]); if (typeof args[0] !== "string") args.unshift("%O"); let index = 0; args[0] = args[0].replace(/%([a-zA-Z%])/g, (match$2, format) => { if (match$2 === "%%") return "%"; index++; const formatter = createDebug.formatters[format]; if (typeof formatter === "function") { const val = args[index]; match$2 = formatter.call(self$1, val); args.splice(index, 1); index--; } return match$2; }); createDebug.formatArgs.call(self$1, args); const logFn = self$1.log || createDebug.log; logFn.apply(self$1, args); } debug$3.namespace = namespace; debug$3.useColors = createDebug.useColors(); debug$3.color = createDebug.selectColor(namespace); debug$3.extend = extend$1; debug$3.destroy = createDebug.destroy; Object.defineProperty(debug$3, "enabled", { enumerable: true, configurable: false, get: () => { if (enableOverride !== null) return enableOverride; if (namespacesCache !== createDebug.namespaces) { namespacesCache = createDebug.namespaces; enabledCache = createDebug.enabled(namespace); } return enabledCache; }, set: (v) => { enableOverride = v; } }); if (typeof createDebug.init === "function") createDebug.init(debug$3); return debug$3; } function extend$1(namespace, delimiter$1) { const newDebug = createDebug(this.namespace + (typeof delimiter$1 === "undefined" ? ":" : delimiter$1) + namespace); newDebug.log = this.log; return newDebug; } /** * Enables a debug mode by namespaces. This can include modes * separated by a colon and wildcards. * * @param {String} namespaces * @api public */ function enable$1(namespaces) { createDebug.save(namespaces); createDebug.namespaces = namespaces; createDebug.names = []; createDebug.skips = []; const split = (typeof namespaces === "string" ? namespaces : "").trim().replace(/\s+/g, ",").split(",").filter(Boolean); for (const ns of split) if (ns[0] === "-") createDebug.skips.push(ns.slice(1)); else createDebug.names.push(ns); } /** * Checks if the given string matches a namespace template, honoring * asterisks as wildcards. * * @param {String} search * @param {String} template * @return {Boolean} */ function matchesTemplate(search, template) { let searchIndex = 0; let templateIndex = 0; let starIndex = -1; let matchIndex = 0; while (searchIndex < search.length) if (templateIndex < template.length && (template[templateIndex] === search[searchIndex] || template[templateIndex] === "*")) if (template[templateIndex] === "*") { starIndex = templateIndex; matchIndex = searchIndex; templateIndex++; } else { searchIndex++; templateIndex++; } else if (starIndex !== -1) { templateIndex = starIndex + 1; matchIndex++; searchIndex = matchIndex; } else return false; while (templateIndex < template.length && template[templateIndex] === "*") templateIndex++; return templateIndex === template.length; } /** * Disable debug output. * * @return {String} namespaces * @api public */ function disable$1() { const namespaces = [...createDebug.names, ...createDebug.skips.map((namespace) => "-" + namespace)].join(","); createDebug.enable(""); return namespaces; } /** * Returns true if the given mode name is enabled, false otherwise. 
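 * Skip patterns win over name patterns: after enable("app:*,-app:noisy"),
 * enabled("app:db") is true while enabled("app:noisy") is false.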
* * @param {String} name * @return {Boolean} * @api public */ function enabled$1(name) { for (const skip of createDebug.skips) if (matchesTemplate(name, skip)) return false; for (const ns of createDebug.names) if (matchesTemplate(name, ns)) return true; return false; } /** * Coerce `val`. * * @param {Mixed} val * @return {Mixed} * @api private */ function coerce(val) { if (val instanceof Error) return val.stack || val.message; return val; } /** * XXX DO NOT USE. This is a temporary stub function. * XXX It WILL be removed in the next major release. */ function destroy$1() { console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`."); } createDebug.enable(createDebug.load()); return createDebug; } module.exports = setup; } }); //#endregion //#region node_modules/.deno/debug@4.4.1/node_modules/debug/src/browser.js var require_browser = __commonJS({ "node_modules/.deno/debug@4.4.1/node_modules/debug/src/browser.js"(exports, module) { /** * This is the web browser implementation of `debug()`. */ exports.formatArgs = formatArgs$1; exports.save = save$1; exports.load = load$1; exports.useColors = useColors$1; exports.storage = localstorage(); exports.destroy = (() => { let warned = false; return () => { if (!warned) { warned = true; console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`."); } }; })(); /** * Colors. */ exports.colors = [ "#0000CC", "#0000FF", "#0033CC", "#0033FF", "#0066CC", "#0066FF", "#0099CC", "#0099FF", "#00CC00", "#00CC33", "#00CC66", "#00CC99", "#00CCCC", "#00CCFF", "#3300CC", "#3300FF", "#3333CC", "#3333FF", "#3366CC", "#3366FF", "#3399CC", "#3399FF", "#33CC00", "#33CC33", "#33CC66", "#33CC99", "#33CCCC", "#33CCFF", "#6600CC", "#6600FF", "#6633CC", "#6633FF", "#66CC00", "#66CC33", "#9900CC", "#9900FF", "#9933CC", "#9933FF", "#99CC00", "#99CC33", "#CC0000", "#CC0033", "#CC0066", "#CC0099", "#CC00CC", "#CC00FF", "#CC3300", "#CC3333", "#CC3366", "#CC3399", "#CC33CC", "#CC33FF", "#CC6600", "#CC6633", "#CC9900", "#CC9933", "#CCCC00", "#CCCC33", "#FF0000", "#FF0033", "#FF0066", "#FF0099", "#FF00CC", "#FF00FF", "#FF3300", "#FF3333", "#FF3366", "#FF3399", "#FF33CC", "#FF33FF", "#FF6600", "#FF6633", "#FF9900", "#FF9933", "#FFCC00", "#FFCC33" ]; /** * Currently only WebKit-based Web Inspectors, Firefox >= v31, * and the Firebug extension (any Firefox version) are known * to support "%c" CSS customizations. * * TODO: add a `localStorage` variable to explicitly enable/disable colors */ function useColors$1() { if (typeof window !== "undefined" && window.process && (window.process.type === "renderer" || window.process.__nwjs)) return true; if (typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) return false; let m$1; return typeof document !== "undefined" && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || typeof window !== "undefined" && window.console && (window.console.firebug || window.console.exception && window.console.table) || typeof navigator !== "undefined" && navigator.userAgent && (m$1 = navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/)) && parseInt(m$1[1], 10) >= 31 || typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/); } /** * Colorize log arguments if enabled. 
* * @api public */ function formatArgs$1(args) { args[0] = (this.useColors ? "%c" : "") + this.namespace + (this.useColors ? " %c" : " ") + args[0] + (this.useColors ? "%c " : " ") + "+" + module.exports.humanize(this.diff); if (!this.useColors) return; const c = "color: " + this.color; args.splice(1, 0, c, "color: inherit"); let index = 0; let lastC = 0; args[0].replace(/%[a-zA-Z%]/g, (match$2) => { if (match$2 === "%%") return; index++; if (match$2 === "%c") lastC = index; }); args.splice(lastC, 0, c); } /** * Invokes `console.debug()` when available. * No-op when `console.debug` is not a "function". * If `console.debug` is not available, falls back * to `console.log`. * * @api public */ exports.log = console.debug || console.log || (() => {}); /** * Save `namespaces`. * * @param {String} namespaces * @api private */ function save$1(namespaces) { try { if (namespaces) exports.storage.setItem("debug", namespaces); else exports.storage.removeItem("debug"); } catch (error) {} } /** * Load `namespaces`. * * @return {String} returns the previously persisted debug modes * @api private */ function load$1() { let r; try { r = exports.storage.getItem("debug") || exports.storage.getItem("DEBUG"); } catch (error) {} if (!r && typeof process !== "undefined" && "env" in process) r = process.env.DEBUG; return r; } /** * Localstorage attempts to return the localstorage. * * This is necessary because safari throws * when a user disables cookies/localstorage * and you attempt to access it. * * @return {LocalStorage} * @api private */ function localstorage() { try { return localStorage; } catch (error) {} } module.exports = require_common()(exports); const { formatters: formatters$1 } = module.exports; /** * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. */ formatters$1.j = function(v) { try { return JSON.stringify(v); } catch (error) { return "[UnexpectedJSONParseError]: " + error.message; } }; } }); //#endregion //#region node_modules/.deno/debug@4.4.1/node_modules/debug/src/node.js var require_node = __commonJS({ "node_modules/.deno/debug@4.4.1/node_modules/debug/src/node.js"(exports, module) { /** * Module dependencies. */ const tty = __require("tty"); const util$3 = __require("util"); /** * This is the Node.js implementation of `debug()`. */ exports.init = init; exports.log = log; exports.formatArgs = formatArgs; exports.save = save; exports.load = load; exports.useColors = useColors; exports.destroy = util$3.deprecate(() => {}, "Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`."); /** * Colors. */ exports.colors = [ 6, 2, 3, 4, 5, 1 ]; try { const supportsColor = __require("supports-color"); if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) exports.colors = [ 20, 21, 26, 27, 32, 33, 38, 39, 40, 41, 42, 43, 44, 45, 56, 57, 62, 63, 68, 69, 74, 75, 76, 77, 78, 79, 80, 81, 92, 93, 98, 99, 112, 113, 128, 129, 134, 135, 148, 149, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 178, 179, 184, 185, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 214, 215, 220, 221 ]; } catch (error) {} /** * Build up the default `inspectOpts` object from the environment variables. 
* * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js */ exports.inspectOpts = Object.keys(process.env).filter((key) => { return /^debug_/i.test(key); }).reduce((obj, key) => { const prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, (_, k) => { return k.toUpperCase(); }); let val = process.env[key]; if (/^(yes|on|true|enabled)$/i.test(val)) val = true; else if (/^(no|off|false|disabled)$/i.test(val)) val = false; else if (val === "null") val = null; else val = Number(val); obj[prop] = val; return obj; }, {}); /** * Is stdout a TTY? Colored output is enabled when `true`. */ function useColors() { return "colors" in exports.inspectOpts ? Boolean(exports.inspectOpts.colors) : tty.isatty(process.stderr.fd); } /** * Adds ANSI color escape codes if enabled. * * @api public */ function formatArgs(args) { const { namespace: name, useColors: useColors$2 } = this; if (useColors$2) { const c = this.color; const colorCode = "\x1B[3" + (c < 8 ? c : "8;5;" + c); const prefix$1 = ` ${colorCode};1m${name} \u001B[0m`; args[0] = prefix$1 + args[0].split("\n").join("\n" + prefix$1); args.push(colorCode + "m+" + module.exports.humanize(this.diff) + "\x1B[0m"); } else args[0] = getDate() + name + " " + args[0]; } function getDate() { if (exports.inspectOpts.hideDate) return ""; return new Date().toISOString() + " "; } /** * Invokes `util.formatWithOptions()` with the specified arguments and writes to stderr. */ function log(...args) { return process.stderr.write(util$3.formatWithOptions(exports.inspectOpts, ...args) + "\n"); } /** * Save `namespaces`. * * @param {String} namespaces * @api private */ function save(namespaces) { if (namespaces) process.env.DEBUG = namespaces; else delete process.env.DEBUG; } /** * Load `namespaces`. * * @return {String} returns the previously persisted debug modes * @api private */ function load() { return process.env.DEBUG; } /** * Init logic for `debug` instances. * * Create a new `inspectOpts` object in case `useColors` is set * differently for a particular `debug` instance. */ function init(debug$3) { debug$3.inspectOpts = {}; const keys = Object.keys(exports.inspectOpts); for (let i = 0; i < keys.length; i++) debug$3.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; } module.exports = require_common()(exports); const { formatters } = module.exports; /** * Map %o to `util.inspect()`, all on a single line. */ formatters.o = function(v) { this.inspectOpts.colors = this.useColors; return util$3.inspect(v, this.inspectOpts).split("\n").map((str) => str.trim()).join(" "); }; /** * Map %O to `util.inspect()`, allowing multiple lines if needed. */ formatters.O = function(v) { this.inspectOpts.colors = this.useColors; return util$3.inspect(v, this.inspectOpts); }; } }); //#endregion //#region node_modules/.deno/debug@4.4.1/node_modules/debug/src/index.js var require_src = __commonJS({ "node_modules/.deno/debug@4.4.1/node_modules/debug/src/index.js"(exports, module) { /** * Detect Electron renderer / nwjs process, which is node, but we should * treat as a browser. 
*/ if (typeof process === "undefined" || process.type === "renderer" || process.browser === true || process.__nwjs) module.exports = require_browser(); else module.exports = require_node(); } }); //#endregion //#region node_modules/.deno/agent-base@7.1.3/node_modules/agent-base/dist/helpers.js var require_helpers = __commonJS({ "node_modules/.deno/agent-base@7.1.3/node_modules/agent-base/dist/helpers.js"(exports) { var __createBinding$17 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ? !m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$17 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$17 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$17(result, mod, k); } __setModuleDefault$17(result, mod); return result; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.req = exports.json = exports.toBuffer = void 0; const http$1 = __importStar$17(__require("http")); const https = __importStar$17(__require("https")); async function toBuffer(stream$3) { let length = 0; const chunks = []; for await (const chunk of stream$3) { length += chunk.length; chunks.push(chunk); } return Buffer.concat(chunks, length); } exports.toBuffer = toBuffer; async function json(stream$3) { const buf = await toBuffer(stream$3); const str = buf.toString("utf8"); try { return JSON.parse(str); } catch (_err) { const err = _err; err.message += ` (input: ${str})`; throw err; } } exports.json = json; function req(url$1, opts = {}) { const href = typeof url$1 === "string" ? url$1 : url$1.href; const req$1 = (href.startsWith("https:") ? https : http$1).request(url$1, opts); const promise = new Promise((resolve, reject) => { req$1.once("response", resolve).once("error", reject).end(); }); req$1.then = promise.then.bind(promise); return req$1; } exports.req = req; } }); //#endregion //#region node_modules/.deno/agent-base@7.1.3/node_modules/agent-base/dist/index.js var require_dist$4 = __commonJS({ "node_modules/.deno/agent-base@7.1.3/node_modules/agent-base/dist/index.js"(exports) { var __createBinding$16 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ? !m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$16 = exports && exports.__setModuleDefault || (Object.create ? 
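// agent-base (below): an http.Agent subclass where subclasses implement a single
// connect(req, opts) returning a net/tls socket, or another Agent to delegate to;
// HttpsProxyAgent further down is a concrete example of such a subclass.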
function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$16 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$16(result, mod, k); } __setModuleDefault$16(result, mod); return result; }; var __exportStar = exports && exports.__exportStar || function(m$1, exports$1) { for (var p in m$1) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports$1, p)) __createBinding$16(exports$1, m$1, p); }; Object.defineProperty(exports, "__esModule", { value: true }); exports.Agent = void 0; const net$2 = __importStar$16(__require("net")); const http = __importStar$16(__require("http")); const https_1 = __require("https"); __exportStar(require_helpers(), exports); const INTERNAL = Symbol("AgentBaseInternalState"); var Agent = class extends http.Agent { constructor(opts) { super(opts); this[INTERNAL] = {}; } /** * Determine whether this is an `http` or `https` request. */ isSecureEndpoint(options) { if (options) { if (typeof options.secureEndpoint === "boolean") return options.secureEndpoint; if (typeof options.protocol === "string") return options.protocol === "https:"; } const { stack } = new Error(); if (typeof stack !== "string") return false; return stack.split("\n").some((l) => l.indexOf("(https.js:") !== -1 || l.indexOf("node:https:") !== -1); } incrementSockets(name) { if (this.maxSockets === Infinity && this.maxTotalSockets === Infinity) return null; if (!this.sockets[name]) this.sockets[name] = []; const fakeSocket = new net$2.Socket({ writable: false }); this.sockets[name].push(fakeSocket); this.totalSocketCount++; return fakeSocket; } decrementSockets(name, socket) { if (!this.sockets[name] || socket === null) return; const sockets = this.sockets[name]; const index = sockets.indexOf(socket); if (index !== -1) { sockets.splice(index, 1); this.totalSocketCount--; if (sockets.length === 0) delete this.sockets[name]; } } getName(options) { const secureEndpoint = typeof options.secureEndpoint === "boolean" ? options.secureEndpoint : this.isSecureEndpoint(options); if (secureEndpoint) return https_1.Agent.prototype.getName.call(this, options); return super.getName(options); } createSocket(req$1, options, cb) { const connectOpts = { ...options, secureEndpoint: this.isSecureEndpoint(options) }; const name = this.getName(connectOpts); const fakeSocket = this.incrementSockets(name); Promise.resolve().then(() => this.connect(req$1, connectOpts)).then((socket) => { this.decrementSockets(name, fakeSocket); if (socket instanceof http.Agent) try { return socket.addRequest(req$1, connectOpts); } catch (err) { return cb(err); } this[INTERNAL].currentSocket = socket; super.createSocket(req$1, options, cb); }, (err) => { this.decrementSockets(name, fakeSocket); cb(err); }); } createConnection() { const socket = this[INTERNAL].currentSocket; this[INTERNAL].currentSocket = void 0; if (!socket) throw new Error("No socket was returned in the `connect()` function"); return socket; } get defaultPort() { return this[INTERNAL].defaultPort ?? (this.protocol === "https:" ? 443 : 80); } set defaultPort(v) { if (this[INTERNAL]) this[INTERNAL].defaultPort = v; } get protocol() { return this[INTERNAL].protocol ?? (this.isSecureEndpoint() ? 
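// When no explicit protocol was stashed on the internal state, infer it from
// isSecureEndpoint(), mirroring the defaultPort getter above (443 for https:, 80 otherwise).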
"https:" : "http:"); } set protocol(v) { if (this[INTERNAL]) this[INTERNAL].protocol = v; } }; exports.Agent = Agent; } }); //#endregion //#region node_modules/.deno/https-proxy-agent@7.0.6/node_modules/https-proxy-agent/dist/parse-proxy-response.js var require_parse_proxy_response = __commonJS({ "node_modules/.deno/https-proxy-agent@7.0.6/node_modules/https-proxy-agent/dist/parse-proxy-response.js"(exports) { var __importDefault$5 = exports && exports.__importDefault || function(mod) { return mod && mod.__esModule ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.parseProxyResponse = void 0; const debug_1$2 = __importDefault$5(require_src()); const debug$2 = (0, debug_1$2.default)("https-proxy-agent:parse-proxy-response"); function parseProxyResponse(socket) { return new Promise((resolve, reject) => { let buffersLength = 0; const buffers = []; function read() { const b = socket.read(); if (b) ondata(b); else socket.once("readable", read); } function cleanup() { socket.removeListener("end", onend); socket.removeListener("error", onerror); socket.removeListener("readable", read); } function onend() { cleanup(); debug$2("onend"); reject(new Error("Proxy connection ended before receiving CONNECT response")); } function onerror(err) { cleanup(); debug$2("onerror %o", err); reject(err); } function ondata(b) { buffers.push(b); buffersLength += b.length; const buffered = Buffer.concat(buffers, buffersLength); const endOfHeaders = buffered.indexOf("\r\n\r\n"); if (endOfHeaders === -1) { debug$2("have not received end of HTTP headers yet..."); read(); return; } const headerParts = buffered.slice(0, endOfHeaders).toString("ascii").split("\r\n"); const firstLine = headerParts.shift(); if (!firstLine) { socket.destroy(); return reject(new Error("No header received from proxy CONNECT response")); } const firstLineParts = firstLine.split(" "); const statusCode = +firstLineParts[1]; const statusText = firstLineParts.slice(2).join(" "); const headers = {}; for (const header of headerParts) { if (!header) continue; const firstColon = header.indexOf(":"); if (firstColon === -1) { socket.destroy(); return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); } const key = header.slice(0, firstColon).toLowerCase(); const value = header.slice(firstColon + 1).trimStart(); const current = headers[key]; if (typeof current === "string") headers[key] = [current, value]; else if (Array.isArray(current)) current.push(value); else headers[key] = value; } debug$2("got proxy server response: %o %o", firstLine, headers); cleanup(); resolve({ connect: { statusCode, statusText, headers }, buffered }); } socket.on("error", onerror); socket.on("end", onend); read(); }); } exports.parseProxyResponse = parseProxyResponse; } }); //#endregion //#region node_modules/.deno/https-proxy-agent@7.0.6/node_modules/https-proxy-agent/dist/index.js var require_dist$3 = __commonJS({ "node_modules/.deno/https-proxy-agent@7.0.6/node_modules/https-proxy-agent/dist/index.js"(exports) { var __createBinding$15 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ? 
!m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$15 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$15 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$15(result, mod, k); } __setModuleDefault$15(result, mod); return result; }; var __importDefault$4 = exports && exports.__importDefault || function(mod) { return mod && mod.__esModule ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.HttpsProxyAgent = void 0; const net$1 = __importStar$15(__require("net")); const tls$1 = __importStar$15(__require("tls")); const assert_1$8 = __importDefault$4(__require("assert")); const debug_1$1 = __importDefault$4(require_src()); const agent_base_1$1 = require_dist$4(); const url_1$2 = __require("url"); const parse_proxy_response_1 = require_parse_proxy_response(); const debug$1 = (0, debug_1$1.default)("https-proxy-agent"); const setServernameFromNonIpHost = (options) => { if (options.servername === void 0 && options.host && !net$1.isIP(options.host)) return { ...options, servername: options.host }; return options; }; /** * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. * * Outgoing HTTP requests are first tunneled through the proxy server using the * `CONNECT` HTTP request method to establish a connection to the proxy server, * and then the proxy server connects to the destination target and issues the * HTTP request from the proxy server. * * `https:` requests have their socket connection upgraded to TLS once * the connection to the proxy server has been established. */ var HttpsProxyAgent = class extends agent_base_1$1.Agent { constructor(proxy, opts) { super(opts); this.options = { path: void 0 }; this.proxy = typeof proxy === "string" ? new url_1$2.URL(proxy) : proxy; this.proxyHeaders = opts?.headers ?? {}; debug$1("Creating new HttpsProxyAgent instance: %o", this.proxy.href); const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ""); const port = this.proxy.port ? parseInt(this.proxy.port, 10) : this.proxy.protocol === "https:" ? 443 : 80; this.connectOpts = { ALPNProtocols: ["http/1.1"], ...opts ? omit$1(opts, "headers") : null, host, port }; } /** * Called when the node-core HTTP client library is creating a * new HTTP request. */ async connect(req$1, opts) { const { proxy } = this; if (!opts.host) throw new TypeError("No \"host\" provided"); let socket; if (proxy.protocol === "https:") { debug$1("Creating `tls.Socket`: %o", this.connectOpts); socket = tls$1.connect(setServernameFromNonIpHost(this.connectOpts)); } else { debug$1("Creating `net.Socket`: %o", this.connectOpts); socket = net$1.connect(this.connectOpts); } const headers = typeof this.proxyHeaders === "function" ? this.proxyHeaders() : { ...this.proxyHeaders }; const host = net$1.isIPv6(opts.host) ? 
`[${opts.host}]` : opts.host; let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r\n`; if (proxy.username || proxy.password) { const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; headers["Proxy-Authorization"] = `Basic ${Buffer.from(auth).toString("base64")}`; } headers.Host = `${host}:${opts.port}`; if (!headers["Proxy-Connection"]) headers["Proxy-Connection"] = this.keepAlive ? "Keep-Alive" : "close"; for (const name of Object.keys(headers)) payload += `${name}: ${headers[name]}\r\n`; const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket); socket.write(`${payload}\r\n`); const { connect, buffered } = await proxyResponsePromise; req$1.emit("proxyConnect", connect); this.emit("proxyConnect", connect, req$1); if (connect.statusCode === 200) { req$1.once("socket", resume); if (opts.secureEndpoint) { debug$1("Upgrading socket connection to TLS"); return tls$1.connect({ ...omit$1(setServernameFromNonIpHost(opts), "host", "path", "port"), socket }); } return socket; } socket.destroy(); const fakeSocket = new net$1.Socket({ writable: false }); fakeSocket.readable = true; req$1.once("socket", (s$1) => { debug$1("Replaying proxy buffer for failed request"); (0, assert_1$8.default)(s$1.listenerCount("data") > 0); s$1.push(buffered); s$1.push(null); }); return fakeSocket; } }; HttpsProxyAgent.protocols = ["http", "https"]; exports.HttpsProxyAgent = HttpsProxyAgent; function resume(socket) { socket.resume(); } function omit$1(obj, ...keys) { const ret = {}; let key; for (key in obj) if (!keys.includes(key)) ret[key] = obj[key]; return ret; } } }); //#endregion //#region node_modules/.deno/http-proxy-agent@7.0.2/node_modules/http-proxy-agent/dist/index.js var require_dist$2 = __commonJS({ "node_modules/.deno/http-proxy-agent@7.0.2/node_modules/http-proxy-agent/dist/index.js"(exports) { var __createBinding$14 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ? !m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$14 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$14 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$14(result, mod, k); } __setModuleDefault$14(result, mod); return result; }; var __importDefault$3 = exports && exports.__importDefault || function(mod) { return mod && mod.__esModule ? 
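/**
 * A minimal usage sketch for `HttpsProxyAgent` above (proxy and target URLs are
 * placeholders; `https` is Node's built-in module): the agent opens a CONNECT
 * tunnel through the proxy, then upgrades the socket to TLS for `https:`
 * endpoints.
 *
 *   const agent = new HttpsProxyAgent("http://proxy.example.com:8080");
 *   https.get("https://example.com", { agent }, (res) => {
 *     console.log(res.statusCode);
 *   });
 */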
mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.HttpProxyAgent = void 0; const net = __importStar$14(__require("net")); const tls = __importStar$14(__require("tls")); const debug_1 = __importDefault$3(require_src()); const events_1 = __require("events"); const agent_base_1 = require_dist$4(); const url_1$1 = __require("url"); const debug = (0, debug_1.default)("http-proxy-agent"); /** * The `HttpProxyAgent` implements an HTTP Agent subclass that connects * to the specified "HTTP proxy server" in order to proxy HTTP requests. */ var HttpProxyAgent = class extends agent_base_1.Agent { constructor(proxy, opts) { super(opts); this.proxy = typeof proxy === "string" ? new url_1$1.URL(proxy) : proxy; this.proxyHeaders = opts?.headers ?? {}; debug("Creating new HttpProxyAgent instance: %o", this.proxy.href); const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ""); const port = this.proxy.port ? parseInt(this.proxy.port, 10) : this.proxy.protocol === "https:" ? 443 : 80; this.connectOpts = { ...opts ? omit(opts, "headers") : null, host, port }; } addRequest(req$1, opts) { req$1._header = null; this.setRequestProps(req$1, opts); super.addRequest(req$1, opts); } setRequestProps(req$1, opts) { const { proxy } = this; const protocol = opts.secureEndpoint ? "https:" : "http:"; const hostname = req$1.getHeader("host") || "localhost"; const base = `${protocol}//${hostname}`; const url$1 = new url_1$1.URL(req$1.path, base); if (opts.port !== 80) url$1.port = String(opts.port); req$1.path = String(url$1); const headers = typeof this.proxyHeaders === "function" ? this.proxyHeaders() : { ...this.proxyHeaders }; if (proxy.username || proxy.password) { const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; headers["Proxy-Authorization"] = `Basic ${Buffer.from(auth).toString("base64")}`; } if (!headers["Proxy-Connection"]) headers["Proxy-Connection"] = this.keepAlive ? 
"Keep-Alive" : "close"; for (const name of Object.keys(headers)) { const value = headers[name]; if (value) req$1.setHeader(name, value); } } async connect(req$1, opts) { req$1._header = null; if (!req$1.path.includes("://")) this.setRequestProps(req$1, opts); let first; let endOfHeaders; debug("Regenerating stored HTTP header string for request"); req$1._implicitHeader(); if (req$1.outputData && req$1.outputData.length > 0) { debug("Patching connection write() output buffer with updated header"); first = req$1.outputData[0].data; endOfHeaders = first.indexOf("\r\n\r\n") + 4; req$1.outputData[0].data = req$1._header + first.substring(endOfHeaders); debug("Output buffer: %o", req$1.outputData[0].data); } let socket; if (this.proxy.protocol === "https:") { debug("Creating `tls.Socket`: %o", this.connectOpts); socket = tls.connect(this.connectOpts); } else { debug("Creating `net.Socket`: %o", this.connectOpts); socket = net.connect(this.connectOpts); } await (0, events_1.once)(socket, "connect"); return socket; } }; HttpProxyAgent.protocols = ["http", "https"]; exports.HttpProxyAgent = HttpProxyAgent; function omit(obj, ...keys) { const ret = {}; let key; for (key in obj) if (!keys.includes(key)) ret[key] = obj[key]; return ret; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/proxyPolicy.js var require_proxyPolicy$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/proxyPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.globalNoProxyList = exports.proxyPolicyName = void 0; exports.loadNoProxy = loadNoProxy; exports.getDefaultProxySettings = getDefaultProxySettings$1; exports.proxyPolicy = proxyPolicy$1; const https_proxy_agent_1 = require_dist$3(); const http_proxy_agent_1 = require_dist$2(); const log_js_1$7 = require_log$2(); const HTTPS_PROXY = "HTTPS_PROXY"; const HTTP_PROXY = "HTTP_PROXY"; const ALL_PROXY = "ALL_PROXY"; const NO_PROXY = "NO_PROXY"; /** * The programmatic identifier of the proxyPolicy. */ exports.proxyPolicyName = "proxyPolicy"; /** * Stores the patterns specified in NO_PROXY environment variable. * @internal */ exports.globalNoProxyList = []; let noProxyListLoaded = false; /** A cache of whether a host should bypass the proxy. */ const globalBypassedMap = new Map(); function getEnvironmentValue(name) { if (process.env[name]) return process.env[name]; else if (process.env[name.toLowerCase()]) return process.env[name.toLowerCase()]; return void 0; } function loadEnvironmentProxyValue() { if (!process) return void 0; const httpsProxy = getEnvironmentValue(HTTPS_PROXY); const allProxy = getEnvironmentValue(ALL_PROXY); const httpProxy = getEnvironmentValue(HTTP_PROXY); return httpsProxy || allProxy || httpProxy; } /** * Check whether the host of a given `uri` matches any pattern in the no proxy list. * If there's a match, any request sent to the same host shouldn't have the proxy settings set. * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 */ function isBypassed(uri, noProxyList, bypassedMap) { if (noProxyList.length === 0) return false; const host = new URL(uri).hostname; if (bypassedMap === null || bypassedMap === void 0 ? 
void 0 : bypassedMap.has(host)) return bypassedMap.get(host); let isBypassedFlag = false; for (const pattern of noProxyList) if (pattern[0] === ".") { if (host.endsWith(pattern)) isBypassedFlag = true; else if (host.length === pattern.length - 1 && host === pattern.slice(1)) isBypassedFlag = true; } else if (host === pattern) isBypassedFlag = true; bypassedMap === null || bypassedMap === void 0 || bypassedMap.set(host, isBypassedFlag); return isBypassedFlag; } function loadNoProxy() { const noProxy = getEnvironmentValue(NO_PROXY); noProxyListLoaded = true; if (noProxy) return noProxy.split(",").map((item) => item.trim()).filter((item) => item.length); return []; } /** * This method converts a proxy url into `ProxySettings` for use with ProxyPolicy. * If no argument is given, it attempts to parse a proxy URL from the environment * variables `HTTPS_PROXY` or `HTTP_PROXY`. * @param proxyUrl - The url of the proxy to use. May contain authentication information. * @deprecated - Internally this method is no longer necessary when setting proxy information. */ function getDefaultProxySettings$1(proxyUrl) { if (!proxyUrl) { proxyUrl = loadEnvironmentProxyValue(); if (!proxyUrl) return void 0; } const parsedUrl = new URL(proxyUrl); const schema = parsedUrl.protocol ? parsedUrl.protocol + "//" : ""; return { host: schema + parsedUrl.hostname, port: Number.parseInt(parsedUrl.port || "80"), username: parsedUrl.username, password: parsedUrl.password }; } /** * This method attempts to parse a proxy URL from the environment * variables `HTTPS_PROXY` or `HTTP_PROXY`. */ function getDefaultProxySettingsInternal() { const envProxy = loadEnvironmentProxyValue(); return envProxy ? new URL(envProxy) : void 0; } function getUrlFromProxySettings(settings) { let parsedProxyUrl; try { parsedProxyUrl = new URL(settings.host); } catch (_a$2) { throw new Error(`Expecting a valid host string in proxy settings, but found "${settings.host}".`); } parsedProxyUrl.port = String(settings.port); if (settings.username) parsedProxyUrl.username = settings.username; if (settings.password) parsedProxyUrl.password = settings.password; return parsedProxyUrl; } function setProxyAgentOnRequest(request, cachedAgents, proxyUrl) { if (request.agent) return; const url$1 = new URL(request.url); const isInsecure = url$1.protocol !== "https:"; if (request.tlsSettings) log_js_1$7.logger.warning("TLS settings are not supported in combination with custom Proxy, certificates provided to the client will be ignored."); const headers = request.headers.toJSON(); if (isInsecure) { if (!cachedAgents.httpProxyAgent) cachedAgents.httpProxyAgent = new http_proxy_agent_1.HttpProxyAgent(proxyUrl, { headers }); request.agent = cachedAgents.httpProxyAgent; } else { if (!cachedAgents.httpsProxyAgent) cachedAgents.httpsProxyAgent = new https_proxy_agent_1.HttpsProxyAgent(proxyUrl, { headers }); request.agent = cachedAgents.httpsProxyAgent; } } /** * A policy that allows one to apply proxy settings to all requests. * If not passed static settings, they will be retrieved from the HTTPS_PROXY * or HTTP_PROXY environment variables. * @param proxySettings - ProxySettings to use on each request. * @param options - additional settings, for example, custom NO_PROXY patterns */ function proxyPolicy$1(proxySettings, options) { if (!noProxyListLoaded) exports.globalNoProxyList.push(...loadNoProxy()); const defaultProxy = proxySettings ? 
getUrlFromProxySettings(proxySettings) : getDefaultProxySettingsInternal(); const cachedAgents = {}; return { name: exports.proxyPolicyName, async sendRequest(request, next) { var _a$2; if (!request.proxySettings && defaultProxy && !isBypassed(request.url, (_a$2 = options === null || options === void 0 ? void 0 : options.customNoProxyList) !== null && _a$2 !== void 0 ? _a$2 : exports.globalNoProxyList, (options === null || options === void 0 ? void 0 : options.customNoProxyList) ? void 0 : globalBypassedMap)) setProxyAgentOnRequest(request, cachedAgents, defaultProxy); else if (request.proxySettings) setProxyAgentOnRequest(request, cachedAgents, getUrlFromProxySettings(request.proxySettings)); return next(request); } }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/agentPolicy.js var require_agentPolicy$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/agentPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.agentPolicyName = void 0; exports.agentPolicy = agentPolicy$1; /** * Name of the Agent Policy */ exports.agentPolicyName = "agentPolicy"; /** * Gets a pipeline policy that sets http.agent */ function agentPolicy$1(agent) { return { name: exports.agentPolicyName, sendRequest: async (req$1, next) => { if (!req$1.agent) req$1.agent = agent; return next(req$1); } }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/tlsPolicy.js var require_tlsPolicy$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/tlsPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.tlsPolicyName = void 0; exports.tlsPolicy = tlsPolicy$1; /** * Name of the TLS Policy */ exports.tlsPolicyName = "tlsPolicy"; /** * Gets a pipeline policy that adds the client certificate to the HttpClient agent for authentication. 
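 *
 * A minimal usage sketch (uses the runtime's `createEmptyPipeline` factory;
 * the certificate and key values are placeholders):
 *
 *   const pipeline = createEmptyPipeline();
 *   pipeline.addPolicy(tlsPolicy({ cert: myCert, key: myKey }));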
*/ function tlsPolicy$1(tlsSettings) { return { name: exports.tlsPolicyName, sendRequest: async (req$1, next) => { if (!req$1.tlsSettings) req$1.tlsSettings = tlsSettings; return next(req$1); } }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/typeGuards.js var require_typeGuards$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/typeGuards.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.isNodeReadableStream = isNodeReadableStream$1; exports.isWebReadableStream = isWebReadableStream; exports.isBinaryBody = isBinaryBody; exports.isReadableStream = isReadableStream; exports.isBlob = isBlob; function isNodeReadableStream$1(x) { return Boolean(x && typeof x["pipe"] === "function"); } function isWebReadableStream(x) { return Boolean(x && typeof x.getReader === "function" && typeof x.tee === "function"); } function isBinaryBody(body$1) { return body$1 !== void 0 && (body$1 instanceof Uint8Array || isReadableStream(body$1) || typeof body$1 === "function" || body$1 instanceof Blob); } function isReadableStream(x) { return isNodeReadableStream$1(x) || isWebReadableStream(x); } function isBlob(x) { return typeof x.stream === "function"; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/concat.js var require_concat = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/concat.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.concat = concat; const tslib_1$5 = require_tslib(); const stream_1 = __require("stream"); const typeGuards_js_1$4 = require_typeGuards$1(); function streamAsyncIterator() { return tslib_1$5.__asyncGenerator(this, arguments, function* streamAsyncIterator_1() { const reader = this.getReader(); try { while (true) { const { done, value } = yield tslib_1$5.__await(reader.read()); if (done) return yield tslib_1$5.__await(void 0); yield yield tslib_1$5.__await(value); } } finally { reader.releaseLock(); } }); } function makeAsyncIterable(webStream) { if (!webStream[Symbol.asyncIterator]) webStream[Symbol.asyncIterator] = streamAsyncIterator.bind(webStream); if (!webStream.values) webStream.values = streamAsyncIterator.bind(webStream); } function ensureNodeStream(stream$3) { if (stream$3 instanceof ReadableStream) { makeAsyncIterable(stream$3); return stream_1.Readable.fromWeb(stream$3); } else return stream$3; } function toStream(source) { if (source instanceof Uint8Array) return stream_1.Readable.from(Buffer.from(source)); else if ((0, typeGuards_js_1$4.isBlob)(source)) return ensureNodeStream(source.stream()); else return ensureNodeStream(source); } /** * Utility function that concatenates a set of binary inputs into one combined output. * * @param sources - array of sources for the concatenation * @returns - in Node, a (() =\> NodeJS.ReadableStream) which, when read, produces a concatenation of all the inputs. * In browser, returns a `Blob` representing all the concatenated inputs. * * @internal */ async function concat(sources) { return function() { const streams = sources.map((x) => typeof x === "function" ? 
x() : x).map(toStream); return stream_1.Readable.from(function() { return tslib_1$5.__asyncGenerator(this, arguments, function* () { var _a$2, e_1, _b$1, _c$1; for (const stream$3 of streams) try { for (var _d$1 = true, stream_2 = (e_1 = void 0, tslib_1$5.__asyncValues(stream$3)), stream_2_1; stream_2_1 = yield tslib_1$5.__await(stream_2.next()), _a$2 = stream_2_1.done, !_a$2; _d$1 = true) { _c$1 = stream_2_1.value; _d$1 = false; const chunk = _c$1; yield yield tslib_1$5.__await(chunk); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (!_d$1 && !_a$2 && (_b$1 = stream_2.return)) yield tslib_1$5.__await(_b$1.call(stream_2)); } finally { if (e_1) throw e_1.error; } } }); }()); }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/multipartPolicy.js var require_multipartPolicy$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/multipartPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.multipartPolicyName = void 0; exports.multipartPolicy = multipartPolicy$1; const bytesEncoding_js_1$4 = require_bytesEncoding(); const typeGuards_js_1$3 = require_typeGuards$1(); const uuidUtils_js_1$1 = require_uuidUtils(); const concat_js_1 = require_concat(); function generateBoundary() { return `----AzSDKFormBoundary${(0, uuidUtils_js_1$1.randomUUID)()}`; } function encodeHeaders(headers) { let result = ""; for (const [key, value] of headers) result += `${key}: ${value}\r\n`; return result; } function getLength(source) { if (source instanceof Uint8Array) return source.byteLength; else if ((0, typeGuards_js_1$3.isBlob)(source)) return source.size === -1 ? 
void 0 : source.size; else return void 0; } function getTotalLength(sources) { let total = 0; for (const source of sources) { const partLength = getLength(source); if (partLength === void 0) return void 0; else total += partLength; } return total; } async function buildRequestBody(request, parts, boundary) { const sources = [ (0, bytesEncoding_js_1$4.stringToUint8Array)(`--${boundary}`, "utf-8"), ...parts.flatMap((part) => [ (0, bytesEncoding_js_1$4.stringToUint8Array)("\r\n", "utf-8"), (0, bytesEncoding_js_1$4.stringToUint8Array)(encodeHeaders(part.headers), "utf-8"), (0, bytesEncoding_js_1$4.stringToUint8Array)("\r\n", "utf-8"), part.body, (0, bytesEncoding_js_1$4.stringToUint8Array)(`\r\n--${boundary}`, "utf-8") ]), (0, bytesEncoding_js_1$4.stringToUint8Array)("--\r\n\r\n", "utf-8") ]; const contentLength$1 = getTotalLength(sources); if (contentLength$1) request.headers.set("Content-Length", contentLength$1); request.body = await (0, concat_js_1.concat)(sources); } /** * Name of multipart policy */ exports.multipartPolicyName = "multipartPolicy"; const maxBoundaryLength = 70; const validBoundaryCharacters = new Set(`abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'()+,-./:=?`); function assertValidBoundary(boundary) { if (boundary.length > maxBoundaryLength) throw new Error(`Multipart boundary "${boundary}" exceeds maximum length of 70 characters`); if (Array.from(boundary).some((x) => !validBoundaryCharacters.has(x))) throw new Error(`Multipart boundary "${boundary}" contains invalid characters`); } /** * Pipeline policy for multipart requests */ function multipartPolicy$1() { return { name: exports.multipartPolicyName, async sendRequest(request, next) { var _a$2; if (!request.multipartBody) return next(request); if (request.body) throw new Error("multipartBody and regular body cannot be set at the same time"); let boundary = request.multipartBody.boundary; const contentTypeHeader = (_a$2 = request.headers.get("Content-Type")) !== null && _a$2 !== void 0 ? 
_a$2 : "multipart/mixed"; const parsedHeader = contentTypeHeader.match(/^(multipart\/[^ ;]+)(?:; *boundary=(.+))?$/); if (!parsedHeader) throw new Error(`Got multipart request body, but content-type header was not multipart: ${contentTypeHeader}`); const [, contentType$1, parsedBoundary] = parsedHeader; if (parsedBoundary && boundary && parsedBoundary !== boundary) throw new Error(`Multipart boundary was specified as ${parsedBoundary} in the header, but got ${boundary} in the request body`); boundary !== null && boundary !== void 0 || (boundary = parsedBoundary); if (boundary) assertValidBoundary(boundary); else boundary = generateBoundary(); request.headers.set("Content-Type", `${contentType$1}; boundary=${boundary}`); await buildRequestBody(request, request.multipartBody.parts, boundary); request.multipartBody = void 0; return next(request); } }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/createPipelineFromOptions.js var require_createPipelineFromOptions$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/createPipelineFromOptions.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createPipelineFromOptions = createPipelineFromOptions$1; const logPolicy_js_1$3 = require_logPolicy$1(); const pipeline_js_1$5 = require_pipeline$2(); const redirectPolicy_js_1$3 = require_redirectPolicy$1(); const userAgentPolicy_js_1$3 = require_userAgentPolicy$1(); const decompressResponsePolicy_js_1$3 = require_decompressResponsePolicy$1(); const defaultRetryPolicy_js_1$3 = require_defaultRetryPolicy$1(); const formDataPolicy_js_1$3 = require_formDataPolicy$1(); const checkEnvironment_js_1$2 = require_checkEnvironment(); const proxyPolicy_js_1$3 = require_proxyPolicy$1(); const agentPolicy_js_1$3 = require_agentPolicy$1(); const tlsPolicy_js_1$3 = require_tlsPolicy$1(); const multipartPolicy_js_1$3 = require_multipartPolicy$1(); /** * Create a new pipeline with a default set of customizable policies. * @param options - Options to configure a custom pipeline. 
*/ function createPipelineFromOptions$1(options) { const pipeline = (0, pipeline_js_1$5.createEmptyPipeline)(); if (checkEnvironment_js_1$2.isNodeLike) { if (options.agent) pipeline.addPolicy((0, agentPolicy_js_1$3.agentPolicy)(options.agent)); if (options.tlsOptions) pipeline.addPolicy((0, tlsPolicy_js_1$3.tlsPolicy)(options.tlsOptions)); pipeline.addPolicy((0, proxyPolicy_js_1$3.proxyPolicy)(options.proxyOptions)); pipeline.addPolicy((0, decompressResponsePolicy_js_1$3.decompressResponsePolicy)()); } pipeline.addPolicy((0, formDataPolicy_js_1$3.formDataPolicy)(), { beforePolicies: [multipartPolicy_js_1$3.multipartPolicyName] }); pipeline.addPolicy((0, userAgentPolicy_js_1$3.userAgentPolicy)(options.userAgentOptions)); pipeline.addPolicy((0, multipartPolicy_js_1$3.multipartPolicy)(), { afterPhase: "Deserialize" }); pipeline.addPolicy((0, defaultRetryPolicy_js_1$3.defaultRetryPolicy)(options.retryOptions), { phase: "Retry" }); if (checkEnvironment_js_1$2.isNodeLike) pipeline.addPolicy((0, redirectPolicy_js_1$3.redirectPolicy)(options.redirectOptions), { afterPhase: "Retry" }); pipeline.addPolicy((0, logPolicy_js_1$3.logPolicy)(options.loggingOptions), { afterPhase: "Sign" }); return pipeline; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/client/apiVersionPolicy.js var require_apiVersionPolicy = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/client/apiVersionPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.apiVersionPolicyName = void 0; exports.apiVersionPolicy = apiVersionPolicy; exports.apiVersionPolicyName = "ApiVersionPolicy"; /** * Creates a policy that sets the apiVersion as a query parameter on every request * @param options - Client options * @returns Pipeline policy that sets the apiVersion as a query parameter on every request */ function apiVersionPolicy(options) { return { name: exports.apiVersionPolicyName, sendRequest: (req$1, next) => { const url$1 = new URL(req$1.url); if (!url$1.searchParams.get("api-version") && options.apiVersion) req$1.url = `${req$1.url}${Array.from(url$1.searchParams.keys()).length > 0 ? "&" : "?"}api-version=${options.apiVersion}`; return next(req$1); } }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/credentials.js var require_credentials = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/auth/credentials.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.isOAuth2TokenCredential = isOAuth2TokenCredential; exports.isBearerTokenCredential = isBearerTokenCredential; exports.isBasicCredential = isBasicCredential; exports.isApiKeyCredential = isApiKeyCredential; /** * Type guard to check if a credential is an OAuth2 token credential. */ function isOAuth2TokenCredential(credential) { return "getOAuth2Token" in credential; } /** * Type guard to check if a credential is a Bearer token credential. */ function isBearerTokenCredential(credential) { return "getBearerToken" in credential; } /** * Type guard to check if a credential is a Basic auth credential. */ function isBasicCredential(credential) { return "username" in credential && "password" in credential; } /** * Type guard to check if a credential is an API key credential. 
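 *
 * A minimal sketch of how these guards discriminate credential shapes (the
 * values are placeholders):
 *
 *   isApiKeyCredential({ key: "secret" }); // true: has a `key` property
 *   isBasicCredential({ username: "u", password: "p" }); // true
 *   isBearerTokenCredential({ getBearerToken: async () => "token" }); // true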
*/ function isApiKeyCredential(credential) { return "key" in credential; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/checkInsecureConnection.js var require_checkInsecureConnection = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/checkInsecureConnection.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.ensureSecureConnection = ensureSecureConnection; const log_js_1$6 = require_log$2(); let insecureConnectionWarningEmitted = false; /** * Checks if the request is allowed to be sent over an insecure connection. * * A request is allowed to be sent over an insecure connection when all of the following hold: * - The `allowInsecureConnection` option is set to `true`. * - The request has the `allowInsecureConnection` property set to `true`. * - The request is being sent to `localhost` or `127.0.0.1` */ function allowInsecureConnection(request, options) { if (options.allowInsecureConnection && request.allowInsecureConnection) { const url$1 = new URL(request.url); if (url$1.hostname === "localhost" || url$1.hostname === "127.0.0.1") return true; } return false; } /** * Logs a warning about sending a token over an insecure connection. * * This function will emit a node warning once, but log the warning every time. */ function emitInsecureConnectionWarning() { const warning = "Sending token over insecure transport. Assume any token issued is compromised."; log_js_1$6.logger.warning(warning); if (typeof (process === null || process === void 0 ? void 0 : process.emitWarning) === "function" && !insecureConnectionWarningEmitted) { insecureConnectionWarningEmitted = true; process.emitWarning(warning); } } /** * Ensures that authentication is only allowed over HTTPS unless explicitly allowed. * Throws an error if the connection is not secure and not explicitly allowed. */ function ensureSecureConnection(request, options) { if (!request.url.toLowerCase().startsWith("https://")) if (allowInsecureConnection(request, options)) emitInsecureConnectionWarning(); else throw new Error("Authentication is not permitted for non-TLS protected (non-https) URLs when allowInsecureConnection is false."); } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/apiKeyAuthenticationPolicy.js var require_apiKeyAuthenticationPolicy = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/apiKeyAuthenticationPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.apiKeyAuthenticationPolicyName = void 0; exports.apiKeyAuthenticationPolicy = apiKeyAuthenticationPolicy; const checkInsecureConnection_js_1$3 = require_checkInsecureConnection(); /** * Name of the API Key Authentication Policy */ exports.apiKeyAuthenticationPolicyName = "apiKeyAuthenticationPolicy"; /** * Gets a pipeline policy that adds API key authentication to requests */ function apiKeyAuthenticationPolicy(options) { return { name: exports.apiKeyAuthenticationPolicyName, async sendRequest(request, next) { var _a$2, _b$1; (0, checkInsecureConnection_js_1$3.ensureSecureConnection)(request, options); const scheme = (_b$1 = (_a$2 = request.authSchemes) !== null && _a$2 !== void 0 ? _a$2 : options.authSchemes) === null || _b$1 === void 0 ?
void 0 : _b$1.find((x) => x.kind === "apiKey"); if (!scheme) return next(request); if (scheme.apiKeyLocation !== "header") throw new Error(`Unsupported API key location: ${scheme.apiKeyLocation}`); request.headers.set(scheme.name, options.credential.key); return next(request); } }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/basicAuthenticationPolicy.js var require_basicAuthenticationPolicy = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/basicAuthenticationPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.basicAuthenticationPolicyName = void 0; exports.basicAuthenticationPolicy = basicAuthenticationPolicy; const bytesEncoding_js_1$3 = require_bytesEncoding(); const checkInsecureConnection_js_1$2 = require_checkInsecureConnection(); /** * Name of the Basic Authentication Policy */ exports.basicAuthenticationPolicyName = "bearerAuthenticationPolicy"; /** * Gets a pipeline policy that adds basic authentication to requests */ function basicAuthenticationPolicy(options) { return { name: exports.basicAuthenticationPolicyName, async sendRequest(request, next) { var _a$2, _b$1; (0, checkInsecureConnection_js_1$2.ensureSecureConnection)(request, options); const scheme = (_b$1 = (_a$2 = request.authSchemes) !== null && _a$2 !== void 0 ? _a$2 : options.authSchemes) === null || _b$1 === void 0 ? void 0 : _b$1.find((x) => x.kind === "http" && x.scheme === "basic"); if (!scheme) return next(request); const { username, password } = options.credential; const headerValue = (0, bytesEncoding_js_1$3.uint8ArrayToString)((0, bytesEncoding_js_1$3.stringToUint8Array)(`${username}:${password}`, "utf-8"), "base64"); request.headers.set("Authorization", `Basic ${headerValue}`); return next(request); } }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/bearerAuthenticationPolicy.js var require_bearerAuthenticationPolicy = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/bearerAuthenticationPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.bearerAuthenticationPolicyName = void 0; exports.bearerAuthenticationPolicy = bearerAuthenticationPolicy; const checkInsecureConnection_js_1$1 = require_checkInsecureConnection(); /** * Name of the Bearer Authentication Policy */ exports.bearerAuthenticationPolicyName = "bearerAuthenticationPolicy"; /** * Gets a pipeline policy that adds bearer token authentication to requests */ function bearerAuthenticationPolicy(options) { return { name: exports.bearerAuthenticationPolicyName, async sendRequest(request, next) { var _a$2, _b$1; (0, checkInsecureConnection_js_1$1.ensureSecureConnection)(request, options); const scheme = (_b$1 = (_a$2 = request.authSchemes) !== null && _a$2 !== void 0 ? _a$2 : options.authSchemes) === null || _b$1 === void 0 ? 
void 0 : _b$1.find((x) => x.kind === "http" && x.scheme === "bearer"); if (!scheme) return next(request); const token = await options.credential.getBearerToken({ abortSignal: request.abortSignal }); request.headers.set("Authorization", `Bearer ${token}`); return next(request); } }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/oauth2AuthenticationPolicy.js var require_oauth2AuthenticationPolicy = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/auth/oauth2AuthenticationPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.oauth2AuthenticationPolicyName = void 0; exports.oauth2AuthenticationPolicy = oauth2AuthenticationPolicy; const checkInsecureConnection_js_1 = require_checkInsecureConnection(); /** * Name of the OAuth2 Authentication Policy */ exports.oauth2AuthenticationPolicyName = "oauth2AuthenticationPolicy"; /** * Gets a pipeline policy that adds authorization header from OAuth2 schemes */ function oauth2AuthenticationPolicy(options) { return { name: exports.oauth2AuthenticationPolicyName, async sendRequest(request, next) { var _a$2, _b$1; (0, checkInsecureConnection_js_1.ensureSecureConnection)(request, options); const scheme = (_b$1 = (_a$2 = request.authSchemes) !== null && _a$2 !== void 0 ? _a$2 : options.authSchemes) === null || _b$1 === void 0 ? void 0 : _b$1.find((x) => x.kind === "oauth2"); if (!scheme) return next(request); const token = await options.credential.getOAuth2Token(scheme.flows, { abortSignal: request.abortSignal }); request.headers.set("Authorization", `Bearer ${token}`); return next(request); } }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/client/clientHelpers.js var require_clientHelpers = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/client/clientHelpers.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createDefaultPipeline = createDefaultPipeline$1; exports.getCachedDefaultHttpsClient = getCachedDefaultHttpsClient; const defaultHttpClient_js_1$2 = require_defaultHttpClient$1(); const createPipelineFromOptions_js_1$1 = require_createPipelineFromOptions$1(); const apiVersionPolicy_js_1 = require_apiVersionPolicy(); const credentials_js_1 = require_credentials(); const apiKeyAuthenticationPolicy_js_1 = require_apiKeyAuthenticationPolicy(); const basicAuthenticationPolicy_js_1 = require_basicAuthenticationPolicy(); const bearerAuthenticationPolicy_js_1 = require_bearerAuthenticationPolicy(); const oauth2AuthenticationPolicy_js_1 = require_oauth2AuthenticationPolicy(); let cachedHttpClient$1; /** * Creates a default rest pipeline to re-use across Rest Level Clients */ function createDefaultPipeline$1(options = {}) { const pipeline = (0, createPipelineFromOptions_js_1$1.createPipelineFromOptions)(options); pipeline.addPolicy((0, apiVersionPolicy_js_1.apiVersionPolicy)(options)); const { credential, authSchemes, allowInsecureConnection: allowInsecureConnection$1 } = options; if (credential) { if ((0, credentials_js_1.isApiKeyCredential)(credential)) pipeline.addPolicy((0, apiKeyAuthenticationPolicy_js_1.apiKeyAuthenticationPolicy)({ authSchemes, credential, allowInsecureConnection: allowInsecureConnection$1 })); else if ((0, 
credentials_js_1.isBasicCredential)(credential)) pipeline.addPolicy((0, basicAuthenticationPolicy_js_1.basicAuthenticationPolicy)({ authSchemes, credential, allowInsecureConnection: allowInsecureConnection$1 })); else if ((0, credentials_js_1.isBearerTokenCredential)(credential)) pipeline.addPolicy((0, bearerAuthenticationPolicy_js_1.bearerAuthenticationPolicy)({ authSchemes, credential, allowInsecureConnection: allowInsecureConnection$1 })); else if ((0, credentials_js_1.isOAuth2TokenCredential)(credential)) pipeline.addPolicy((0, oauth2AuthenticationPolicy_js_1.oauth2AuthenticationPolicy)({ authSchemes, credential, allowInsecureConnection: allowInsecureConnection$1 })); } return pipeline; } function getCachedDefaultHttpsClient() { if (!cachedHttpClient$1) cachedHttpClient$1 = (0, defaultHttpClient_js_1$2.createDefaultHttpClient)(); return cachedHttpClient$1; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/client/multipart.js var require_multipart = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/client/multipart.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.buildBodyPart = buildBodyPart; exports.buildMultipartBody = buildMultipartBody; const restError_js_1$6 = require_restError$2(); const httpHeaders_js_1$4 = require_httpHeaders$1(); const bytesEncoding_js_1$2 = require_bytesEncoding(); const typeGuards_js_1$2 = require_typeGuards$1(); /** * Get value of a header in the part descriptor ignoring case */ function getHeaderValue(descriptor, headerName) { if (descriptor.headers) { const actualHeaderName = Object.keys(descriptor.headers).find((x) => x.toLowerCase() === headerName.toLowerCase()); if (actualHeaderName) return descriptor.headers[actualHeaderName]; } return void 0; } function getPartContentType(descriptor) { const contentTypeHeader = getHeaderValue(descriptor, "content-type"); if (contentTypeHeader) return contentTypeHeader; if (descriptor.contentType === null) return void 0; if (descriptor.contentType) return descriptor.contentType; const { body: body$1 } = descriptor; if (body$1 === null || body$1 === void 0) return void 0; if (typeof body$1 === "string" || typeof body$1 === "number" || typeof body$1 === "boolean") return "text/plain; charset=UTF-8"; if (body$1 instanceof Blob) return body$1.type || "application/octet-stream"; if ((0, typeGuards_js_1$2.isBinaryBody)(body$1)) return "application/octet-stream"; return "application/json"; } /** * Enclose value in quotes and escape special characters, for use in the Content-Disposition header */ function escapeDispositionField(value) { return JSON.stringify(value); } function getContentDisposition(descriptor) { var _a$2; const contentDispositionHeader = getHeaderValue(descriptor, "content-disposition"); if (contentDispositionHeader) return contentDispositionHeader; if (descriptor.dispositionType === void 0 && descriptor.name === void 0 && descriptor.filename === void 0) return void 0; const dispositionType = (_a$2 = descriptor.dispositionType) !== null && _a$2 !== void 0 ? 
_a$2 : "form-data"; let disposition = dispositionType; if (descriptor.name) disposition += `; name=${escapeDispositionField(descriptor.name)}`; let filename = void 0; if (descriptor.filename) filename = descriptor.filename; else if (typeof File !== "undefined" && descriptor.body instanceof File) { const filenameFromFile = descriptor.body.name; if (filenameFromFile !== "") filename = filenameFromFile; } if (filename) disposition += `; filename=${escapeDispositionField(filename)}`; return disposition; } function normalizeBody(body$1, contentType$1) { if (body$1 === void 0) return new Uint8Array([]); if ((0, typeGuards_js_1$2.isBinaryBody)(body$1)) return body$1; if (typeof body$1 === "string" || typeof body$1 === "number" || typeof body$1 === "boolean") return (0, bytesEncoding_js_1$2.stringToUint8Array)(String(body$1), "utf-8"); if (contentType$1 && /application\/(.+\+)?json(;.+)?/i.test(String(contentType$1))) return (0, bytesEncoding_js_1$2.stringToUint8Array)(JSON.stringify(body$1), "utf-8"); throw new restError_js_1$6.RestError(`Unsupported body/content-type combination: ${body$1}, ${contentType$1}`); } function buildBodyPart(descriptor) { var _a$2; const contentType$1 = getPartContentType(descriptor); const contentDisposition = getContentDisposition(descriptor); const headers = (0, httpHeaders_js_1$4.createHttpHeaders)((_a$2 = descriptor.headers) !== null && _a$2 !== void 0 ? _a$2 : {}); if (contentType$1) headers.set("content-type", contentType$1); if (contentDisposition) headers.set("content-disposition", contentDisposition); const body$1 = normalizeBody(descriptor.body, contentType$1); return { headers, body: body$1 }; } function buildMultipartBody(parts) { return { parts: parts.map(buildBodyPart) }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/client/sendRequest.js var require_sendRequest = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/client/sendRequest.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.sendRequest = sendRequest; const restError_js_1$5 = require_restError$2(); const httpHeaders_js_1$3 = require_httpHeaders$1(); const pipelineRequest_js_1$2 = require_pipelineRequest$1(); const clientHelpers_js_1$1 = require_clientHelpers(); const typeGuards_js_1$1 = require_typeGuards$1(); const multipart_js_1 = require_multipart(); /** * Helper function to send request used by the client * @param method - method to use to send the request * @param url - url to send the request to * @param pipeline - pipeline with the policies to run when sending the request * @param options - request options * @param customHttpClient - a custom HttpClient to use when making the request * @returns returns and HttpResponse */ async function sendRequest(method, url$1, pipeline, options = {}, customHttpClient) { var _a$2; const httpClient = customHttpClient !== null && customHttpClient !== void 0 ? customHttpClient : (0, clientHelpers_js_1$1.getCachedDefaultHttpsClient)(); const request = buildPipelineRequest(method, url$1, options); try { const response = await pipeline.sendRequest(httpClient, request); const headers = response.headers.toJSON(); const stream$3 = (_a$2 = response.readableStreamBody) !== null && _a$2 !== void 0 ? _a$2 : response.browserStreamBody; const parsedBody = options.responseAsStream || stream$3 !== void 0 ? 
void 0 : getResponseBody(response); const body$1 = stream$3 !== null && stream$3 !== void 0 ? stream$3 : parsedBody; if (options === null || options === void 0 ? void 0 : options.onResponse) options.onResponse(Object.assign(Object.assign({}, response), { request, rawHeaders: headers, parsedBody })); return { request, headers, status: `${response.status}`, body: body$1 }; } catch (e) { if ((0, restError_js_1$5.isRestError)(e) && e.response && options.onResponse) { const { response } = e; const rawHeaders = response.headers.toJSON(); options === null || options === void 0 || options.onResponse(Object.assign(Object.assign({}, response), { request, rawHeaders }), e); } throw e; } } /** * Function to determine the request content type * @param options - request options (InternalRequestParameters) * @returns the content-type */ function getRequestContentType(options = {}) { var _a$2, _b$1, _c$1; return (_c$1 = (_a$2 = options.contentType) !== null && _a$2 !== void 0 ? _a$2 : (_b$1 = options.headers) === null || _b$1 === void 0 ? void 0 : _b$1["content-type"]) !== null && _c$1 !== void 0 ? _c$1 : getContentType(options.body); } /** * Function to determine the content-type of a body * This is used if an explicit content-type is not provided * @param body - body in the request * @returns the content-type */ function getContentType(body$1) { if (ArrayBuffer.isView(body$1)) return "application/octet-stream"; if (typeof body$1 === "string") try { JSON.parse(body$1); return "application/json"; } catch (error) { return void 0; } return "application/json"; } function buildPipelineRequest(method, url$1, options = {}) { var _a$2, _b$1, _c$1; const requestContentType = getRequestContentType(options); const { body: body$1, multipartBody } = getRequestBody(options.body, requestContentType); const hasContent = body$1 !== void 0 || multipartBody !== void 0; const headers = (0, httpHeaders_js_1$3.createHttpHeaders)(Object.assign(Object.assign(Object.assign({}, options.headers ? options.headers : {}), { accept: (_c$1 = (_a$2 = options.accept) !== null && _a$2 !== void 0 ? _a$2 : (_b$1 = options.headers) === null || _b$1 === void 0 ? void 0 : _b$1.accept) !== null && _c$1 !== void 0 ? _c$1 : "application/json" }), hasContent && requestContentType && { "content-type": requestContentType })); return (0, pipelineRequest_js_1$2.createPipelineRequest)({ url: url$1, method, body: body$1, multipartBody, headers, allowInsecureConnection: options.allowInsecureConnection, abortSignal: options.abortSignal, onUploadProgress: options.onUploadProgress, onDownloadProgress: options.onDownloadProgress, timeout: options.timeout, enableBrowserStreams: true, streamResponseStatusCodes: options.responseAsStream ? new Set([Number.POSITIVE_INFINITY]) : void 0 }); } /** * Prepares the body before sending the request */ function getRequestBody(body$1, contentType$1 = "") { if (body$1 === void 0) return { body: void 0 }; if (typeof FormData !== "undefined" && body$1 instanceof FormData) return { body: body$1 }; if ((0, typeGuards_js_1$1.isReadableStream)(body$1)) return { body: body$1 }; if (ArrayBuffer.isView(body$1)) return { body: body$1 instanceof Uint8Array ?
body$1 : JSON.stringify(body$1) }; const firstType = contentType$1.split(";")[0]; switch (firstType) { case "application/json": return { body: JSON.stringify(body$1) }; case "multipart/form-data": if (Array.isArray(body$1)) return { multipartBody: (0, multipart_js_1.buildMultipartBody)(body$1) }; return { body: JSON.stringify(body$1) }; case "text/plain": return { body: String(body$1) }; default: if (typeof body$1 === "string") return { body: body$1 }; return { body: JSON.stringify(body$1) }; } } /** * Prepares the response body */ function getResponseBody(response) { var _a$2, _b$1; const contentType$1 = (_a$2 = response.headers.get("content-type")) !== null && _a$2 !== void 0 ? _a$2 : ""; const firstType = contentType$1.split(";")[0]; const bodyToParse = (_b$1 = response.bodyAsText) !== null && _b$1 !== void 0 ? _b$1 : ""; if (firstType === "text/plain") return String(bodyToParse); try { return bodyToParse ? JSON.parse(bodyToParse) : void 0; } catch (error) { if (firstType === "application/json") throw createParseError(response, error); return String(bodyToParse); } } function createParseError(response, err) { var _a$2; const msg = `Error "${err}" occurred while parsing the response body - ${response.bodyAsText}.`; const errCode = (_a$2 = err.code) !== null && _a$2 !== void 0 ? _a$2 : restError_js_1$5.RestError.PARSE_ERROR; return new restError_js_1$5.RestError(msg, { code: errCode, statusCode: response.status, request: response.request, response }); } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/client/urlHelpers.js var require_urlHelpers$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/client/urlHelpers.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.buildRequestUrl = buildRequestUrl; exports.buildBaseUrl = buildBaseUrl; exports.replaceAll = replaceAll$1; function isQueryParameterWithOptions(x) { const value = x.value; return value !== void 0 && value.toString !== void 0 && typeof value.toString === "function"; } /** * Builds the request url, filling in query and path parameters * @param endpoint - base url which can be a template url * @param routePath - path to append to the endpoint * @param pathParameters - values of the path parameters * @param options - request parameters including query parameters * @returns a full url with path and query parameters */ function buildRequestUrl(endpoint, routePath, pathParameters, options = {}) { if (routePath.startsWith("https://") || routePath.startsWith("http://")) return routePath; endpoint = buildBaseUrl(endpoint, options); routePath = buildRoutePath(routePath, pathParameters, options); const requestUrl = appendQueryParams$1(`${endpoint}/${routePath}`, options); const url$1 = new URL(requestUrl); return url$1.toString().replace(/([^:]\/)\/+/g, "$1"); } function getQueryParamValue(key, allowReserved, style, param) { let separator; if (style === "pipeDelimited") separator = "|"; else if (style === "spaceDelimited") separator = "%20"; else separator = ","; let paramValues; if (Array.isArray(param)) paramValues = param; else if (typeof param === "object" && param.toString === Object.prototype.toString) paramValues = Object.entries(param).flat(); else paramValues = [param]; const value = paramValues.map((p) => { if (p === null || p === void 0) return ""; if (!p.toString || typeof p.toString !== "function") throw new Error(`Query parameters must be 
able to be represented as string, ${key} can't`); const rawValue = p.toISOString !== void 0 ? p.toISOString() : p.toString(); return allowReserved ? rawValue : encodeURIComponent(rawValue); }).join(separator); return `${allowReserved ? key : encodeURIComponent(key)}=${value}`; } function appendQueryParams$1(url$1, options = {}) { var _a$2, _b$1, _c$1, _d$1; if (!options.queryParameters) return url$1; const parsedUrl = new URL(url$1); const queryParams = options.queryParameters; const paramStrings = []; for (const key of Object.keys(queryParams)) { const param = queryParams[key]; if (param === void 0 || param === null) continue; const hasMetadata = isQueryParameterWithOptions(param); const rawValue = hasMetadata ? param.value : param; const explode = hasMetadata ? (_a$2 = param.explode) !== null && _a$2 !== void 0 ? _a$2 : false : false; const style = hasMetadata && param.style ? param.style : "form"; if (explode) if (Array.isArray(rawValue)) for (const item of rawValue) paramStrings.push(getQueryParamValue(key, (_b$1 = options.skipUrlEncoding) !== null && _b$1 !== void 0 ? _b$1 : false, style, item)); else if (typeof rawValue === "object") for (const [actualKey, value] of Object.entries(rawValue)) paramStrings.push(getQueryParamValue(actualKey, (_c$1 = options.skipUrlEncoding) !== null && _c$1 !== void 0 ? _c$1 : false, style, value)); else throw new Error("explode can only be set to true for objects and arrays"); else paramStrings.push(getQueryParamValue(key, (_d$1 = options.skipUrlEncoding) !== null && _d$1 !== void 0 ? _d$1 : false, style, rawValue)); } if (parsedUrl.search !== "") parsedUrl.search += "&"; parsedUrl.search += paramStrings.join("&"); return parsedUrl.toString(); } function buildBaseUrl(endpoint, options) { var _a$2; if (!options.pathParameters) return endpoint; const pathParams = options.pathParameters; for (const [key, param] of Object.entries(pathParams)) { if (param === void 0 || param === null) throw new Error(`Path parameter ${key} must not be undefined or null`); if (!param.toString || typeof param.toString !== "function") throw new Error(`Path parameters must be able to be represented as string, ${key} can't`); let value = param.toISOString !== void 0 ? param.toISOString() : String(param); if (!options.skipUrlEncoding) value = encodeURIComponent(value); endpoint = (_a$2 = replaceAll$1(endpoint, `{${key}}`, value)) !== null && _a$2 !== void 0 ? _a$2 : ""; } return endpoint; } function buildRoutePath(routePath, pathParameters, options = {}) { var _a$2; for (const pathParam of pathParameters) { const allowReserved = typeof pathParam === "object" && ((_a$2 = pathParam.allowReserved) !== null && _a$2 !== void 0 ? _a$2 : false); let value = typeof pathParam === "object" ? pathParam.value : pathParam; if (!options.skipUrlEncoding && !allowReserved) value = encodeURIComponent(value); routePath = routePath.replace(/\{[\w-]+\}/, String(value)); } return routePath; } /** * Replace all of the instances of searchValue in value with the provided replaceValue. * @param value - The value to search and replace in. * @param searchValue - The value to search for in the value argument. * @param replaceValue - The value to replace searchValue with in the value argument. * @returns The value where each instance of searchValue was replaced with replaceValue. */ function replaceAll$1(value, searchValue, replaceValue) { return !value || !searchValue ?
value : value.split(searchValue).join(replaceValue || ""); } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/client/getClient.js var require_getClient = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/client/getClient.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.getClient = getClient; const clientHelpers_js_1 = require_clientHelpers(); const sendRequest_js_1 = require_sendRequest(); const urlHelpers_js_1$1 = require_urlHelpers$1(); const checkEnvironment_js_1$1 = require_checkEnvironment(); /** * Creates a client with a default pipeline * @param endpoint - Base endpoint for the client * @param credentials - Credentials to authenticate the requests * @param options - Client options */ function getClient(endpoint, clientOptions = {}) { var _a$2, _b$1, _c$1; const pipeline = (_a$2 = clientOptions.pipeline) !== null && _a$2 !== void 0 ? _a$2 : (0, clientHelpers_js_1.createDefaultPipeline)(clientOptions); if ((_b$1 = clientOptions.additionalPolicies) === null || _b$1 === void 0 ? void 0 : _b$1.length) for (const { policy, position } of clientOptions.additionalPolicies) { const afterPhase = position === "perRetry" ? "Sign" : void 0; pipeline.addPolicy(policy, { afterPhase }); } const { allowInsecureConnection: allowInsecureConnection$1, httpClient } = clientOptions; const endpointUrl = (_c$1 = clientOptions.endpoint) !== null && _c$1 !== void 0 ? _c$1 : endpoint; const client = (path$13, ...args) => { const getUrl = (requestOptions) => (0, urlHelpers_js_1$1.buildRequestUrl)(endpointUrl, path$13, args, Object.assign({ allowInsecureConnection: allowInsecureConnection$1 }, requestOptions)); return { get: (requestOptions = {}) => { return buildOperation("GET", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection$1, httpClient); }, post: (requestOptions = {}) => { return buildOperation("POST", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection$1, httpClient); }, put: (requestOptions = {}) => { return buildOperation("PUT", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection$1, httpClient); }, patch: (requestOptions = {}) => { return buildOperation("PATCH", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection$1, httpClient); }, delete: (requestOptions = {}) => { return buildOperation("DELETE", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection$1, httpClient); }, head: (requestOptions = {}) => { return buildOperation("HEAD", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection$1, httpClient); }, options: (requestOptions = {}) => { return buildOperation("OPTIONS", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection$1, httpClient); }, trace: (requestOptions = {}) => { return buildOperation("TRACE", getUrl(requestOptions), pipeline, requestOptions, allowInsecureConnection$1, httpClient); } }; }; return { path: client, pathUnchecked: client, pipeline }; } function buildOperation(method, url$1, pipeline, options, allowInsecureConnection$1, httpClient) { var _a$2; allowInsecureConnection$1 = (_a$2 = options.allowInsecureConnection) !== null && _a$2 !== void 0 ? 
_a$2 : allowInsecureConnection$1; return { then: function(onFulfilled, onrejected) { return (0, sendRequest_js_1.sendRequest)(method, url$1, pipeline, Object.assign(Object.assign({}, options), { allowInsecureConnection: allowInsecureConnection$1 }), httpClient).then(onFulfilled, onrejected); }, async asBrowserStream() { if (checkEnvironment_js_1$1.isNodeLike) throw new Error("`asBrowserStream` is supported only in the browser environment. Use `asNodeStream` instead to obtain the response body stream. If you require a Web stream of the response in Node, consider using `Readable.toWeb` on the result of `asNodeStream`."); else return (0, sendRequest_js_1.sendRequest)(method, url$1, pipeline, Object.assign(Object.assign({}, options), { allowInsecureConnection: allowInsecureConnection$1, responseAsStream: true }), httpClient); }, async asNodeStream() { if (checkEnvironment_js_1$1.isNodeLike) return (0, sendRequest_js_1.sendRequest)(method, url$1, pipeline, Object.assign(Object.assign({}, options), { allowInsecureConnection: allowInsecureConnection$1, responseAsStream: true }), httpClient); else throw new Error("`asNodeStream` is not supported in the browser environment. Use `asBrowserStream` to obtain the response body stream."); } }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/client/operationOptionHelpers.js var require_operationOptionHelpers = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/client/operationOptionHelpers.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.operationOptionsToRequestParameters = operationOptionsToRequestParameters; /** * Helper function to convert OperationOptions to RequestParameters * @param options - the options that are used by the Modular layer to send the request * @returns the result of the conversion as RequestParameters for the RLC layer */ function operationOptionsToRequestParameters(options) { var _a$2, _b$1, _c$1, _d$1, _e, _f; return { allowInsecureConnection: (_a$2 = options.requestOptions) === null || _a$2 === void 0 ? void 0 : _a$2.allowInsecureConnection, timeout: (_b$1 = options.requestOptions) === null || _b$1 === void 0 ? void 0 : _b$1.timeout, skipUrlEncoding: (_c$1 = options.requestOptions) === null || _c$1 === void 0 ? void 0 : _c$1.skipUrlEncoding, abortSignal: options.abortSignal, onUploadProgress: (_d$1 = options.requestOptions) === null || _d$1 === void 0 ? void 0 : _d$1.onUploadProgress, onDownloadProgress: (_e = options.requestOptions) === null || _e === void 0 ? void 0 : _e.onDownloadProgress, headers: Object.assign({}, (_f = options.requestOptions) === null || _f === void 0 ? void 0 : _f.headers), onResponse: options.onResponse }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/client/restError.js var require_restError$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/client/restError.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createRestError = createRestError; const restError_js_1$4 = require_restError$2(); const httpHeaders_js_1$2 = require_httpHeaders$1(); function createRestError(messageOrResponse, response) { var _a$2, _b$1, _c$1; const resp = typeof messageOrResponse === "string" ?
response : messageOrResponse; const internalError = (_b$1 = (_a$2 = resp.body) === null || _a$2 === void 0 ? void 0 : _a$2.error) !== null && _b$1 !== void 0 ? _b$1 : resp.body; const message = typeof messageOrResponse === "string" ? messageOrResponse : (_c$1 = internalError === null || internalError === void 0 ? void 0 : internalError.message) !== null && _c$1 !== void 0 ? _c$1 : `Unexpected status code: ${resp.status}`; return new restError_js_1$4.RestError(message, { statusCode: statusCodeToNumber(resp.status), code: internalError === null || internalError === void 0 ? void 0 : internalError.code, request: resp.request, response: toPipelineResponse$1(resp) }); } function toPipelineResponse$1(response) { var _a$2; return { headers: (0, httpHeaders_js_1$2.createHttpHeaders)(response.headers), request: response.request, status: (_a$2 = statusCodeToNumber(response.status)) !== null && _a$2 !== void 0 ? _a$2 : -1 }; } function statusCodeToNumber(statusCode) { const status = Number.parseInt(statusCode); return Number.isNaN(status) ? void 0 : status; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/index.js var require_commonjs$12 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/index.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createRestError = exports.operationOptionsToRequestParameters = exports.getClient = exports.createDefaultHttpClient = exports.uint8ArrayToString = exports.stringToUint8Array = exports.isRestError = exports.RestError = exports.createEmptyPipeline = exports.createPipelineRequest = exports.createHttpHeaders = exports.TypeSpecRuntimeLogger = exports.setLogLevel = exports.getLogLevel = exports.createClientLogger = exports.AbortError = void 0; const tslib_1$4 = require_tslib(); var AbortError_js_1$1 = require_AbortError$1(); Object.defineProperty(exports, "AbortError", { enumerable: true, get: function() { return AbortError_js_1$1.AbortError; } }); var logger_js_1$4 = require_logger$1(); Object.defineProperty(exports, "createClientLogger", { enumerable: true, get: function() { return logger_js_1$4.createClientLogger; } }); Object.defineProperty(exports, "getLogLevel", { enumerable: true, get: function() { return logger_js_1$4.getLogLevel; } }); Object.defineProperty(exports, "setLogLevel", { enumerable: true, get: function() { return logger_js_1$4.setLogLevel; } }); Object.defineProperty(exports, "TypeSpecRuntimeLogger", { enumerable: true, get: function() { return logger_js_1$4.TypeSpecRuntimeLogger; } }); var httpHeaders_js_1$1 = require_httpHeaders$1(); Object.defineProperty(exports, "createHttpHeaders", { enumerable: true, get: function() { return httpHeaders_js_1$1.createHttpHeaders; } }); tslib_1$4.__exportStar(require_schemes(), exports); tslib_1$4.__exportStar(require_oauth2Flows(), exports); var pipelineRequest_js_1$1 = require_pipelineRequest$1(); Object.defineProperty(exports, "createPipelineRequest", { enumerable: true, get: function() { return pipelineRequest_js_1$1.createPipelineRequest; } }); var pipeline_js_1$4 = require_pipeline$2(); Object.defineProperty(exports, "createEmptyPipeline", { enumerable: true, get: function() { return pipeline_js_1$4.createEmptyPipeline; } }); var restError_js_1$3 = require_restError$2(); Object.defineProperty(exports, "RestError", { enumerable: true, get: function() { return restError_js_1$3.RestError; } }); 
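/*
 * A minimal usage sketch for the `getClient` helper bundled above. The endpoint, route,
 * and path-parameter value here are assumptions for illustration only, not part of the
 * package's documentation:
 *
 *   const client = getClient("https://example.invalid/api");
 *   const response = await client.path("/widgets/{id}", "w-123").get();
 *   if (response.status !== "200") throw createRestError(response);
 *
 * Each verb method returns a thenable built by buildOperation, so awaiting it runs
 * sendRequest through the client's pipeline; `{id}` is filled in by buildRequestUrl.
 */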
Object.defineProperty(exports, "isRestError", { enumerable: true, get: function() { return restError_js_1$3.isRestError; } }); var bytesEncoding_js_1$1 = require_bytesEncoding(); Object.defineProperty(exports, "stringToUint8Array", { enumerable: true, get: function() { return bytesEncoding_js_1$1.stringToUint8Array; } }); Object.defineProperty(exports, "uint8ArrayToString", { enumerable: true, get: function() { return bytesEncoding_js_1$1.uint8ArrayToString; } }); var defaultHttpClient_js_1$1 = require_defaultHttpClient$1(); Object.defineProperty(exports, "createDefaultHttpClient", { enumerable: true, get: function() { return defaultHttpClient_js_1$1.createDefaultHttpClient; } }); var getClient_js_1 = require_getClient(); Object.defineProperty(exports, "getClient", { enumerable: true, get: function() { return getClient_js_1.getClient; } }); var operationOptionHelpers_js_1 = require_operationOptionHelpers(); Object.defineProperty(exports, "operationOptionsToRequestParameters", { enumerable: true, get: function() { return operationOptionHelpers_js_1.operationOptionsToRequestParameters; } }); var restError_js_2 = require_restError$1(); Object.defineProperty(exports, "createRestError", { enumerable: true, get: function() { return restError_js_2.createRestError; } }); } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/pipeline.js var require_pipeline$1 = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/pipeline.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createEmptyPipeline = createEmptyPipeline; const ts_http_runtime_1$4 = require_commonjs$12(); /** * Creates a totally empty pipeline. * Useful for testing or creating a custom one. */ function createEmptyPipeline() { return (0, ts_http_runtime_1$4.createEmptyPipeline)(); } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/internal.js var require_internal$2 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/logger/internal.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createLoggerContext = void 0; var logger_js_1$3 = require_logger$1(); Object.defineProperty(exports, "createLoggerContext", { enumerable: true, get: function() { return logger_js_1$3.createLoggerContext; } }); } }); //#endregion //#region node_modules/.deno/@azure+logger@1.2.0/node_modules/@azure/logger/dist/commonjs/index.js var require_commonjs$11 = __commonJS({ "node_modules/.deno/@azure+logger@1.2.0/node_modules/@azure/logger/dist/commonjs/index.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.AzureLogger = void 0; exports.setLogLevel = setLogLevel; exports.getLogLevel = getLogLevel; exports.createClientLogger = createClientLogger; const logger_1$4 = require_internal$2(); const context = (0, logger_1$4.createLoggerContext)({ logLevelEnvVarName: "AZURE_LOG_LEVEL", namespace: "azure" }); /** * The AzureLogger provides a mechanism for overriding where logs are output to. * By default, logs are sent to stderr. * Override the `log` method to redirect logs to another location. */ exports.AzureLogger = context.logger; /** * Immediately enables logging at the specified log level. If no level is specified, logging is disabled. 
* @param level - The log level to enable for logging. * Options from most verbose to least verbose are: * - verbose * - info * - warning * - error */ function setLogLevel(level) { context.setLogLevel(level); } /** * Retrieves the currently specified log level. */ function getLogLevel() { return context.getLogLevel(); } /** * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. * @param namespace - The name of the SDK package. * @hidden */ function createClientLogger(namespace) { return context.createClientLogger(namespace); } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/log.js var require_log$1 = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/log.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.logger = void 0; const logger_1$3 = require_commonjs$11(); exports.logger = (0, logger_1$3.createClientLogger)("core-rest-pipeline"); } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/exponentialRetryPolicy.js var require_exponentialRetryPolicy$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/exponentialRetryPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.exponentialRetryPolicyName = void 0; exports.exponentialRetryPolicy = exponentialRetryPolicy$1; const exponentialRetryStrategy_js_1$1 = require_exponentialRetryStrategy(); const retryPolicy_js_1$4 = require_retryPolicy$1(); const constants_js_1$8 = require_constants$2(); /** * The programmatic identifier of the exponentialRetryPolicy. */ exports.exponentialRetryPolicyName = "exponentialRetryPolicy"; /** * A policy that attempts to retry requests while introducing an exponentially increasing delay. * @param options - Options that configure retry logic. */ function exponentialRetryPolicy$1(options = {}) { var _a$2; return (0, retryPolicy_js_1$4.retryPolicy)([(0, exponentialRetryStrategy_js_1$1.exponentialRetryStrategy)(Object.assign(Object.assign({}, options), { ignoreSystemErrors: true }))], { maxRetries: (_a$2 = options.maxRetries) !== null && _a$2 !== void 0 ? _a$2 : constants_js_1$8.DEFAULT_RETRY_POLICY_COUNT }); } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/systemErrorRetryPolicy.js var require_systemErrorRetryPolicy$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/systemErrorRetryPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.systemErrorRetryPolicyName = void 0; exports.systemErrorRetryPolicy = systemErrorRetryPolicy$1; const exponentialRetryStrategy_js_1 = require_exponentialRetryStrategy(); const retryPolicy_js_1$3 = require_retryPolicy$1(); const constants_js_1$7 = require_constants$2(); /** * Name of the {@link systemErrorRetryPolicy} */ exports.systemErrorRetryPolicyName = "systemErrorRetryPolicy"; /** * A retry policy that specifically seeks to handle errors in the * underlying transport layer (e.g. DNS lookup failures) rather than * retryable error codes from the server itself. * @param options - Options that customize the policy. 
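* @example
* ```ts
* // A sketch assuming a hand-assembled pipeline; `createEmptyPipeline` and
* // `pipeline.addPolicy` appear elsewhere in this bundle.
* const pipeline = createEmptyPipeline();
* pipeline.addPolicy(systemErrorRetryPolicy({ maxRetries: 5 }));
* ```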
*/ function systemErrorRetryPolicy$1(options = {}) { var _a$2; return { name: exports.systemErrorRetryPolicyName, sendRequest: (0, retryPolicy_js_1$3.retryPolicy)([(0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(Object.assign(Object.assign({}, options), { ignoreHttpStatusCodes: true }))], { maxRetries: (_a$2 = options.maxRetries) !== null && _a$2 !== void 0 ? _a$2 : constants_js_1$7.DEFAULT_RETRY_POLICY_COUNT }).sendRequest }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/throttlingRetryPolicy.js var require_throttlingRetryPolicy$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/throttlingRetryPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.throttlingRetryPolicyName = void 0; exports.throttlingRetryPolicy = throttlingRetryPolicy$1; const throttlingRetryStrategy_js_1 = require_throttlingRetryStrategy(); const retryPolicy_js_1$2 = require_retryPolicy$1(); const constants_js_1$6 = require_constants$2(); /** * Name of the {@link throttlingRetryPolicy} */ exports.throttlingRetryPolicyName = "throttlingRetryPolicy"; /** * A policy that retries when the server sends a 429 response with a Retry-After header. * * To learn more, please refer to * https://learn.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, * https://learn.microsoft.com/en-us/azure/azure-subscription-service-limits and * https://learn.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors * * @param options - Options that configure retry logic. */ function throttlingRetryPolicy$1(options = {}) { var _a$2; return { name: exports.throttlingRetryPolicyName, sendRequest: (0, retryPolicy_js_1$2.retryPolicy)([(0, throttlingRetryStrategy_js_1.throttlingRetryStrategy)()], { maxRetries: (_a$2 = options.maxRetries) !== null && _a$2 !== void 0 ? 
_a$2 : constants_js_1$6.DEFAULT_RETRY_POLICY_COUNT }).sendRequest }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/internal.js var require_internal$1 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/policies/internal.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.userAgentPolicyName = exports.userAgentPolicy = exports.tlsPolicyName = exports.tlsPolicy = exports.redirectPolicyName = exports.redirectPolicy = exports.getDefaultProxySettings = exports.proxyPolicyName = exports.proxyPolicy = exports.multipartPolicyName = exports.multipartPolicy = exports.logPolicyName = exports.logPolicy = exports.formDataPolicyName = exports.formDataPolicy = exports.throttlingRetryPolicyName = exports.throttlingRetryPolicy = exports.systemErrorRetryPolicyName = exports.systemErrorRetryPolicy = exports.retryPolicy = exports.exponentialRetryPolicyName = exports.exponentialRetryPolicy = exports.defaultRetryPolicyName = exports.defaultRetryPolicy = exports.decompressResponsePolicyName = exports.decompressResponsePolicy = exports.agentPolicyName = exports.agentPolicy = void 0; var agentPolicy_js_1$2 = require_agentPolicy$1(); Object.defineProperty(exports, "agentPolicy", { enumerable: true, get: function() { return agentPolicy_js_1$2.agentPolicy; } }); Object.defineProperty(exports, "agentPolicyName", { enumerable: true, get: function() { return agentPolicy_js_1$2.agentPolicyName; } }); var decompressResponsePolicy_js_1$2 = require_decompressResponsePolicy$1(); Object.defineProperty(exports, "decompressResponsePolicy", { enumerable: true, get: function() { return decompressResponsePolicy_js_1$2.decompressResponsePolicy; } }); Object.defineProperty(exports, "decompressResponsePolicyName", { enumerable: true, get: function() { return decompressResponsePolicy_js_1$2.decompressResponsePolicyName; } }); var defaultRetryPolicy_js_1$2 = require_defaultRetryPolicy$1(); Object.defineProperty(exports, "defaultRetryPolicy", { enumerable: true, get: function() { return defaultRetryPolicy_js_1$2.defaultRetryPolicy; } }); Object.defineProperty(exports, "defaultRetryPolicyName", { enumerable: true, get: function() { return defaultRetryPolicy_js_1$2.defaultRetryPolicyName; } }); var exponentialRetryPolicy_js_1$1 = require_exponentialRetryPolicy$1(); Object.defineProperty(exports, "exponentialRetryPolicy", { enumerable: true, get: function() { return exponentialRetryPolicy_js_1$1.exponentialRetryPolicy; } }); Object.defineProperty(exports, "exponentialRetryPolicyName", { enumerable: true, get: function() { return exponentialRetryPolicy_js_1$1.exponentialRetryPolicyName; } }); var retryPolicy_js_1$1 = require_retryPolicy$1(); Object.defineProperty(exports, "retryPolicy", { enumerable: true, get: function() { return retryPolicy_js_1$1.retryPolicy; } }); var systemErrorRetryPolicy_js_1$1 = require_systemErrorRetryPolicy$1(); Object.defineProperty(exports, "systemErrorRetryPolicy", { enumerable: true, get: function() { return systemErrorRetryPolicy_js_1$1.systemErrorRetryPolicy; } }); Object.defineProperty(exports, "systemErrorRetryPolicyName", { enumerable: true, get: function() { return systemErrorRetryPolicy_js_1$1.systemErrorRetryPolicyName; } }); var throttlingRetryPolicy_js_1$1 = require_throttlingRetryPolicy$1(); Object.defineProperty(exports, "throttlingRetryPolicy", { enumerable: true, get: function() { return 
throttlingRetryPolicy_js_1$1.throttlingRetryPolicy; } }); Object.defineProperty(exports, "throttlingRetryPolicyName", { enumerable: true, get: function() { return throttlingRetryPolicy_js_1$1.throttlingRetryPolicyName; } }); var formDataPolicy_js_1$2 = require_formDataPolicy$1(); Object.defineProperty(exports, "formDataPolicy", { enumerable: true, get: function() { return formDataPolicy_js_1$2.formDataPolicy; } }); Object.defineProperty(exports, "formDataPolicyName", { enumerable: true, get: function() { return formDataPolicy_js_1$2.formDataPolicyName; } }); var logPolicy_js_1$2 = require_logPolicy$1(); Object.defineProperty(exports, "logPolicy", { enumerable: true, get: function() { return logPolicy_js_1$2.logPolicy; } }); Object.defineProperty(exports, "logPolicyName", { enumerable: true, get: function() { return logPolicy_js_1$2.logPolicyName; } }); var multipartPolicy_js_1$2 = require_multipartPolicy$1(); Object.defineProperty(exports, "multipartPolicy", { enumerable: true, get: function() { return multipartPolicy_js_1$2.multipartPolicy; } }); Object.defineProperty(exports, "multipartPolicyName", { enumerable: true, get: function() { return multipartPolicy_js_1$2.multipartPolicyName; } }); var proxyPolicy_js_1$2 = require_proxyPolicy$1(); Object.defineProperty(exports, "proxyPolicy", { enumerable: true, get: function() { return proxyPolicy_js_1$2.proxyPolicy; } }); Object.defineProperty(exports, "proxyPolicyName", { enumerable: true, get: function() { return proxyPolicy_js_1$2.proxyPolicyName; } }); Object.defineProperty(exports, "getDefaultProxySettings", { enumerable: true, get: function() { return proxyPolicy_js_1$2.getDefaultProxySettings; } }); var redirectPolicy_js_1$2 = require_redirectPolicy$1(); Object.defineProperty(exports, "redirectPolicy", { enumerable: true, get: function() { return redirectPolicy_js_1$2.redirectPolicy; } }); Object.defineProperty(exports, "redirectPolicyName", { enumerable: true, get: function() { return redirectPolicy_js_1$2.redirectPolicyName; } }); var tlsPolicy_js_1$2 = require_tlsPolicy$1(); Object.defineProperty(exports, "tlsPolicy", { enumerable: true, get: function() { return tlsPolicy_js_1$2.tlsPolicy; } }); Object.defineProperty(exports, "tlsPolicyName", { enumerable: true, get: function() { return tlsPolicy_js_1$2.tlsPolicyName; } }); var userAgentPolicy_js_1$2 = require_userAgentPolicy$1(); Object.defineProperty(exports, "userAgentPolicy", { enumerable: true, get: function() { return userAgentPolicy_js_1$2.userAgentPolicy; } }); Object.defineProperty(exports, "userAgentPolicyName", { enumerable: true, get: function() { return userAgentPolicy_js_1$2.userAgentPolicyName; } }); } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/logPolicy.js var require_logPolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/logPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.logPolicyName = void 0; exports.logPolicy = logPolicy; const log_js_1$5 = require_log$1(); const policies_1$12 = require_internal$1(); /** * The programmatic identifier of the logPolicy. */ exports.logPolicyName = policies_1$12.logPolicyName; /** * A policy that logs all requests and responses. * @param options - Options to configure logPolicy. 
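* @example
* ```ts
* // A sketch: let a custom header through the log sanitizer. The option name
* // `additionalAllowedHeaderNames` is assumed from LogPolicyOptions.
* pipeline.addPolicy(logPolicy({ additionalAllowedHeaderNames: ["x-custom-trace-id"] }));
* ```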
*/ function logPolicy(options = {}) { return (0, policies_1$12.logPolicy)(Object.assign({ logger: log_js_1$5.logger.info }, options)); } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/redirectPolicy.js var require_redirectPolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/redirectPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.redirectPolicyName = void 0; exports.redirectPolicy = redirectPolicy; const policies_1$11 = require_internal$1(); /** * The programmatic identifier of the redirectPolicy. */ exports.redirectPolicyName = policies_1$11.redirectPolicyName; /** * A policy to follow Location headers from the server in order * to support server-side redirection. * In the browser, this policy is not used. * @param options - Options to control policy behavior. */ function redirectPolicy(options = {}) { return (0, policies_1$11.redirectPolicy)(options); } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/userAgentPlatform.js var require_userAgentPlatform = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/userAgentPlatform.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.getHeaderName = getHeaderName; exports.setPlatformSpecificData = setPlatformSpecificData; const tslib_1$3 = require_tslib(); const os$1 = tslib_1$3.__importStar(__require("node:os")); const process$2 = tslib_1$3.__importStar(__require("node:process")); /** * @internal */ function getHeaderName() { return "User-Agent"; } /** * @internal */ async function setPlatformSpecificData(map) { if (process$2 && process$2.versions) { const versions = process$2.versions; if (versions.bun) map.set("Bun", versions.bun); else if (versions.deno) map.set("Deno", versions.deno); else if (versions.node) map.set("Node", versions.node); } map.set("OS", `(${os$1.arch()}-${os$1.type()}-${os$1.release()})`); } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/constants.js var require_constants$1 = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/constants.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.DEFAULT_RETRY_POLICY_COUNT = exports.SDK_VERSION = void 0; exports.SDK_VERSION = "1.20.0"; exports.DEFAULT_RETRY_POLICY_COUNT = 3; } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/userAgent.js var require_userAgent = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/userAgent.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.getUserAgentHeaderName = getUserAgentHeaderName; exports.getUserAgentValue = getUserAgentValue; const userAgentPlatform_js_1 = require_userAgentPlatform(); const constants_js_1$5 = require_constants$1(); function getUserAgentString$1(telemetryInfo) { const parts = []; for (const [key, value] of telemetryInfo) { const token = value ? 
`${key}/${value}` : key; parts.push(token); } return parts.join(" "); } /** * @internal */ function getUserAgentHeaderName() { return (0, userAgentPlatform_js_1.getHeaderName)(); } /** * @internal */ async function getUserAgentValue(prefix$1) { const runtimeInfo = new Map(); runtimeInfo.set("core-rest-pipeline", constants_js_1$5.SDK_VERSION); await (0, userAgentPlatform_js_1.setPlatformSpecificData)(runtimeInfo); const defaultAgent = getUserAgentString$1(runtimeInfo); const userAgentValue = prefix$1 ? `${prefix$1} ${defaultAgent}` : defaultAgent; return userAgentValue; } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/userAgentPolicy.js var require_userAgentPolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/userAgentPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.userAgentPolicyName = void 0; exports.userAgentPolicy = userAgentPolicy; const userAgent_js_1$1 = require_userAgent(); const UserAgentHeaderName = (0, userAgent_js_1$1.getUserAgentHeaderName)(); /** * The programmatic identifier of the userAgentPolicy. */ exports.userAgentPolicyName = "userAgentPolicy"; /** * A policy that sets the User-Agent header (or equivalent) to reflect * the library version. * @param options - Options to customize the user agent value. */ function userAgentPolicy(options = {}) { const userAgentValue = (0, userAgent_js_1$1.getUserAgentValue)(options.userAgentPrefix); return { name: exports.userAgentPolicyName, async sendRequest(request, next) { if (!request.headers.has(UserAgentHeaderName)) request.headers.set(UserAgentHeaderName, await userAgentValue); return next(request); } }; } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/sha256.js var require_sha256 = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/sha256.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.computeSha256Hmac = computeSha256Hmac$1; exports.computeSha256Hash = computeSha256Hash$1; const node_crypto_1 = __require("node:crypto"); /** * Generates a SHA-256 HMAC signature. * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. * @param stringToSign - The data to be signed. * @param encoding - The textual encoding to use for the returned HMAC digest. */ async function computeSha256Hmac$1(key, stringToSign, encoding) { const decodedKey = Buffer.from(key, "base64"); return (0, node_crypto_1.createHmac)("sha256", decodedKey).update(stringToSign).digest(encoding); } /** * Generates a SHA-256 hash. * @param content - The data to be included in the hash. * @param encoding - The textual encoding to use for the returned hash. 
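* @example
* ```ts
* // A sketch: "hex" is one of the digest encodings supported by Node's crypto module.
* const digest = await computeSha256Hash("some data", "hex");
* ```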
*/ async function computeSha256Hash$1(content, encoding) { return (0, node_crypto_1.createHash)("sha256").update(content).digest(encoding); } } }); //#endregion //#region node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/internal.js var require_internal = __commonJS({ "node_modules/.deno/@typespec+ts-http-runtime@0.2.2/node_modules/@typespec/ts-http-runtime/dist/commonjs/util/internal.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.Sanitizer = exports.uint8ArrayToString = exports.stringToUint8Array = exports.isWebWorker = exports.isReactNative = exports.isDeno = exports.isNodeRuntime = exports.isNodeLike = exports.isBun = exports.isBrowser = exports.randomUUID = exports.computeSha256Hmac = exports.computeSha256Hash = exports.isError = exports.isObject = exports.getRandomIntegerInclusive = exports.calculateRetryDelay = void 0; var delay_js_1$1 = require_delay$1(); Object.defineProperty(exports, "calculateRetryDelay", { enumerable: true, get: function() { return delay_js_1$1.calculateRetryDelay; } }); var random_js_1 = require_random(); Object.defineProperty(exports, "getRandomIntegerInclusive", { enumerable: true, get: function() { return random_js_1.getRandomIntegerInclusive; } }); var object_js_1 = require_object(); Object.defineProperty(exports, "isObject", { enumerable: true, get: function() { return object_js_1.isObject; } }); var error_js_1$1 = require_error$1(); Object.defineProperty(exports, "isError", { enumerable: true, get: function() { return error_js_1$1.isError; } }); var sha256_js_1 = require_sha256(); Object.defineProperty(exports, "computeSha256Hash", { enumerable: true, get: function() { return sha256_js_1.computeSha256Hash; } }); Object.defineProperty(exports, "computeSha256Hmac", { enumerable: true, get: function() { return sha256_js_1.computeSha256Hmac; } }); var uuidUtils_js_1 = require_uuidUtils(); Object.defineProperty(exports, "randomUUID", { enumerable: true, get: function() { return uuidUtils_js_1.randomUUID; } }); var checkEnvironment_js_1 = require_checkEnvironment(); Object.defineProperty(exports, "isBrowser", { enumerable: true, get: function() { return checkEnvironment_js_1.isBrowser; } }); Object.defineProperty(exports, "isBun", { enumerable: true, get: function() { return checkEnvironment_js_1.isBun; } }); Object.defineProperty(exports, "isNodeLike", { enumerable: true, get: function() { return checkEnvironment_js_1.isNodeLike; } }); Object.defineProperty(exports, "isNodeRuntime", { enumerable: true, get: function() { return checkEnvironment_js_1.isNodeRuntime; } }); Object.defineProperty(exports, "isDeno", { enumerable: true, get: function() { return checkEnvironment_js_1.isDeno; } }); Object.defineProperty(exports, "isReactNative", { enumerable: true, get: function() { return checkEnvironment_js_1.isReactNative; } }); Object.defineProperty(exports, "isWebWorker", { enumerable: true, get: function() { return checkEnvironment_js_1.isWebWorker; } }); var bytesEncoding_js_1 = require_bytesEncoding(); Object.defineProperty(exports, "stringToUint8Array", { enumerable: true, get: function() { return bytesEncoding_js_1.stringToUint8Array; } }); Object.defineProperty(exports, "uint8ArrayToString", { enumerable: true, get: function() { return bytesEncoding_js_1.uint8ArrayToString; } }); var sanitizer_js_1 = require_sanitizer(); Object.defineProperty(exports, "Sanitizer", { enumerable: true, get: function() { return sanitizer_js_1.Sanitizer; } }); } }); //#endregion 
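/*
 * A quick round-trip sketch for the byte-encoding helpers re-exported just above
 * (the "hex" and "utf-8" format names are assumed from the runtime's supported encodings):
 *
 *   const bytes = stringToUint8Array("68656c6c6f", "hex");
 *   const text = uint8ArrayToString(bytes, "utf-8"); // "hello"
 */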
//#region node_modules/.deno/@azure+core-util@1.12.0/node_modules/@azure/core-util/dist/commonjs/aborterUtils.js var require_aborterUtils = __commonJS({ "node_modules/.deno/@azure+core-util@1.12.0/node_modules/@azure/core-util/dist/commonjs/aborterUtils.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.cancelablePromiseRace = cancelablePromiseRace; /** * A Promise.race() wrapper that aborts the rest of the promises as soon as the first promise settles. */ async function cancelablePromiseRace(abortablePromiseBuilders, options) { var _a$2, _b$1; const aborter = new AbortController(); function abortHandler() { aborter.abort(); } (_a$2 = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _a$2 === void 0 || _a$2.addEventListener("abort", abortHandler); try { return await Promise.race(abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal }))); } finally { aborter.abort(); (_b$1 = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _b$1 === void 0 || _b$1.removeEventListener("abort", abortHandler); } } } }); //#endregion //#region node_modules/.deno/@azure+abort-controller@2.1.2/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js var require_AbortError = __commonJS({ "node_modules/.deno/@azure+abort-controller@2.1.2/node_modules/@azure/abort-controller/dist/commonjs/AbortError.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.AbortError = void 0; /** * This error is thrown when an asynchronous operation has been aborted. * Check for this error by testing that the `name` property of the * error matches `"AbortError"`. * * @example * ```ts * const controller = new AbortController(); * controller.abort(); * try { * doAsyncWork(controller.signal) * } catch (e) { * if (e.name === 'AbortError') { * // handle abort error here. * } * } * ``` */ var AbortError$1 = class extends Error { constructor(message) { super(message); this.name = "AbortError"; } }; exports.AbortError = AbortError$1; } }); //#endregion //#region node_modules/.deno/@azure+abort-controller@2.1.2/node_modules/@azure/abort-controller/dist/commonjs/index.js var require_commonjs$10 = __commonJS({ "node_modules/.deno/@azure+abort-controller@2.1.2/node_modules/@azure/abort-controller/dist/commonjs/index.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.AbortError = void 0; var AbortError_js_1 = require_AbortError(); Object.defineProperty(exports, "AbortError", { enumerable: true, get: function() { return AbortError_js_1.AbortError; } }); } }); //#endregion //#region node_modules/.deno/@azure+core-util@1.12.0/node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.js var require_createAbortablePromise = __commonJS({ "node_modules/.deno/@azure+core-util@1.12.0/node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createAbortablePromise = createAbortablePromise; const abort_controller_1$1 = require_commonjs$10(); /** * Creates an abortable promise. * @param buildPromise - A function that takes the resolve and reject functions as parameters. * @param options - The options for the abortable promise. * @returns A promise that can be aborted. */ function createAbortablePromise(buildPromise, options) { const { cleanupBeforeAbort, abortSignal: abortSignal$1, abortErrorMsg } = options !== null && options !== void 0 ?
options : {}; return new Promise((resolve, reject) => { function rejectOnAbort() { reject(new abort_controller_1$1.AbortError(abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : "The operation was aborted.")); } function removeListeners() { abortSignal$1 === null || abortSignal$1 === void 0 || abortSignal$1.removeEventListener("abort", onAbort); } function onAbort() { cleanupBeforeAbort === null || cleanupBeforeAbort === void 0 || cleanupBeforeAbort(); removeListeners(); rejectOnAbort(); } if (abortSignal$1 === null || abortSignal$1 === void 0 ? void 0 : abortSignal$1.aborted) return rejectOnAbort(); try { buildPromise((x) => { removeListeners(); resolve(x); }, (x) => { removeListeners(); reject(x); }); } catch (err) { reject(err); } abortSignal$1 === null || abortSignal$1 === void 0 || abortSignal$1.addEventListener("abort", onAbort); }); } } }); //#endregion //#region node_modules/.deno/@azure+core-util@1.12.0/node_modules/@azure/core-util/dist/commonjs/delay.js var require_delay = __commonJS({ "node_modules/.deno/@azure+core-util@1.12.0/node_modules/@azure/core-util/dist/commonjs/delay.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.delay = delay$2; exports.calculateRetryDelay = calculateRetryDelay$1; const createAbortablePromise_js_1$1 = require_createAbortablePromise(); const util_1$3 = require_internal(); const StandardAbortMessage = "The delay was aborted."; /** * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. * @param timeInMs - The number of milliseconds to be delayed. * @param options - The options for delay - currently abort options * @returns Promise that is resolved after timeInMs */ function delay$2(timeInMs, options) { let token; const { abortSignal: abortSignal$1, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; return (0, createAbortablePromise_js_1$1.createAbortablePromise)((resolve) => { token = setTimeout(resolve, timeInMs); }, { cleanupBeforeAbort: () => clearTimeout(token), abortSignal: abortSignal$1, abortErrorMsg: abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : StandardAbortMessage }); } /** * Calculates the delay interval for retry attempts using exponential delay with jitter. * @param retryAttempt - The current retry attempt number. * @param config - The exponential retry configuration. * @returns An object containing the calculated retry delay. */ function calculateRetryDelay$1(retryAttempt, config) { const exponentialDelay = config.retryDelayInMs * Math.pow(2, retryAttempt); const clampedDelay = Math.min(config.maxRetryDelayInMs, exponentialDelay); const retryAfterInMs = clampedDelay / 2 + (0, util_1$3.getRandomIntegerInclusive)(0, clampedDelay / 2); return { retryAfterInMs }; } } }); //#endregion //#region node_modules/.deno/@azure+core-util@1.12.0/node_modules/@azure/core-util/dist/commonjs/error.js var require_error = __commonJS({ "node_modules/.deno/@azure+core-util@1.12.0/node_modules/@azure/core-util/dist/commonjs/error.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.getErrorMessage = getErrorMessage; const util_1$2 = require_internal(); /** * Given what is thought to be an error object, return the message if possible. * If the message is missing, returns a stringified version of the input. 
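* @example
* ```ts
* // A sketch of both branches: Error inputs yield `.message`, anything else is stringified.
* getErrorMessage(new Error("boom")); // "boom"
* getErrorMessage({ code: 418 });     // 'Unknown error {"code":418}'
* ```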
* @param e - Something thrown from a try block * @returns The error message or a string of the input */ function getErrorMessage(e) { if ((0, util_1$2.isError)(e)) return e.message; else { let stringified; try { if (typeof e === "object" && e) stringified = JSON.stringify(e); else stringified = String(e); } catch (err) { stringified = "[unable to stringify input]"; } return `Unknown error ${stringified}`; } } } }); //#endregion //#region node_modules/.deno/@azure+core-util@1.12.0/node_modules/@azure/core-util/dist/commonjs/typeGuards.js var require_typeGuards = __commonJS({ "node_modules/.deno/@azure+core-util@1.12.0/node_modules/@azure/core-util/dist/commonjs/typeGuards.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.isDefined = isDefined; exports.isObjectWithProperties = isObjectWithProperties; exports.objectHasProperty = objectHasProperty; /** * Helper TypeGuard that checks if something is defined or not. * @param thing - Anything */ function isDefined(thing) { return typeof thing !== "undefined" && thing !== null; } /** * Helper TypeGuard that checks if the input is an object with the specified properties. * @param thing - Anything. * @param properties - The name of the properties that should appear in the object. */ function isObjectWithProperties(thing, properties) { if (!isDefined(thing) || typeof thing !== "object") return false; for (const property of properties) if (!objectHasProperty(thing, property)) return false; return true; } /** * Helper TypeGuard that checks if the input is an object with the specified property. * @param thing - Any object. * @param property - The name of the property that should appear in the object. */ function objectHasProperty(thing, property) { return isDefined(thing) && typeof thing === "object" && property in thing; } } }); //#endregion //#region node_modules/.deno/@azure+core-util@1.12.0/node_modules/@azure/core-util/dist/commonjs/index.js var require_commonjs$9 = __commonJS({ "node_modules/.deno/@azure+core-util@1.12.0/node_modules/@azure/core-util/dist/commonjs/index.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.isWebWorker = exports.isReactNative = exports.isNodeRuntime = exports.isNodeLike = exports.isNode = exports.isDeno = exports.isBun = exports.isBrowser = exports.objectHasProperty = exports.isObjectWithProperties = exports.isDefined = exports.getErrorMessage = exports.delay = exports.createAbortablePromise = exports.cancelablePromiseRace = void 0; exports.calculateRetryDelay = calculateRetryDelay; exports.computeSha256Hash = computeSha256Hash; exports.computeSha256Hmac = computeSha256Hmac; exports.getRandomIntegerInclusive = getRandomIntegerInclusive; exports.isError = isError; exports.isObject = isObject; exports.randomUUID = randomUUID; exports.uint8ArrayToString = uint8ArrayToString; exports.stringToUint8Array = stringToUint8Array; const tslib_1$2 = require_tslib(); const tspRuntime = tslib_1$2.__importStar(require_internal()); var aborterUtils_js_1 = require_aborterUtils(); Object.defineProperty(exports, "cancelablePromiseRace", { enumerable: true, get: function() { return aborterUtils_js_1.cancelablePromiseRace; } }); var createAbortablePromise_js_1 = require_createAbortablePromise(); Object.defineProperty(exports, "createAbortablePromise", { enumerable: true, get: function() { return createAbortablePromise_js_1.createAbortablePromise; } }); var delay_js_1 = require_delay(); Object.defineProperty(exports, "delay", { enumerable: true, get: function() { return 
delay_js_1.delay; } }); var error_js_1 = require_error(); Object.defineProperty(exports, "getErrorMessage", { enumerable: true, get: function() { return error_js_1.getErrorMessage; } }); var typeGuards_js_1 = require_typeGuards(); Object.defineProperty(exports, "isDefined", { enumerable: true, get: function() { return typeGuards_js_1.isDefined; } }); Object.defineProperty(exports, "isObjectWithProperties", { enumerable: true, get: function() { return typeGuards_js_1.isObjectWithProperties; } }); Object.defineProperty(exports, "objectHasProperty", { enumerable: true, get: function() { return typeGuards_js_1.objectHasProperty; } }); /** * Calculates the delay interval for retry attempts using exponential delay with jitter. * * @param retryAttempt - The current retry attempt number. * * @param config - The exponential retry configuration. * * @returns An object containing the calculated retry delay. */ function calculateRetryDelay(retryAttempt, config) { return tspRuntime.calculateRetryDelay(retryAttempt, config); } /** * Generates a SHA-256 hash. * * @param content - The data to be included in the hash. * * @param encoding - The textual encoding to use for the returned hash. */ function computeSha256Hash(content, encoding) { return tspRuntime.computeSha256Hash(content, encoding); } /** * Generates a SHA-256 HMAC signature. * * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. * * @param stringToSign - The data to be signed. * * @param encoding - The textual encoding to use for the returned HMAC digest. */ function computeSha256Hmac(key, stringToSign, encoding) { return tspRuntime.computeSha256Hmac(key, stringToSign, encoding); } /** * Returns a random integer value between a lower and upper bound, inclusive of both bounds. Note that this uses Math.random and isn't secure. If you need to use this for any kind of security purpose, find a better source of randomness. * * @param min - The smallest integer value allowed. * * @param max - The largest integer value allowed. */ function getRandomIntegerInclusive(min, max) { return tspRuntime.getRandomIntegerInclusive(min, max); } /** * Type guard for an error object shape (has name and message) * * @param e - Something caught by a catch clause. */ function isError(e) { return tspRuntime.isError(e); } /** * Helper to determine when an input is a generic JS object. * * @returns true when input is an object type that is not null, Array, RegExp, or Date. */ function isObject(input) { return tspRuntime.isObject(input); } /** * Generates a Universally Unique Identifier * * @returns RFC4122 v4 UUID. */ function randomUUID() { return tspRuntime.randomUUID(); } /** * A constant that indicates whether the environment the code is running in is a web browser. */ exports.isBrowser = tspRuntime.isBrowser; /** * A constant that indicates whether the environment the code is running in is Bun.sh. */ exports.isBun = tspRuntime.isBun; /** * A constant that indicates whether the environment the code is running in is Deno. */ exports.isDeno = tspRuntime.isDeno; /** * A constant that indicates whether the environment the code is running in is a Node.js-compatible environment. * * @deprecated * * Use `isNodeLike` instead. */ exports.isNode = tspRuntime.isNodeLike; /** * A constant that indicates whether the environment the code is running in is a Node.js-compatible environment. */ exports.isNodeLike = tspRuntime.isNodeLike; /** * A constant that indicates whether the environment the code is running in is Node.js.
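* @example
* ```ts
* // A sketch: gate Node-only imports behind the runtime check.
* if (isNodeRuntime) {
*   const { createHash } = await import("node:crypto");
* }
* ```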
*/ exports.isNodeRuntime = tspRuntime.isNodeRuntime; /** * A constant that indicates whether the environment the code is running in is React Native. */ exports.isReactNative = tspRuntime.isReactNative; /** * A constant that indicates whether the environment the code is running in is a Web Worker. */ exports.isWebWorker = tspRuntime.isWebWorker; /** * The helper that transforms bytes into a string using the specified character encoding. * @param bytes - the Uint8Array bytes * @param format - the character encoding used to decode the bytes * @returns the decoded string */ function uint8ArrayToString(bytes, format) { return tspRuntime.uint8ArrayToString(bytes, format); } /** * The helper that transforms a string into a byte array using the specified character encoding. * @param value - the string to be converted * @param format - the character encoding used to encode the string * @returns a Uint8Array */ function stringToUint8Array(value, format) { return tspRuntime.stringToUint8Array(value, format); } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/file.js var require_file = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/file.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.hasRawContent = hasRawContent; exports.getRawContent = getRawContent; exports.createFileFromStream = createFileFromStream; exports.createFile = createFile; const core_util_1$7 = require_commonjs$9(); function isNodeReadableStream(x) { return Boolean(x && typeof x["pipe"] === "function"); } const unimplementedMethods = { arrayBuffer: () => { throw new Error("Not implemented"); }, bytes: () => { throw new Error("Not implemented"); }, slice: () => { throw new Error("Not implemented"); }, text: () => { throw new Error("Not implemented"); } }; /** * Private symbol used as key on objects created using createFile containing the * original source of the file object. * * This is used in Node to access the original Node stream without using Blob#stream, which * returns a web stream. This is done to avoid a couple of bugs to do with Blob#stream and * Readable#to/fromWeb in Node versions we support: * - https://github.com/nodejs/node/issues/42694 (fixed in Node 18.14) * - https://github.com/nodejs/node/issues/48916 (fixed in Node 20.6) * * Once these versions are no longer supported, we may be able to stop doing this. * * @internal */ const rawContent = Symbol("rawContent"); /** * Type guard to check if a given object is a blob-like object with a raw content property. */ function hasRawContent(x) { return typeof x[rawContent] === "function"; } /** * Extract the raw content from a given blob-like object. If the input was created using createFile * or createFileFromStream, the exact content passed into createFile/createFileFromStream will be used. * For true instances of Blob and File, returns the actual blob. * * @internal */ function getRawContent(blob) { if (hasRawContent(blob)) return blob[rawContent](); else return blob; } /** * Create an object that implements the File interface. This object is intended to be * passed into RequestBodyType.formData, and is not guaranteed to work as expected in * other situations. * * Use this function to: * - Create a File object for use in RequestBodyType.formData in environments where the * global File object is unavailable. * - Create a File-like object from a readable stream without reading the stream into memory.
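* @example
* ```ts
* // A sketch (Node; the file path is invented): return a fresh stream per call so a
* // retried request can re-read the body.
* import { createReadStream } from "node:fs";
* const file = createFileFromStream(() => createReadStream("./report.pdf"), "report.pdf", { type: "application/pdf" });
* ```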
* * @param stream - the content of the file as a callback returning a stream. When a File object made using createFileFromStream is * passed in a request's form data map, the stream will not be read into memory * and instead will be streamed when the request is made. In the event of a retry, the * stream needs to be read again, so this callback SHOULD return a fresh stream if possible. * @param name - the name of the file. * @param options - optional metadata about the file, e.g. file name, file size, MIME type. */ function createFileFromStream(stream$3, name, options = {}) { var _a$2, _b$1, _c$1, _d$1; return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a$2 = options.type) !== null && _a$2 !== void 0 ? _a$2 : "", lastModified: (_b$1 = options.lastModified) !== null && _b$1 !== void 0 ? _b$1 : new Date().getTime(), webkitRelativePath: (_c$1 = options.webkitRelativePath) !== null && _c$1 !== void 0 ? _c$1 : "", size: (_d$1 = options.size) !== null && _d$1 !== void 0 ? _d$1 : -1, name, stream: () => { const s$1 = stream$3(); if (isNodeReadableStream(s$1)) throw new Error("Not supported: a Node stream was provided as input to createFileFromStream."); return s$1; }, [rawContent]: stream$3 }); } /** * Create an object that implements the File interface. This object is intended to be * passed into RequestBodyType.formData, and is not guaranteed to work as expected in * other situations. * * Use this function to create a File object for use in RequestBodyType.formData in environments where the global File object is unavailable. * * @param content - the content of the file as a Uint8Array in memory. * @param name - the name of the file. * @param options - optional metadata about the file, e.g. file name, file size, MIME type. */ function createFile(content, name, options = {}) { var _a$2, _b$1, _c$1; if (core_util_1$7.isNodeLike) return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a$2 = options.type) !== null && _a$2 !== void 0 ? _a$2 : "", lastModified: (_b$1 = options.lastModified) !== null && _b$1 !== void 0 ? _b$1 : new Date().getTime(), webkitRelativePath: (_c$1 = options.webkitRelativePath) !== null && _c$1 !== void 0 ?
_c$1 : "", size: content.byteLength, name, arrayBuffer: async () => content.buffer, stream: () => new Blob([content]).stream(), [rawContent]: () => content }); else return new File([content], name, options); } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/multipartPolicy.js var require_multipartPolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/multipartPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.multipartPolicyName = void 0; exports.multipartPolicy = multipartPolicy; const policies_1$10 = require_internal$1(); const file_js_1$1 = require_file(); /** * Name of multipart policy */ exports.multipartPolicyName = policies_1$10.multipartPolicyName; /** * Pipeline policy for multipart requests */ function multipartPolicy() { const tspPolicy = (0, policies_1$10.multipartPolicy)(); return { name: exports.multipartPolicyName, sendRequest: async (request, next) => { if (request.multipartBody) { for (const part of request.multipartBody.parts) if ((0, file_js_1$1.hasRawContent)(part.body)) part.body = (0, file_js_1$1.getRawContent)(part.body); } return tspPolicy.sendRequest(request, next); } }; } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/decompressResponsePolicy.js var require_decompressResponsePolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/decompressResponsePolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.decompressResponsePolicyName = void 0; exports.decompressResponsePolicy = decompressResponsePolicy; const policies_1$9 = require_internal$1(); /** * The programmatic identifier of the decompressResponsePolicy. */ exports.decompressResponsePolicyName = policies_1$9.decompressResponsePolicyName; /** * A policy to enable response decompression according to Accept-Encoding header * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding */ function decompressResponsePolicy() { return (0, policies_1$9.decompressResponsePolicy)(); } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/defaultRetryPolicy.js var require_defaultRetryPolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/defaultRetryPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.defaultRetryPolicyName = void 0; exports.defaultRetryPolicy = defaultRetryPolicy; const policies_1$8 = require_internal$1(); /** * Name of the {@link defaultRetryPolicy} */ exports.defaultRetryPolicyName = policies_1$8.defaultRetryPolicyName; /** * A policy that retries according to three strategies: * - When the server sends a 429 response with a Retry-After header. * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). * - Or otherwise if the outgoing request fails, it will retry with an exponentially increasing delay. 
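*
* A minimal usage sketch (illustrative only; it uses this package's documented pipeline helpers):
*
* ```js
* const { createEmptyPipeline, defaultRetryPolicy } = require("@azure/core-rest-pipeline");
* const pipeline = createEmptyPipeline();
* // Retry up to 3 times, honoring Retry-After and otherwise backing off exponentially.
* pipeline.addPolicy(defaultRetryPolicy({ maxRetries: 3 }), { phase: "Retry" });
* ```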
*/ function defaultRetryPolicy(options = {}) { return (0, policies_1$8.defaultRetryPolicy)(options); } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/formDataPolicy.js var require_formDataPolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/formDataPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.formDataPolicyName = void 0; exports.formDataPolicy = formDataPolicy; const policies_1$7 = require_internal$1(); /** * The programmatic identifier of the formDataPolicy. */ exports.formDataPolicyName = policies_1$7.formDataPolicyName; /** * A policy that encodes FormData on the request into the body. */ function formDataPolicy() { return (0, policies_1$7.formDataPolicy)(); } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/proxyPolicy.js var require_proxyPolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/proxyPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.proxyPolicyName = void 0; exports.getDefaultProxySettings = getDefaultProxySettings; exports.proxyPolicy = proxyPolicy; const policies_1$6 = require_internal$1(); /** * The programmatic identifier of the proxyPolicy. */ exports.proxyPolicyName = policies_1$6.proxyPolicyName; /** * This method converts a proxy url into `ProxySettings` for use with ProxyPolicy. * If no argument is given, it attempts to parse a proxy URL from the environment * variables `HTTPS_PROXY` or `HTTP_PROXY`. * @param proxyUrl - The url of the proxy to use. May contain authentication information. * @deprecated - Internally this method is no longer necessary when setting proxy information. */ function getDefaultProxySettings(proxyUrl) { return (0, policies_1$6.getDefaultProxySettings)(proxyUrl); } /** * A policy that allows one to apply proxy settings to all requests. * If not passed static settings, they will be retrieved from the HTTPS_PROXY * or HTTP_PROXY environment variables. * @param proxySettings - ProxySettings to use on each request. * @param options - additional settings, for example, custom NO_PROXY patterns */ function proxyPolicy(proxySettings, options) { return (0, policies_1$6.proxyPolicy)(proxySettings, options); } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/setClientRequestIdPolicy.js var require_setClientRequestIdPolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/setClientRequestIdPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.setClientRequestIdPolicyName = void 0; exports.setClientRequestIdPolicy = setClientRequestIdPolicy; /** * The programmatic identifier of the setClientRequestIdPolicy. */ exports.setClientRequestIdPolicyName = "setClientRequestIdPolicy"; /** * Each PipelineRequest gets a unique id upon creation. * This policy passes that unique id along via an HTTP header to enable better * telemetry and tracing. * @param requestIdHeaderName - The name of the header to pass the request ID to. 
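*
* A minimal usage sketch (illustrative only; the custom header name is an arbitrary example):
*
* ```js
* const { createEmptyPipeline, setClientRequestIdPolicy } = require("@azure/core-rest-pipeline");
* const pipeline = createEmptyPipeline();
* // With no argument, the policy defaults to the "x-ms-client-request-id" header.
* pipeline.addPolicy(setClientRequestIdPolicy("my-client-request-id"));
* ```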
*/ function setClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { return { name: exports.setClientRequestIdPolicyName, async sendRequest(request, next) { if (!request.headers.has(requestIdHeaderName)) request.headers.set(requestIdHeaderName, request.requestId); return next(request); } }; } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/agentPolicy.js var require_agentPolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/agentPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.agentPolicyName = void 0; exports.agentPolicy = agentPolicy; const policies_1$5 = require_internal$1(); /** * Name of the Agent Policy */ exports.agentPolicyName = policies_1$5.agentPolicyName; /** * Gets a pipeline policy that sets http.agent */ function agentPolicy(agent) { return (0, policies_1$5.agentPolicy)(agent); } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/tlsPolicy.js var require_tlsPolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/tlsPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.tlsPolicyName = void 0; exports.tlsPolicy = tlsPolicy; const policies_1$4 = require_internal$1(); /** * Name of the TLS Policy */ exports.tlsPolicyName = policies_1$4.tlsPolicyName; /** * Gets a pipeline policy that adds the client certificate to the HttpClient agent for authentication. */ function tlsPolicy(tlsSettings) { return (0, policies_1$4.tlsPolicy)(tlsSettings); } } }); //#endregion //#region node_modules/.deno/@azure+core-tracing@1.2.0/node_modules/@azure/core-tracing/dist/commonjs/tracingContext.js var require_tracingContext = __commonJS({ "node_modules/.deno/@azure+core-tracing@1.2.0/node_modules/@azure/core-tracing/dist/commonjs/tracingContext.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.TracingContextImpl = exports.knownContextKeys = void 0; exports.createTracingContext = createTracingContext; /** @internal */ exports.knownContextKeys = { span: Symbol.for("@azure/core-tracing span"), namespace: Symbol.for("@azure/core-tracing namespace") }; /** * Creates a new {@link TracingContext} with the given options. * @param options - A set of known keys that may be set on the context. * @returns A new {@link TracingContext} with the given options. * * @internal */ function createTracingContext(options = {}) { let context$2 = new TracingContextImpl(options.parentContext); if (options.span) context$2 = context$2.setValue(exports.knownContextKeys.span, options.span); if (options.namespace) context$2 = context$2.setValue(exports.knownContextKeys.namespace, options.namespace); return context$2; } /** @internal */ var TracingContextImpl = class TracingContextImpl { constructor(initialContext) { this._contextMap = initialContext instanceof TracingContextImpl ? 
new Map(initialContext._contextMap) : new Map(); } setValue(key, value) { const newContext = new TracingContextImpl(this); newContext._contextMap.set(key, value); return newContext; } getValue(key) { return this._contextMap.get(key); } deleteValue(key) { const newContext = new TracingContextImpl(this); newContext._contextMap.delete(key); return newContext; } }; exports.TracingContextImpl = TracingContextImpl; } }); //#endregion //#region node_modules/.deno/@azure+core-tracing@1.2.0/node_modules/@azure/core-tracing/dist/commonjs/state.js var require_state$1 = __commonJS({ "node_modules/.deno/@azure+core-tracing@1.2.0/node_modules/@azure/core-tracing/dist/commonjs/state.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.state = void 0; /** * @internal * * Holds the singleton instrumenter, to be shared across CJS and ESM imports. */ exports.state = { instrumenterImplementation: void 0 }; } }); //#endregion //#region node_modules/.deno/@azure+core-tracing@1.2.0/node_modules/@azure/core-tracing/dist/commonjs/instrumenter.js var require_instrumenter = __commonJS({ "node_modules/.deno/@azure+core-tracing@1.2.0/node_modules/@azure/core-tracing/dist/commonjs/instrumenter.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createDefaultTracingSpan = createDefaultTracingSpan; exports.createDefaultInstrumenter = createDefaultInstrumenter; exports.useInstrumenter = useInstrumenter; exports.getInstrumenter = getInstrumenter; const tracingContext_js_1$1 = require_tracingContext(); const state_js_1$1 = require_state$1(); function createDefaultTracingSpan() { return { end: () => {}, isRecording: () => false, recordException: () => {}, setAttribute: () => {}, setStatus: () => {}, addEvent: () => {} }; } function createDefaultInstrumenter() { return { createRequestHeaders: () => { return {}; }, parseTraceparentHeader: () => { return void 0; }, startSpan: (_name, spanOptions) => { return { span: createDefaultTracingSpan(), tracingContext: (0, tracingContext_js_1$1.createTracingContext)({ parentContext: spanOptions.tracingContext }) }; }, withContext(_context, callback, ...callbackArgs) { return callback(...callbackArgs); } }; } /** * Extends the Azure SDK with support for a given instrumenter implementation. * * @param instrumenter - The instrumenter implementation to use. */ function useInstrumenter(instrumenter) { state_js_1$1.state.instrumenterImplementation = instrumenter; } /** * Gets the currently set instrumenter, a No-Op instrumenter by default. * * @returns The currently set instrumenter */ function getInstrumenter() { if (!state_js_1$1.state.instrumenterImplementation) state_js_1$1.state.instrumenterImplementation = createDefaultInstrumenter(); return state_js_1$1.state.instrumenterImplementation; } } }); //#endregion //#region node_modules/.deno/@azure+core-tracing@1.2.0/node_modules/@azure/core-tracing/dist/commonjs/tracingClient.js var require_tracingClient = __commonJS({ "node_modules/.deno/@azure+core-tracing@1.2.0/node_modules/@azure/core-tracing/dist/commonjs/tracingClient.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createTracingClient = createTracingClient; const instrumenter_js_1$1 = require_instrumenter(); const tracingContext_js_1 = require_tracingContext(); /** * Creates a new tracing client. * * @param options - Options used to configure the tracing client. * @returns - An instance of {@link TracingClient}. 
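*
* A minimal usage sketch (illustrative only; the namespace, package values, and `doWork` callback are placeholders):
*
* ```js
* const { createTracingClient } = require("@azure/core-tracing");
* const tracingClient = createTracingClient({ namespace: "Microsoft.Example", packageName: "example-package", packageVersion: "1.0.0" });
* // withSpan starts a span, runs the callback in the span's context, and ends the span.
* const result = await tracingClient.withSpan("ExampleClient.operation", {}, (updatedOptions) => doWork(updatedOptions));
* ```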
*/ function createTracingClient(options) { const { namespace, packageName, packageVersion } = options; function startSpan(name, operationOptions, spanOptions) { var _a$2; const startSpanResult = (0, instrumenter_js_1$1.getInstrumenter)().startSpan(name, Object.assign(Object.assign({}, spanOptions), { packageName, packageVersion, tracingContext: (_a$2 = operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) === null || _a$2 === void 0 ? void 0 : _a$2.tracingContext })); let tracingContext = startSpanResult.tracingContext; const span = startSpanResult.span; if (!tracingContext.getValue(tracingContext_js_1.knownContextKeys.namespace)) tracingContext = tracingContext.setValue(tracingContext_js_1.knownContextKeys.namespace, namespace); span.setAttribute("az.namespace", tracingContext.getValue(tracingContext_js_1.knownContextKeys.namespace)); const updatedOptions = Object.assign({}, operationOptions, { tracingOptions: Object.assign(Object.assign({}, operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions), { tracingContext }) }); return { span, updatedOptions }; } async function withSpan(name, operationOptions, callback, spanOptions) { const { span, updatedOptions } = startSpan(name, operationOptions, spanOptions); try { const result = await withContext(updatedOptions.tracingOptions.tracingContext, () => Promise.resolve(callback(updatedOptions, span))); span.setStatus({ status: "success" }); return result; } catch (err) { span.setStatus({ status: "error", error: err }); throw err; } finally { span.end(); } } function withContext(context$2, callback, ...callbackArgs) { return (0, instrumenter_js_1$1.getInstrumenter)().withContext(context$2, callback, ...callbackArgs); } /** * Parses a traceparent header value into a span identifier. * * @param traceparentHeader - The traceparent header to parse. * @returns An implementation-specific identifier for the span. */ function parseTraceparentHeader(traceparentHeader) { return (0, instrumenter_js_1$1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); } /** * Creates a set of request headers to propagate tracing information to a backend. * * @param tracingContext - The context containing the span to serialize. * @returns The set of headers to add to a request. 
*/ function createRequestHeaders(tracingContext) { return (0, instrumenter_js_1$1.getInstrumenter)().createRequestHeaders(tracingContext); } return { startSpan, withSpan, withContext, parseTraceparentHeader, createRequestHeaders }; } } }); //#endregion //#region node_modules/.deno/@azure+core-tracing@1.2.0/node_modules/@azure/core-tracing/dist/commonjs/index.js var require_commonjs$8 = __commonJS({ "node_modules/.deno/@azure+core-tracing@1.2.0/node_modules/@azure/core-tracing/dist/commonjs/index.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createTracingClient = exports.useInstrumenter = void 0; var instrumenter_js_1 = require_instrumenter(); Object.defineProperty(exports, "useInstrumenter", { enumerable: true, get: function() { return instrumenter_js_1.useInstrumenter; } }); var tracingClient_js_1 = require_tracingClient(); Object.defineProperty(exports, "createTracingClient", { enumerable: true, get: function() { return tracingClient_js_1.createTracingClient; } }); } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/restError.js var require_restError = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/restError.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.RestError = void 0; exports.isRestError = isRestError; const ts_http_runtime_1$3 = require_commonjs$12(); /** * A custom error type for failed pipeline requests. */ var RestError = class extends Error { constructor(message, options = {}) { super(message); return new ts_http_runtime_1$3.RestError(message, options); } }; exports.RestError = RestError; /** * Something went wrong when making the request. * This means the actual request failed for some reason, * such as a DNS issue or the connection being lost. */ RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; /** * This means that parsing the response from the server failed. * It may have been malformed. */ RestError.PARSE_ERROR = "PARSE_ERROR"; /** * Typeguard for RestError * @param e - Something caught by a catch clause. */ function isRestError(e) { return (0, ts_http_runtime_1$3.isRestError)(e); } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/tracingPolicy.js var require_tracingPolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/tracingPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.tracingPolicyName = void 0; exports.tracingPolicy = tracingPolicy; const core_tracing_1 = require_commonjs$8(); const constants_js_1$4 = require_constants$1(); const userAgent_js_1 = require_userAgent(); const log_js_1$4 = require_log$1(); const core_util_1$6 = require_commonjs$9(); const restError_js_1$2 = require_restError(); const util_1$1 = require_internal(); /** * The programmatic identifier of the tracingPolicy. */ exports.tracingPolicyName = "tracingPolicy"; /** * A simple policy to create OpenTelemetry Spans for each request made by the pipeline * that has SpanOptions with a parent. * Requests made without a parent Span will not be recorded. * @param options - Options to configure the telemetry logged by the tracing policy. 
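*
* A minimal usage sketch (illustrative only):
*
* ```js
* const { createEmptyPipeline, tracingPolicy } = require("@azure/core-rest-pipeline");
* const pipeline = createEmptyPipeline();
* // Spans are only recorded for requests that carry tracing options with a parent context.
* pipeline.addPolicy(tracingPolicy({ userAgentPrefix: "my-app/1.0.0" }), { afterPhase: "Retry" });
* ```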
*/ function tracingPolicy(options = {}) { const userAgentPromise = (0, userAgent_js_1.getUserAgentValue)(options.userAgentPrefix); const sanitizer = new util_1$1.Sanitizer({ additionalAllowedQueryParameters: options.additionalAllowedQueryParameters }); const tracingClient$1 = tryCreateTracingClient(); return { name: exports.tracingPolicyName, async sendRequest(request, next) { var _a$2; if (!tracingClient$1) return next(request); const userAgent = await userAgentPromise; const spanAttributes = { "http.url": sanitizer.sanitizeUrl(request.url), "http.method": request.method, "http.user_agent": userAgent, requestId: request.requestId }; if (userAgent) spanAttributes["http.user_agent"] = userAgent; const { span, tracingContext } = (_a$2 = tryCreateSpan(tracingClient$1, request, spanAttributes)) !== null && _a$2 !== void 0 ? _a$2 : {}; if (!span || !tracingContext) return next(request); try { const response = await tracingClient$1.withContext(tracingContext, next, request); tryProcessResponse(span, response); return response; } catch (err) { tryProcessError(span, err); throw err; } } }; } function tryCreateTracingClient() { try { return (0, core_tracing_1.createTracingClient)({ namespace: "", packageName: "@azure/core-rest-pipeline", packageVersion: constants_js_1$4.SDK_VERSION }); } catch (e) { log_js_1$4.logger.warning(`Error when creating the TracingClient: ${(0, core_util_1$6.getErrorMessage)(e)}`); return void 0; } } function tryCreateSpan(tracingClient$1, request, spanAttributes) { try { const { span, updatedOptions } = tracingClient$1.startSpan(`HTTP ${request.method}`, { tracingOptions: request.tracingOptions }, { spanKind: "client", spanAttributes }); if (!span.isRecording()) { span.end(); return void 0; } const headers = tracingClient$1.createRequestHeaders(updatedOptions.tracingOptions.tracingContext); for (const [key, value] of Object.entries(headers)) request.headers.set(key, value); return { span, tracingContext: updatedOptions.tracingOptions.tracingContext }; } catch (e) { log_js_1$4.logger.warning(`Skipping creating a tracing span due to an error: ${(0, core_util_1$6.getErrorMessage)(e)}`); return void 0; } } function tryProcessError(span, error) { try { span.setStatus({ status: "error", error: (0, core_util_1$6.isError)(error) ? 
error : void 0 }); if ((0, restError_js_1$2.isRestError)(error) && error.statusCode) span.setAttribute("http.status_code", error.statusCode); span.end(); } catch (e) { log_js_1$4.logger.warning(`Skipping tracing span processing due to an error: ${(0, core_util_1$6.getErrorMessage)(e)}`); } } function tryProcessResponse(span, response) { try { span.setAttribute("http.status_code", response.status); const serviceRequestId = response.headers.get("x-ms-request-id"); if (serviceRequestId) span.setAttribute("serviceRequestId", serviceRequestId); if (response.status >= 400) span.setStatus({ status: "error" }); span.end(); } catch (e) { log_js_1$4.logger.warning(`Skipping tracing span processing due to an error: ${(0, core_util_1$6.getErrorMessage)(e)}`); } } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/wrapAbortSignal.js var require_wrapAbortSignal = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/wrapAbortSignal.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.wrapAbortSignalLike = wrapAbortSignalLike; /** * Creates a native AbortSignal which reflects the state of the provided AbortSignalLike. * If the AbortSignalLike is already a native AbortSignal, it is returned as is. * @param abortSignalLike - The AbortSignalLike to wrap. * @returns - An object containing the native AbortSignal and an optional cleanup function. The cleanup function should be called when the AbortSignal is no longer needed. */ function wrapAbortSignalLike(abortSignalLike) { if (abortSignalLike instanceof AbortSignal) return { abortSignal: abortSignalLike }; if (abortSignalLike.aborted) return { abortSignal: AbortSignal.abort(abortSignalLike.reason) }; const controller = new AbortController(); let needsCleanup = true; function cleanup() { if (needsCleanup) { abortSignalLike.removeEventListener("abort", listener); needsCleanup = false; } } function listener() { controller.abort(abortSignalLike.reason); cleanup(); } abortSignalLike.addEventListener("abort", listener); return { abortSignal: controller.signal, cleanup }; } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/wrapAbortSignalLikePolicy.js var require_wrapAbortSignalLikePolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/wrapAbortSignalLikePolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.wrapAbortSignalLikePolicyName = void 0; exports.wrapAbortSignalLikePolicy = wrapAbortSignalLikePolicy; const wrapAbortSignal_js_1$1 = require_wrapAbortSignal(); exports.wrapAbortSignalLikePolicyName = "wrapAbortSignalLikePolicy"; /** * Policy that ensures that any AbortSignalLike on a request is wrapped in a native AbortSignal before the pipeline processes it, * since the underlying ts-http-runtime expects a native AbortSignal.
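*
* A sketch of the underlying wrapping behavior (illustrative only; `someAbortSignalLike` is a placeholder for any AbortSignalLike value):
*
* ```js
* const { abortSignal, cleanup } = wrapAbortSignalLike(someAbortSignalLike);
* try {
*   await fetch("https://example.com", { signal: abortSignal });
* } finally {
*   cleanup?.(); // detach the forwarding "abort" listener once the signal is no longer needed
* }
* ```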
* * @returns - created policy */ function wrapAbortSignalLikePolicy() { return { name: exports.wrapAbortSignalLikePolicyName, sendRequest: async (request, next) => { if (!request.abortSignal) return next(request); const { abortSignal: abortSignal$1, cleanup } = (0, wrapAbortSignal_js_1$1.wrapAbortSignalLike)(request.abortSignal); request.abortSignal = abortSignal$1; try { return await next(request); } finally { cleanup === null || cleanup === void 0 || cleanup(); } } }; } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/createPipelineFromOptions.js var require_createPipelineFromOptions = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/createPipelineFromOptions.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createPipelineFromOptions = createPipelineFromOptions; const logPolicy_js_1$1 = require_logPolicy(); const pipeline_js_1$3 = require_pipeline$1(); const redirectPolicy_js_1$1 = require_redirectPolicy(); const userAgentPolicy_js_1$1 = require_userAgentPolicy(); const multipartPolicy_js_1$1 = require_multipartPolicy(); const decompressResponsePolicy_js_1$1 = require_decompressResponsePolicy(); const defaultRetryPolicy_js_1$1 = require_defaultRetryPolicy(); const formDataPolicy_js_1$1 = require_formDataPolicy(); const core_util_1$5 = require_commonjs$9(); const proxyPolicy_js_1$1 = require_proxyPolicy(); const setClientRequestIdPolicy_js_1$1 = require_setClientRequestIdPolicy(); const agentPolicy_js_1$1 = require_agentPolicy(); const tlsPolicy_js_1$1 = require_tlsPolicy(); const tracingPolicy_js_1$1 = require_tracingPolicy(); const wrapAbortSignalLikePolicy_js_1 = require_wrapAbortSignalLikePolicy(); /** * Create a new pipeline with a default set of customizable policies. * @param options - Options to configure a custom pipeline. */ function createPipelineFromOptions(options) { var _a$2; const pipeline = (0, pipeline_js_1$3.createEmptyPipeline)(); if (core_util_1$5.isNodeLike) { if (options.agent) pipeline.addPolicy((0, agentPolicy_js_1$1.agentPolicy)(options.agent)); if (options.tlsOptions) pipeline.addPolicy((0, tlsPolicy_js_1$1.tlsPolicy)(options.tlsOptions)); pipeline.addPolicy((0, proxyPolicy_js_1$1.proxyPolicy)(options.proxyOptions)); pipeline.addPolicy((0, decompressResponsePolicy_js_1$1.decompressResponsePolicy)()); } pipeline.addPolicy((0, wrapAbortSignalLikePolicy_js_1.wrapAbortSignalLikePolicy)()); pipeline.addPolicy((0, formDataPolicy_js_1$1.formDataPolicy)(), { beforePolicies: [multipartPolicy_js_1$1.multipartPolicyName] }); pipeline.addPolicy((0, userAgentPolicy_js_1$1.userAgentPolicy)(options.userAgentOptions)); pipeline.addPolicy((0, setClientRequestIdPolicy_js_1$1.setClientRequestIdPolicy)((_a$2 = options.telemetryOptions) === null || _a$2 === void 0 ? 
void 0 : _a$2.clientRequestIdHeaderName)); pipeline.addPolicy((0, multipartPolicy_js_1$1.multipartPolicy)(), { afterPhase: "Deserialize" }); pipeline.addPolicy((0, defaultRetryPolicy_js_1$1.defaultRetryPolicy)(options.retryOptions), { phase: "Retry" }); pipeline.addPolicy((0, tracingPolicy_js_1$1.tracingPolicy)(Object.assign(Object.assign({}, options.userAgentOptions), options.loggingOptions)), { afterPhase: "Retry" }); if (core_util_1$5.isNodeLike) pipeline.addPolicy((0, redirectPolicy_js_1$1.redirectPolicy)(options.redirectOptions), { afterPhase: "Retry" }); pipeline.addPolicy((0, logPolicy_js_1$1.logPolicy)(options.loggingOptions), { afterPhase: "Sign" }); return pipeline; } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/defaultHttpClient.js var require_defaultHttpClient = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/defaultHttpClient.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createDefaultHttpClient = createDefaultHttpClient; const ts_http_runtime_1$2 = require_commonjs$12(); const wrapAbortSignal_js_1 = require_wrapAbortSignal(); /** * Create the correct HttpClient for the current environment. */ function createDefaultHttpClient() { const client = (0, ts_http_runtime_1$2.createDefaultHttpClient)(); return { async sendRequest(request) { const { abortSignal: abortSignal$1, cleanup } = request.abortSignal ? (0, wrapAbortSignal_js_1.wrapAbortSignalLike)(request.abortSignal) : {}; try { request.abortSignal = abortSignal$1; return await client.sendRequest(request); } finally { cleanup === null || cleanup === void 0 || cleanup(); } } }; } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/httpHeaders.js var require_httpHeaders = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/httpHeaders.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createHttpHeaders = createHttpHeaders; const ts_http_runtime_1$1 = require_commonjs$12(); /** * Creates an object that satisfies the `HttpHeaders` interface. * @param rawHeaders - A simple object representing initial headers */ function createHttpHeaders(rawHeaders) { return (0, ts_http_runtime_1$1.createHttpHeaders)(rawHeaders); } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/pipelineRequest.js var require_pipelineRequest = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/pipelineRequest.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createPipelineRequest = createPipelineRequest; const ts_http_runtime_1 = require_commonjs$12(); /** * Creates a new pipeline request with the given options. * This method exists to allow easy setting of default values and is not required. * @param options - The options to create the request with.
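*
* A minimal usage sketch (illustrative only; the URL is a placeholder):
*
* ```js
* const { createPipelineFromOptions, createDefaultHttpClient, createPipelineRequest } = require("@azure/core-rest-pipeline");
* const pipeline = createPipelineFromOptions({});
* const request = createPipelineRequest({ url: "https://example.com", method: "GET" });
* const response = await pipeline.sendRequest(createDefaultHttpClient(), request);
* console.log(response.status);
* ```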
*/ function createPipelineRequest(options) { return (0, ts_http_runtime_1.createPipelineRequest)(options); } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/exponentialRetryPolicy.js var require_exponentialRetryPolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/exponentialRetryPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.exponentialRetryPolicyName = void 0; exports.exponentialRetryPolicy = exponentialRetryPolicy; const policies_1$3 = require_internal$1(); /** * The programmatic identifier of the exponentialRetryPolicy. */ exports.exponentialRetryPolicyName = policies_1$3.exponentialRetryPolicyName; /** * A policy that attempts to retry requests while introducing an exponentially increasing delay. * @param options - Options that configure retry logic. */ function exponentialRetryPolicy(options = {}) { return (0, policies_1$3.exponentialRetryPolicy)(options); } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/systemErrorRetryPolicy.js var require_systemErrorRetryPolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/systemErrorRetryPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.systemErrorRetryPolicyName = void 0; exports.systemErrorRetryPolicy = systemErrorRetryPolicy; const policies_1$2 = require_internal$1(); /** * Name of the {@link systemErrorRetryPolicy} */ exports.systemErrorRetryPolicyName = policies_1$2.systemErrorRetryPolicyName; /** * A retry policy that specifically seeks to handle errors in the * underlying transport layer (e.g. DNS lookup failures) rather than * retryable error codes from the server itself. * @param options - Options that customize the policy. */ function systemErrorRetryPolicy(options = {}) { return (0, policies_1$2.systemErrorRetryPolicy)(options); } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/throttlingRetryPolicy.js var require_throttlingRetryPolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/throttlingRetryPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.throttlingRetryPolicyName = void 0; exports.throttlingRetryPolicy = throttlingRetryPolicy; const policies_1$1 = require_internal$1(); /** * Name of the {@link throttlingRetryPolicy} */ exports.throttlingRetryPolicyName = policies_1$1.throttlingRetryPolicyName; /** * A policy that retries when the server sends a 429 response with a Retry-After header. * * To learn more, please refer to * https://learn.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, * https://learn.microsoft.com/en-us/azure/azure-subscription-service-limits and * https://learn.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors * * @param options - Options that configure retry logic. 
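*
* A minimal usage sketch (illustrative only):
*
* ```js
* const { createEmptyPipeline, throttlingRetryPolicy } = require("@azure/core-rest-pipeline");
* const pipeline = createEmptyPipeline();
* // Honor Retry-After on 429 responses, retrying up to 5 times.
* pipeline.addPolicy(throttlingRetryPolicy({ maxRetries: 5 }), { phase: "Retry" });
* ```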
*/ function throttlingRetryPolicy(options = {}) { return (0, policies_1$1.throttlingRetryPolicy)(options); } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/retryPolicy.js var require_retryPolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/retryPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.retryPolicy = retryPolicy; const logger_1$2 = require_commonjs$11(); const constants_js_1$3 = require_constants$1(); const policies_1 = require_internal$1(); const retryPolicyLogger = (0, logger_1$2.createClientLogger)("core-rest-pipeline retryPolicy"); /** * retryPolicy is a generic policy to enable retrying requests when certain conditions are met */ function retryPolicy(strategies, options = { maxRetries: constants_js_1$3.DEFAULT_RETRY_POLICY_COUNT }) { return (0, policies_1.retryPolicy)(strategies, Object.assign({ logger: retryPolicyLogger }, options)); } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/tokenCycler.js var require_tokenCycler = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/tokenCycler.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.DEFAULT_CYCLER_OPTIONS = void 0; exports.createTokenCycler = createTokenCycler; const core_util_1$4 = require_commonjs$9(); exports.DEFAULT_CYCLER_OPTIONS = { forcedRefreshWindowInMs: 1e3, retryIntervalInMs: 3e3, refreshWindowInMs: 1e3 * 60 * 2 }; /** * Converts an unreliable access token getter (which may resolve with null) * into an AccessTokenGetter by retrying the unreliable getter at a regular * interval. * * @param getAccessToken - A function that produces a promise of an access token that may fail by returning null. * @param retryIntervalInMs - The time (in milliseconds) to wait between retry attempts. * @param refreshTimeout - The timestamp after which the refresh attempt will fail, throwing an exception. * @returns - A promise that, if it resolves, will resolve with an access token. */ async function beginRefresh(getAccessToken, retryIntervalInMs, refreshTimeout) { async function tryGetAccessToken() { if (Date.now() < refreshTimeout) try { return await getAccessToken(); } catch (_a$2) { return null; } else { const finalToken = await getAccessToken(); if (finalToken === null) throw new Error("Failed to refresh access token."); return finalToken; } } let token = await tryGetAccessToken(); while (token === null) { await (0, core_util_1$4.delay)(retryIntervalInMs); token = await tryGetAccessToken(); } return token; } /** * Creates a token cycler from a credential, scopes, and optional settings. * * A token cycler represents a way to reliably retrieve a valid access token * from a TokenCredential. It will handle initializing the token, refreshing it * when it nears expiration, and synchronizing refresh attempts to avoid * concurrency hazards.
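*
* A hedged usage sketch (illustrative only; `DefaultAzureCredential` comes from the separate @azure/identity package, and createTokenCycler itself is internal to this package):
*
* ```js
* const { DefaultAzureCredential } = require("@azure/identity");
* const getToken = createTokenCycler(new DefaultAzureCredential());
* // The returned getter caches the token and refreshes it proactively as it nears expiration.
* const token = await getToken("https://management.azure.com/.default", {});
* ```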
* * @param credential - the underlying TokenCredential that provides the access * token * @param tokenCyclerOptions - optionally override default settings for the cycler * * @returns - a function that reliably produces a valid access token */ function createTokenCycler(credential, tokenCyclerOptions) { let refreshWorker = null; let token = null; let tenantId; const options = Object.assign(Object.assign({}, exports.DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); /** * This little holder defines several predicates that we use to construct * the rules of refreshing the token. */ const cycler = { get isRefreshing() { return refreshWorker !== null; }, get shouldRefresh() { var _a$2; if (cycler.isRefreshing) return false; if ((token === null || token === void 0 ? void 0 : token.refreshAfterTimestamp) && token.refreshAfterTimestamp < Date.now()) return true; return ((_a$2 = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a$2 !== void 0 ? _a$2 : 0) - options.refreshWindowInMs < Date.now(); }, get mustRefresh() { return token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now(); } }; /** * Starts a refresh job or returns the existing job if one is already * running. */ function refresh(scopes, getTokenOptions) { var _a$2; if (!cycler.isRefreshing) { const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); refreshWorker = beginRefresh( tryGetAccessToken, options.retryIntervalInMs, // If we don't have a token, then we should timeout immediately (_a$2 = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a$2 !== void 0 ? _a$2 : Date.now() ).then((_token) => { refreshWorker = null; token = _token; tenantId = getTokenOptions.tenantId; return token; }).catch((reason) => { refreshWorker = null; token = null; tenantId = void 0; throw reason; }); } return refreshWorker; } return async (scopes, tokenOptions) => { const hasClaimChallenge = Boolean(tokenOptions.claims); const tenantIdChanged = tenantId !== tokenOptions.tenantId; if (hasClaimChallenge) token = null; const mustRefresh = tenantIdChanged || hasClaimChallenge || cycler.mustRefresh; if (mustRefresh) return refresh(scopes, tokenOptions); if (cycler.shouldRefresh) refresh(scopes, tokenOptions); return token; }; } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/bearerTokenAuthenticationPolicy.js var require_bearerTokenAuthenticationPolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/bearerTokenAuthenticationPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.bearerTokenAuthenticationPolicyName = void 0; exports.bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy; exports.parseChallenges = parseChallenges; const tokenCycler_js_1$1 = require_tokenCycler(); const log_js_1$3 = require_log$1(); const restError_js_1$1 = require_restError(); /** * The programmatic identifier of the bearerTokenAuthenticationPolicy. */ exports.bearerTokenAuthenticationPolicyName = "bearerTokenAuthenticationPolicy"; /** * Try to send the given request. * * When a response is received, returns a tuple of the response received and, if the response was received * inside a thrown RestError, the RestError that was thrown. 
* * Otherwise, if an error was thrown while sending the request that did not provide an underlying response, it * will be rethrown. */ async function trySendRequest(request, next) { try { return [await next(request), void 0]; } catch (e) { if ((0, restError_js_1$1.isRestError)(e) && e.response) return [e.response, e]; else throw e; } } /** * Default authorize request handler */ async function defaultAuthorizeRequest(options) { const { scopes, getAccessToken, request } = options; const getTokenOptions = { abortSignal: request.abortSignal, tracingOptions: request.tracingOptions, enableCae: true }; const accessToken = await getAccessToken(scopes, getTokenOptions); if (accessToken) options.request.headers.set("Authorization", `Bearer ${accessToken.token}`); } /** * We will retrieve the challenge only if the response status code was 401, * and if the response contained the header "WWW-Authenticate" with a non-empty value. */ function isChallengeResponse(response) { return response.status === 401 && response.headers.has("WWW-Authenticate"); } /** * Re-authorize the request for CAE challenge. * The response containing the challenge is `options.response`. * If this method returns true, the underlying request will be sent once again. */ async function authorizeRequestOnCaeChallenge(onChallengeOptions, caeClaims) { var _a$2; const { scopes } = onChallengeOptions; const accessToken = await onChallengeOptions.getAccessToken(scopes, { enableCae: true, claims: caeClaims }); if (!accessToken) return false; onChallengeOptions.request.headers.set("Authorization", `${(_a$2 = accessToken.tokenType) !== null && _a$2 !== void 0 ? _a$2 : "Bearer"} ${accessToken.token}`); return true; } /** * A policy that can request a token from a TokenCredential implementation and * then apply it to the Authorization header of a request as a Bearer token. */ function bearerTokenAuthenticationPolicy(options) { var _a$2, _b$1, _c$1; const { credential, scopes, challengeCallbacks } = options; const logger$2 = options.logger || log_js_1$3.logger; const callbacks = { authorizeRequest: (_b$1 = (_a$2 = challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequest) === null || _a$2 === void 0 ? void 0 : _a$2.bind(challengeCallbacks)) !== null && _b$1 !== void 0 ? _b$1 : defaultAuthorizeRequest, authorizeRequestOnChallenge: (_c$1 = challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequestOnChallenge) === null || _c$1 === void 0 ? void 0 : _c$1.bind(challengeCallbacks) }; const getAccessToken = credential ? (0, tokenCycler_js_1$1.createTokenCycler)( credential /* , options */ ) : () => Promise.resolve(null); return { name: exports.bearerTokenAuthenticationPolicyName, async sendRequest(request, next) { if (!request.url.toLowerCase().startsWith("https://")) throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); await callbacks.authorizeRequest({ scopes: Array.isArray(scopes) ? scopes : [scopes], request, getAccessToken, logger: logger$2 }); let response; let error; let shouldSendRequest; [response, error] = await trySendRequest(request, next); if (isChallengeResponse(response)) { let claims = getCaeChallengeClaims(response.headers.get("WWW-Authenticate")); if (claims) { let parsedClaim; try { parsedClaim = atob(claims); } catch (e) { logger$2.warning(`The WWW-Authenticate header contains "claims" that cannot be parsed. Unable to perform the Continuous Access Evaluation authentication flow. 
Unparsable claims: ${claims}`); return response; } shouldSendRequest = await authorizeRequestOnCaeChallenge({ scopes: Array.isArray(scopes) ? scopes : [scopes], response, request, getAccessToken, logger: logger$2 }, parsedClaim); if (shouldSendRequest) [response, error] = await trySendRequest(request, next); } else if (callbacks.authorizeRequestOnChallenge) { shouldSendRequest = await callbacks.authorizeRequestOnChallenge({ scopes: Array.isArray(scopes) ? scopes : [scopes], request, response, getAccessToken, logger: logger$2 }); if (shouldSendRequest) [response, error] = await trySendRequest(request, next); if (isChallengeResponse(response)) { claims = getCaeChallengeClaims(response.headers.get("WWW-Authenticate")); if (claims) { let parsedClaim; try { parsedClaim = atob(claims); } catch (e) { logger$2.warning(`The WWW-Authenticate header contains "claims" that cannot be parsed. Unable to perform the Continuous Access Evaluation authentication flow. Unparsable claims: ${claims}`); return response; } shouldSendRequest = await authorizeRequestOnCaeChallenge({ scopes: Array.isArray(scopes) ? scopes : [scopes], response, request, getAccessToken, logger: logger$2 }, parsedClaim); if (shouldSendRequest) [response, error] = await trySendRequest(request, next); } } } } if (error) throw error; else return response; } }; } /** * Converts: `Bearer a="b", c="d", Pop e="f", g="h"`. * Into: `[ { scheme: 'Bearer', params: { a: 'b', c: 'd' } }, { scheme: 'Pop', params: { e: 'f', g: 'h' } } ]`. * * @internal */ function parseChallenges(challenges) { const challengeRegex = /(\w+)\s+((?:\w+=(?:"[^"]*"|[^,]*),?\s*)+)/g; const paramRegex = /(\w+)="([^"]*)"/g; const parsedChallenges = []; let match$2; while ((match$2 = challengeRegex.exec(challenges)) !== null) { const scheme = match$2[1]; const paramsString = match$2[2]; const params = {}; let paramMatch; while ((paramMatch = paramRegex.exec(paramsString)) !== null) params[paramMatch[1]] = paramMatch[2]; parsedChallenges.push({ scheme, params }); } return parsedChallenges; } /** * Parse a pipeline response and look for a CAE challenge with "Bearer" scheme * Return the value in the header without parsing the challenge * @internal */ function getCaeChallengeClaims(challenges) { var _a$2; if (!challenges) return; const parsedChallenges = parseChallenges(challenges); return (_a$2 = parsedChallenges.find((x) => x.scheme === "Bearer" && x.params.claims && x.params.error === "insufficient_claims")) === null || _a$2 === void 0 ? void 0 : _a$2.params.claims; } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/ndJsonPolicy.js var require_ndJsonPolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/ndJsonPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.ndJsonPolicyName = void 0; exports.ndJsonPolicy = ndJsonPolicy; /** * The programmatic identifier of the ndJsonPolicy. */ exports.ndJsonPolicyName = "ndJsonPolicy"; /** * ndJsonPolicy is a policy used to control keep alive settings for every request. 
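*
* Concretely, the implementation below rewrites a JSON-array string body into newline-delimited JSON, one serialized item per line. A small illustrative sketch of the transformation:
*
* ```js
* const before = JSON.stringify([{ a: 1 }, { a: 2 }]);
* // After the policy runs, the request body is equivalent to:
* const after = JSON.stringify({ a: 1 }) + "\n" + JSON.stringify({ a: 2 }) + "\n";
* ```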
*/ function ndJsonPolicy() { return { name: exports.ndJsonPolicyName, async sendRequest(request, next) { if (typeof request.body === "string" && request.body.startsWith("[")) { const body$1 = JSON.parse(request.body); if (Array.isArray(body$1)) request.body = body$1.map((item) => JSON.stringify(item) + "\n").join(""); } return next(request); } }; } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/auxiliaryAuthenticationHeaderPolicy.js var require_auxiliaryAuthenticationHeaderPolicy = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/auxiliaryAuthenticationHeaderPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.auxiliaryAuthenticationHeaderPolicyName = void 0; exports.auxiliaryAuthenticationHeaderPolicy = auxiliaryAuthenticationHeaderPolicy; const tokenCycler_js_1 = require_tokenCycler(); const log_js_1$2 = require_log$1(); /** * The programmatic identifier of the auxiliaryAuthenticationHeaderPolicy. */ exports.auxiliaryAuthenticationHeaderPolicyName = "auxiliaryAuthenticationHeaderPolicy"; const AUTHORIZATION_AUXILIARY_HEADER = "x-ms-authorization-auxiliary"; async function sendAuthorizeRequest(options) { var _a$2, _b$1; const { scopes, getAccessToken, request } = options; const getTokenOptions = { abortSignal: request.abortSignal, tracingOptions: request.tracingOptions }; return (_b$1 = (_a$2 = await getAccessToken(scopes, getTokenOptions)) === null || _a$2 === void 0 ? void 0 : _a$2.token) !== null && _b$1 !== void 0 ? _b$1 : ""; } /** * A policy that sets external tenant tokens on the `x-ms-authorization-auxiliary` header. * This header is used by cross-tenant applications that need to handle authentication requests * for resources in other tenants. * See the [ARM docs](https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/authenticate-multi-tenant) for a rundown of how this feature works. */ function auxiliaryAuthenticationHeaderPolicy(options) { const { credentials, scopes } = options; const logger$2 = options.logger || log_js_1$2.logger; const tokenCyclerMap = new WeakMap(); return { name: exports.auxiliaryAuthenticationHeaderPolicyName, async sendRequest(request, next) { if (!request.url.toLowerCase().startsWith("https://")) throw new Error("Bearer token authentication for auxiliary header is not permitted for non-TLS protected (non-https) URLs."); if (!credentials || credentials.length === 0) { logger$2.info(`${exports.auxiliaryAuthenticationHeaderPolicyName} header will not be set due to empty credentials.`); return next(request); } const tokenPromises = []; for (const credential of credentials) { let getAccessToken = tokenCyclerMap.get(credential); if (!getAccessToken) { getAccessToken = (0, tokenCycler_js_1.createTokenCycler)(credential); tokenCyclerMap.set(credential, getAccessToken); } tokenPromises.push(sendAuthorizeRequest({ scopes: Array.isArray(scopes) ? scopes : [scopes], request, getAccessToken, logger: logger$2 })); } const auxiliaryTokens = (await Promise.all(tokenPromises)).filter((token) => Boolean(token)); if (auxiliaryTokens.length === 0) { logger$2.warning(`None of the auxiliary tokens are valid.
${AUTHORIZATION_AUXILIARY_HEADER} header will not be set.`); return next(request); } request.headers.set(AUTHORIZATION_AUXILIARY_HEADER, auxiliaryTokens.map((token) => `Bearer ${token}`).join(", ")); return next(request); } }; } } }); //#endregion //#region node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/index.js var require_commonjs$7 = __commonJS({ "node_modules/.deno/@azure+core-rest-pipeline@1.20.0/node_modules/@azure/core-rest-pipeline/dist/commonjs/index.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createFileFromStream = exports.createFile = exports.agentPolicyName = exports.agentPolicy = exports.auxiliaryAuthenticationHeaderPolicyName = exports.auxiliaryAuthenticationHeaderPolicy = exports.ndJsonPolicyName = exports.ndJsonPolicy = exports.bearerTokenAuthenticationPolicyName = exports.bearerTokenAuthenticationPolicy = exports.formDataPolicyName = exports.formDataPolicy = exports.tlsPolicyName = exports.tlsPolicy = exports.userAgentPolicyName = exports.userAgentPolicy = exports.defaultRetryPolicy = exports.tracingPolicyName = exports.tracingPolicy = exports.retryPolicy = exports.throttlingRetryPolicyName = exports.throttlingRetryPolicy = exports.systemErrorRetryPolicyName = exports.systemErrorRetryPolicy = exports.redirectPolicyName = exports.redirectPolicy = exports.getDefaultProxySettings = exports.proxyPolicyName = exports.proxyPolicy = exports.multipartPolicyName = exports.multipartPolicy = exports.logPolicyName = exports.logPolicy = exports.setClientRequestIdPolicyName = exports.setClientRequestIdPolicy = exports.exponentialRetryPolicyName = exports.exponentialRetryPolicy = exports.decompressResponsePolicyName = exports.decompressResponsePolicy = exports.isRestError = exports.RestError = exports.createPipelineRequest = exports.createHttpHeaders = exports.createDefaultHttpClient = exports.createPipelineFromOptions = exports.createEmptyPipeline = void 0; var pipeline_js_1$2 = require_pipeline$1(); Object.defineProperty(exports, "createEmptyPipeline", { enumerable: true, get: function() { return pipeline_js_1$2.createEmptyPipeline; } }); var createPipelineFromOptions_js_1 = require_createPipelineFromOptions(); Object.defineProperty(exports, "createPipelineFromOptions", { enumerable: true, get: function() { return createPipelineFromOptions_js_1.createPipelineFromOptions; } }); var defaultHttpClient_js_1 = require_defaultHttpClient(); Object.defineProperty(exports, "createDefaultHttpClient", { enumerable: true, get: function() { return defaultHttpClient_js_1.createDefaultHttpClient; } }); var httpHeaders_js_1 = require_httpHeaders(); Object.defineProperty(exports, "createHttpHeaders", { enumerable: true, get: function() { return httpHeaders_js_1.createHttpHeaders; } }); var pipelineRequest_js_1 = require_pipelineRequest(); Object.defineProperty(exports, "createPipelineRequest", { enumerable: true, get: function() { return pipelineRequest_js_1.createPipelineRequest; } }); var restError_js_1 = require_restError(); Object.defineProperty(exports, "RestError", { enumerable: true, get: function() { return restError_js_1.RestError; } }); Object.defineProperty(exports, "isRestError", { enumerable: true, get: function() { return restError_js_1.isRestError; } }); var decompressResponsePolicy_js_1 = require_decompressResponsePolicy(); Object.defineProperty(exports, "decompressResponsePolicy", { enumerable: true, get: function() { return decompressResponsePolicy_js_1.decompressResponsePolicy; } }); 
Object.defineProperty(exports, "decompressResponsePolicyName", { enumerable: true, get: function() { return decompressResponsePolicy_js_1.decompressResponsePolicyName; } }); var exponentialRetryPolicy_js_1 = require_exponentialRetryPolicy(); Object.defineProperty(exports, "exponentialRetryPolicy", { enumerable: true, get: function() { return exponentialRetryPolicy_js_1.exponentialRetryPolicy; } }); Object.defineProperty(exports, "exponentialRetryPolicyName", { enumerable: true, get: function() { return exponentialRetryPolicy_js_1.exponentialRetryPolicyName; } }); var setClientRequestIdPolicy_js_1 = require_setClientRequestIdPolicy(); Object.defineProperty(exports, "setClientRequestIdPolicy", { enumerable: true, get: function() { return setClientRequestIdPolicy_js_1.setClientRequestIdPolicy; } }); Object.defineProperty(exports, "setClientRequestIdPolicyName", { enumerable: true, get: function() { return setClientRequestIdPolicy_js_1.setClientRequestIdPolicyName; } }); var logPolicy_js_1 = require_logPolicy(); Object.defineProperty(exports, "logPolicy", { enumerable: true, get: function() { return logPolicy_js_1.logPolicy; } }); Object.defineProperty(exports, "logPolicyName", { enumerable: true, get: function() { return logPolicy_js_1.logPolicyName; } }); var multipartPolicy_js_1 = require_multipartPolicy(); Object.defineProperty(exports, "multipartPolicy", { enumerable: true, get: function() { return multipartPolicy_js_1.multipartPolicy; } }); Object.defineProperty(exports, "multipartPolicyName", { enumerable: true, get: function() { return multipartPolicy_js_1.multipartPolicyName; } }); var proxyPolicy_js_1 = require_proxyPolicy(); Object.defineProperty(exports, "proxyPolicy", { enumerable: true, get: function() { return proxyPolicy_js_1.proxyPolicy; } }); Object.defineProperty(exports, "proxyPolicyName", { enumerable: true, get: function() { return proxyPolicy_js_1.proxyPolicyName; } }); Object.defineProperty(exports, "getDefaultProxySettings", { enumerable: true, get: function() { return proxyPolicy_js_1.getDefaultProxySettings; } }); var redirectPolicy_js_1 = require_redirectPolicy(); Object.defineProperty(exports, "redirectPolicy", { enumerable: true, get: function() { return redirectPolicy_js_1.redirectPolicy; } }); Object.defineProperty(exports, "redirectPolicyName", { enumerable: true, get: function() { return redirectPolicy_js_1.redirectPolicyName; } }); var systemErrorRetryPolicy_js_1 = require_systemErrorRetryPolicy(); Object.defineProperty(exports, "systemErrorRetryPolicy", { enumerable: true, get: function() { return systemErrorRetryPolicy_js_1.systemErrorRetryPolicy; } }); Object.defineProperty(exports, "systemErrorRetryPolicyName", { enumerable: true, get: function() { return systemErrorRetryPolicy_js_1.systemErrorRetryPolicyName; } }); var throttlingRetryPolicy_js_1 = require_throttlingRetryPolicy(); Object.defineProperty(exports, "throttlingRetryPolicy", { enumerable: true, get: function() { return throttlingRetryPolicy_js_1.throttlingRetryPolicy; } }); Object.defineProperty(exports, "throttlingRetryPolicyName", { enumerable: true, get: function() { return throttlingRetryPolicy_js_1.throttlingRetryPolicyName; } }); var retryPolicy_js_1 = require_retryPolicy(); Object.defineProperty(exports, "retryPolicy", { enumerable: true, get: function() { return retryPolicy_js_1.retryPolicy; } }); var tracingPolicy_js_1 = require_tracingPolicy(); Object.defineProperty(exports, "tracingPolicy", { enumerable: true, get: function() { return tracingPolicy_js_1.tracingPolicy; } }); 
Object.defineProperty(exports, "tracingPolicyName", { enumerable: true, get: function() { return tracingPolicy_js_1.tracingPolicyName; } }); var defaultRetryPolicy_js_1 = require_defaultRetryPolicy(); Object.defineProperty(exports, "defaultRetryPolicy", { enumerable: true, get: function() { return defaultRetryPolicy_js_1.defaultRetryPolicy; } }); var userAgentPolicy_js_1 = require_userAgentPolicy(); Object.defineProperty(exports, "userAgentPolicy", { enumerable: true, get: function() { return userAgentPolicy_js_1.userAgentPolicy; } }); Object.defineProperty(exports, "userAgentPolicyName", { enumerable: true, get: function() { return userAgentPolicy_js_1.userAgentPolicyName; } }); var tlsPolicy_js_1 = require_tlsPolicy(); Object.defineProperty(exports, "tlsPolicy", { enumerable: true, get: function() { return tlsPolicy_js_1.tlsPolicy; } }); Object.defineProperty(exports, "tlsPolicyName", { enumerable: true, get: function() { return tlsPolicy_js_1.tlsPolicyName; } }); var formDataPolicy_js_1 = require_formDataPolicy(); Object.defineProperty(exports, "formDataPolicy", { enumerable: true, get: function() { return formDataPolicy_js_1.formDataPolicy; } }); Object.defineProperty(exports, "formDataPolicyName", { enumerable: true, get: function() { return formDataPolicy_js_1.formDataPolicyName; } }); var bearerTokenAuthenticationPolicy_js_1 = require_bearerTokenAuthenticationPolicy(); Object.defineProperty(exports, "bearerTokenAuthenticationPolicy", { enumerable: true, get: function() { return bearerTokenAuthenticationPolicy_js_1.bearerTokenAuthenticationPolicy; } }); Object.defineProperty(exports, "bearerTokenAuthenticationPolicyName", { enumerable: true, get: function() { return bearerTokenAuthenticationPolicy_js_1.bearerTokenAuthenticationPolicyName; } }); var ndJsonPolicy_js_1 = require_ndJsonPolicy(); Object.defineProperty(exports, "ndJsonPolicy", { enumerable: true, get: function() { return ndJsonPolicy_js_1.ndJsonPolicy; } }); Object.defineProperty(exports, "ndJsonPolicyName", { enumerable: true, get: function() { return ndJsonPolicy_js_1.ndJsonPolicyName; } }); var auxiliaryAuthenticationHeaderPolicy_js_1 = require_auxiliaryAuthenticationHeaderPolicy(); Object.defineProperty(exports, "auxiliaryAuthenticationHeaderPolicy", { enumerable: true, get: function() { return auxiliaryAuthenticationHeaderPolicy_js_1.auxiliaryAuthenticationHeaderPolicy; } }); Object.defineProperty(exports, "auxiliaryAuthenticationHeaderPolicyName", { enumerable: true, get: function() { return auxiliaryAuthenticationHeaderPolicy_js_1.auxiliaryAuthenticationHeaderPolicyName; } }); var agentPolicy_js_1 = require_agentPolicy(); Object.defineProperty(exports, "agentPolicy", { enumerable: true, get: function() { return agentPolicy_js_1.agentPolicy; } }); Object.defineProperty(exports, "agentPolicyName", { enumerable: true, get: function() { return agentPolicy_js_1.agentPolicyName; } }); var file_js_1 = require_file(); Object.defineProperty(exports, "createFile", { enumerable: true, get: function() { return file_js_1.createFile; } }); Object.defineProperty(exports, "createFileFromStream", { enumerable: true, get: function() { return file_js_1.createFileFromStream; } }); } }); //#endregion //#region node_modules/.deno/@azure+core-auth@1.9.0/node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.js var require_azureKeyCredential = __commonJS({ "node_modules/.deno/@azure+core-auth@1.9.0/node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.js"(exports) { Object.defineProperty(exports, "__esModule", { value: 
true }); exports.AzureKeyCredential = void 0; /** * A static-key-based credential that supports updating * the underlying key value. */ var AzureKeyCredential = class { /** * The value of the key to be used in authentication */ get key() { return this._key; } /** * Create an instance of an AzureKeyCredential for use * with a service client. * * @param key - The initial value of the key to use in authentication */ constructor(key) { if (!key) throw new Error("key must be a non-empty string"); this._key = key; } /** * Change the value of the key. * * Updates will take effect upon the next request after * updating the key value. * * @param newKey - The new key value to be used */ update(newKey) { this._key = newKey; } }; exports.AzureKeyCredential = AzureKeyCredential; } }); //#endregion //#region node_modules/.deno/@azure+core-auth@1.9.0/node_modules/@azure/core-auth/dist/commonjs/keyCredential.js var require_keyCredential = __commonJS({ "node_modules/.deno/@azure+core-auth@1.9.0/node_modules/@azure/core-auth/dist/commonjs/keyCredential.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.isKeyCredential = isKeyCredential; const core_util_1$3 = require_commonjs$9(); /** * Tests an object to determine whether it implements KeyCredential. * * @param credential - The assumed KeyCredential to be tested. */ function isKeyCredential(credential) { return (0, core_util_1$3.isObjectWithProperties)(credential, ["key"]) && typeof credential.key === "string"; } } }); //#endregion //#region node_modules/.deno/@azure+core-auth@1.9.0/node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.js var require_azureNamedKeyCredential = __commonJS({ "node_modules/.deno/@azure+core-auth@1.9.0/node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.AzureNamedKeyCredential = void 0; exports.isNamedKeyCredential = isNamedKeyCredential; const core_util_1$2 = require_commonjs$9(); /** * A static name/key-based credential that supports updating * the underlying name and key values. */ var AzureNamedKeyCredential = class { /** * The value of the key to be used in authentication. */ get key() { return this._key; } /** * The value of the name to be used in authentication. */ get name() { return this._name; } /** * Create an instance of an AzureNamedKeyCredential for use * with a service client. * * @param name - The initial value of the name to use in authentication. * @param key - The initial value of the key to use in authentication. */ constructor(name, key) { if (!name || !key) throw new TypeError("name and key must be non-empty strings"); this._name = name; this._key = key; } /** * Change the values of both the name and the key. * * Updates will take effect upon the next request after * updating the name and key values. * * @param newName - The new name value to be used. * @param newKey - The new key value to be used. */ update(newName, newKey) { if (!newName || !newKey) throw new TypeError("newName and newKey must be non-empty strings"); this._name = newName; this._key = newKey; } }; exports.AzureNamedKeyCredential = AzureNamedKeyCredential; /** * Tests an object to determine whether it implements NamedKeyCredential. * * @param credential - The assumed NamedKeyCredential to be tested.
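 *
 * A minimal behavior sketch (the name/key literals are illustrative only):
 *
 *     isNamedKeyCredential(new AzureNamedKeyCredential("account", "secret")); // => true
 *     isNamedKeyCredential({ name: "account" }); // => false, no string `key`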
*/ function isNamedKeyCredential(credential) { return (0, core_util_1$2.isObjectWithProperties)(credential, ["name", "key"]) && typeof credential.key === "string" && typeof credential.name === "string"; } } }); //#endregion //#region node_modules/.deno/@azure+core-auth@1.9.0/node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.js var require_azureSASCredential = __commonJS({ "node_modules/.deno/@azure+core-auth@1.9.0/node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.AzureSASCredential = void 0; exports.isSASCredential = isSASCredential; const core_util_1$1 = require_commonjs$9(); /** * A static-signature-based credential that supports updating * the underlying signature value. */ var AzureSASCredential = class { /** * The value of the shared access signature to be used in authentication */ get signature() { return this._signature; } /** * Create an instance of an AzureSASCredential for use * with a service client. * * @param signature - The initial value of the shared access signature to use in authentication */ constructor(signature) { if (!signature) throw new Error("shared access signature must be a non-empty string"); this._signature = signature; } /** * Change the value of the signature. * * Updates will take effect upon the next request after * updating the signature value. * * @param newSignature - The new shared access signature value to be used */ update(newSignature) { if (!newSignature) throw new Error("shared access signature must be a non-empty string"); this._signature = newSignature; } }; exports.AzureSASCredential = AzureSASCredential; /** * Tests an object to determine whether it implements SASCredential. * * @param credential - The assumed SASCredential to be tested. */ function isSASCredential(credential) { return (0, core_util_1$1.isObjectWithProperties)(credential, ["signature"]) && typeof credential.signature === "string"; } } }); //#endregion //#region node_modules/.deno/@azure+core-auth@1.9.0/node_modules/@azure/core-auth/dist/commonjs/tokenCredential.js var require_tokenCredential = __commonJS({ "node_modules/.deno/@azure+core-auth@1.9.0/node_modules/@azure/core-auth/dist/commonjs/tokenCredential.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.isBearerToken = isBearerToken; exports.isPopToken = isPopToken; exports.isTokenCredential = isTokenCredential; /** * @internal * @param accessToken - Access token * @returns Whether a token is bearer type or not */ function isBearerToken(accessToken) { return !accessToken.tokenType || accessToken.tokenType === "Bearer"; } /** * @internal * @param accessToken - Access token * @returns Whether a token is Pop token or not */ function isPopToken(accessToken) { return accessToken.tokenType === "pop"; } /** * Tests an object to determine whether it implements TokenCredential. * * @param credential - The assumed TokenCredential to be tested. 
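 *
 * A rough sketch of the check (both object literals below are illustrative stubs):
 *
 *     isTokenCredential({ getToken: (scopes, options) => Promise.resolve(null) }); // => true
 *     isTokenCredential({ signRequest: () => {}, getToken: () => Promise.resolve(null) }); // => false, legacy signer shape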
*/ function isTokenCredential(credential) { const castCredential = credential; return castCredential && typeof castCredential.getToken === "function" && (castCredential.signRequest === void 0 || castCredential.getToken.length > 0); } } }); //#endregion //#region node_modules/.deno/@azure+core-auth@1.9.0/node_modules/@azure/core-auth/dist/commonjs/index.js var require_commonjs$6 = __commonJS({ "node_modules/.deno/@azure+core-auth@1.9.0/node_modules/@azure/core-auth/dist/commonjs/index.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.isTokenCredential = exports.isSASCredential = exports.AzureSASCredential = exports.isNamedKeyCredential = exports.AzureNamedKeyCredential = exports.isKeyCredential = exports.AzureKeyCredential = void 0; var azureKeyCredential_js_1 = require_azureKeyCredential(); Object.defineProperty(exports, "AzureKeyCredential", { enumerable: true, get: function() { return azureKeyCredential_js_1.AzureKeyCredential; } }); var keyCredential_js_1 = require_keyCredential(); Object.defineProperty(exports, "isKeyCredential", { enumerable: true, get: function() { return keyCredential_js_1.isKeyCredential; } }); var azureNamedKeyCredential_js_1 = require_azureNamedKeyCredential(); Object.defineProperty(exports, "AzureNamedKeyCredential", { enumerable: true, get: function() { return azureNamedKeyCredential_js_1.AzureNamedKeyCredential; } }); Object.defineProperty(exports, "isNamedKeyCredential", { enumerable: true, get: function() { return azureNamedKeyCredential_js_1.isNamedKeyCredential; } }); var azureSASCredential_js_1 = require_azureSASCredential(); Object.defineProperty(exports, "AzureSASCredential", { enumerable: true, get: function() { return azureSASCredential_js_1.AzureSASCredential; } }); Object.defineProperty(exports, "isSASCredential", { enumerable: true, get: function() { return azureSASCredential_js_1.isSASCredential; } }); var tokenCredential_js_1 = require_tokenCredential(); Object.defineProperty(exports, "isTokenCredential", { enumerable: true, get: function() { return tokenCredential_js_1.isTokenCredential; } }); } }); //#endregion //#region node_modules/.deno/@azure+core-http-compat@2.3.0/node_modules/@azure/core-http-compat/dist/commonjs/policies/disableKeepAlivePolicy.js var require_disableKeepAlivePolicy = __commonJS({ "node_modules/.deno/@azure+core-http-compat@2.3.0/node_modules/@azure/core-http-compat/dist/commonjs/policies/disableKeepAlivePolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.disableKeepAlivePolicyName = void 0; exports.createDisableKeepAlivePolicy = createDisableKeepAlivePolicy; exports.pipelineContainsDisableKeepAlivePolicy = pipelineContainsDisableKeepAlivePolicy; exports.disableKeepAlivePolicyName = "DisableKeepAlivePolicy"; function createDisableKeepAlivePolicy() { return { name: exports.disableKeepAlivePolicyName, async sendRequest(request, next) { request.disableKeepAlive = true; return next(request); } }; } /** * @internal */ function pipelineContainsDisableKeepAlivePolicy(pipeline) { return pipeline.getOrderedPolicies().some((policy) => policy.name === exports.disableKeepAlivePolicyName); } } }); //#endregion //#region node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/base64.js var require_base64$1 = __commonJS({ "node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/base64.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.encodeString = 
encodeString; exports.encodeByteArray = encodeByteArray; exports.decodeString = decodeString; exports.decodeStringToString = decodeStringToString; /** * Encodes a string in base64 format. * @param value - the string to encode * @internal */ function encodeString(value) { return Buffer.from(value).toString("base64"); } /** * Encodes a byte array in base64 format. * @param value - the Uint8Array to encode * @internal */ function encodeByteArray(value) { const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); return bufferValue.toString("base64"); } /** * Decodes a base64 string into a byte array. * @param value - the base64 string to decode * @internal */ function decodeString(value) { return Buffer.from(value, "base64"); } /** * Decodes a base64 string into a string. * @param value - the base64 string to decode * @internal */ function decodeStringToString(value) { return Buffer.from(value, "base64").toString(); } } }); //#endregion //#region node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/interfaces.js var require_interfaces = __commonJS({ "node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/interfaces.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.XML_CHARKEY = exports.XML_ATTRKEY = void 0; /** * Default key used to access the XML attributes. */ exports.XML_ATTRKEY = "$"; /** * Default key used to access the XML value content. */ exports.XML_CHARKEY = "_"; } }); //#endregion //#region node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/utils.js var require_utils = __commonJS({ "node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/utils.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.isPrimitiveBody = isPrimitiveBody; exports.isDuration = isDuration; exports.isValidUuid = isValidUuid; exports.flattenResponse = flattenResponse; /** * A type guard for a primitive response body. * @param value - Value to test * * @internal */ function isPrimitiveBody(value, mapperTypeName) { return mapperTypeName !== "Composite" && mapperTypeName !== "Dictionary" && (typeof value === "string" || typeof value === "number" || typeof value === "boolean" || (mapperTypeName === null || mapperTypeName === void 0 ? void 0 : mapperTypeName.match(/^(Date|DateTime|DateTimeRfc1123|UnixTime|ByteArray|Base64Url)$/i)) !== null || value === void 0 || value === null); } const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; /** * Returns true if the given string is in ISO 8601 format. * @param value - The value to be validated for ISO 8601 duration format. * @internal */ function isDuration(value) { return validateISODuration.test(value); } const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; /** * Returns true if the provided uuid is valid. * * @param uuid - The uuid that needs to be validated. * * @internal */ function isValidUuid(uuid) { return validUuidRegex.test(uuid); } /** * Maps the response as follows: * - wraps the response body if needed (typically if its type is primitive). * - returns null if the combination of the headers and the body is empty. * - otherwise, returns the combination of the headers and the body.
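 * - for example (illustrative values): `{ headers: { etag: "x" }, body: 42, shouldWrapBody: true }`
 *   yields `{ etag: "x", body: 42 }`, while an empty headers/body combination with
 *   `hasNullableType: true` yields `null` (or `{ body: null }` when `shouldWrapBody` is set).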
* * @param responseObject - a representation of the parsed response * @returns the response that will be returned to the user which can be null and/or wrapped * * @internal */ function handleNullableResponseAndWrappableBody(responseObject) { const combinedHeadersAndBody = Object.assign(Object.assign({}, responseObject.headers), responseObject.body); if (responseObject.hasNullableType && Object.getOwnPropertyNames(combinedHeadersAndBody).length === 0) return responseObject.shouldWrapBody ? { body: null } : null; else return responseObject.shouldWrapBody ? Object.assign(Object.assign({}, responseObject.headers), { body: responseObject.body }) : combinedHeadersAndBody; } /** * Take a `FullOperationResponse` and turn it into a flat * response object to hand back to the consumer. * @param fullResponse - The processed response from the operation request * @param responseSpec - The response map from the OperationSpec * * @internal */ function flattenResponse(fullResponse, responseSpec) { var _a$2, _b$1; const parsedHeaders = fullResponse.parsedHeaders; if (fullResponse.request.method === "HEAD") return Object.assign(Object.assign({}, parsedHeaders), { body: fullResponse.parsedBody }); const bodyMapper = responseSpec && responseSpec.bodyMapper; const isNullable = Boolean(bodyMapper === null || bodyMapper === void 0 ? void 0 : bodyMapper.nullable); const expectedBodyTypeName = bodyMapper === null || bodyMapper === void 0 ? void 0 : bodyMapper.type.name; /** If the body is asked for, we look at the expected body type to handle it */ if (expectedBodyTypeName === "Stream") return Object.assign(Object.assign({}, parsedHeaders), { blobBody: fullResponse.blobBody, readableStreamBody: fullResponse.readableStreamBody }); const modelProperties = expectedBodyTypeName === "Composite" && bodyMapper.type.modelProperties || {}; const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); if (expectedBodyTypeName === "Sequence" || isPageableResponse) { const arrayResponse = (_a$2 = fullResponse.parsedBody) !== null && _a$2 !== void 0 ? _a$2 : []; for (const key of Object.keys(modelProperties)) if (modelProperties[key].serializedName) arrayResponse[key] = (_b$1 = fullResponse.parsedBody) === null || _b$1 === void 0 ? void 0 : _b$1[key]; if (parsedHeaders) for (const key of Object.keys(parsedHeaders)) arrayResponse[key] = parsedHeaders[key]; return isNullable && !fullResponse.parsedBody && !parsedHeaders && Object.getOwnPropertyNames(modelProperties).length === 0 ? 
null : arrayResponse; } return handleNullableResponseAndWrappableBody({ body: fullResponse.parsedBody, headers: parsedHeaders, hasNullableType: isNullable, shouldWrapBody: isPrimitiveBody(fullResponse.parsedBody, expectedBodyTypeName) }); } } }); //#endregion //#region node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/serializer.js var require_serializer = __commonJS({ "node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/serializer.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.MapperTypeNames = void 0; exports.createSerializer = createSerializer; const tslib_1$1 = require_tslib(); const base64 = tslib_1$1.__importStar(require_base64$1()); const interfaces_js_1$3 = require_interfaces(); const utils_js_1$1 = require_utils(); var SerializerImpl = class { constructor(modelMappers = {}, isXML = false) { this.modelMappers = modelMappers; this.isXML = isXML; } /** * @deprecated Removing the constraints validation on client side. */ validateConstraints(mapper, value, objectName) { const failValidation = (constraintName, constraintValue) => { throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); }; if (mapper.constraints && value !== void 0 && value !== null) { const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern: Pattern$2, UniqueItems } = mapper.constraints; if (ExclusiveMaximum !== void 0 && value >= ExclusiveMaximum) failValidation("ExclusiveMaximum", ExclusiveMaximum); if (ExclusiveMinimum !== void 0 && value <= ExclusiveMinimum) failValidation("ExclusiveMinimum", ExclusiveMinimum); if (InclusiveMaximum !== void 0 && value > InclusiveMaximum) failValidation("InclusiveMaximum", InclusiveMaximum); if (InclusiveMinimum !== void 0 && value < InclusiveMinimum) failValidation("InclusiveMinimum", InclusiveMinimum); if (MaxItems !== void 0 && value.length > MaxItems) failValidation("MaxItems", MaxItems); if (MaxLength !== void 0 && value.length > MaxLength) failValidation("MaxLength", MaxLength); if (MinItems !== void 0 && value.length < MinItems) failValidation("MinItems", MinItems); if (MinLength !== void 0 && value.length < MinLength) failValidation("MinLength", MinLength); if (MultipleOf !== void 0 && value % MultipleOf !== 0) failValidation("MultipleOf", MultipleOf); if (Pattern$2) { const pattern = typeof Pattern$2 === "string" ? new RegExp(Pattern$2) : Pattern$2; if (typeof value !== "string" || value.match(pattern) === null) failValidation("Pattern", Pattern$2); } if (UniqueItems && value.some((item, i, ar) => ar.indexOf(item) !== i)) failValidation("UniqueItems", UniqueItems); } } /** * Serialize the given object based on its metadata defined in the mapper * * @param mapper - The mapper which defines the metadata of the serializable object * * @param object - A valid Javascript object to be serialized * * @param objectName - Name of the serialized object * * @param options - additional options to serialization * * @returns A valid serialized Javascript object */ serialize(mapper, object, objectName, options = { xml: {} }) { var _a$2, _b$1, _c$1; const updatedOptions = { xml: { rootName: (_a$2 = options.xml.rootName) !== null && _a$2 !== void 0 ? _a$2 : "", includeRoot: (_b$1 = options.xml.includeRoot) !== null && _b$1 !== void 0 ? 
_b$1 : false, xmlCharKey: (_c$1 = options.xml.xmlCharKey) !== null && _c$1 !== void 0 ? _c$1 : interfaces_js_1$3.XML_CHARKEY } }; let payload = {}; const mapperType = mapper.type.name; if (!objectName) objectName = mapper.serializedName; if (mapperType.match(/^Sequence$/i) !== null) payload = []; if (mapper.isConstant) object = mapper.defaultValue; const { required, nullable } = mapper; if (required && nullable && object === void 0) throw new Error(`${objectName} cannot be undefined.`); if (required && !nullable && (object === void 0 || object === null)) throw new Error(`${objectName} cannot be null or undefined.`); if (!required && nullable === false && object === null) throw new Error(`${objectName} cannot be null.`); if (object === void 0 || object === null) payload = object; else if (mapperType.match(/^any$/i) !== null) payload = object; else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) payload = serializeBasicTypes(mapperType, objectName, object); else if (mapperType.match(/^Enum$/i) !== null) { const enumMapper = mapper; payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); } else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) payload = serializeDateTypes(mapperType, object, objectName); else if (mapperType.match(/^ByteArray$/i) !== null) payload = serializeByteArrayType(objectName, object); else if (mapperType.match(/^Base64Url$/i) !== null) payload = serializeBase64UrlType(objectName, object); else if (mapperType.match(/^Sequence$/i) !== null) payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); else if (mapperType.match(/^Dictionary$/i) !== null) payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); else if (mapperType.match(/^Composite$/i) !== null) payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); return payload; } /** * Deserialize the given object based on its metadata defined in the mapper * * @param mapper - The mapper which defines the metadata of the serializable object * * @param responseBody - A valid Javascript entity to be deserialized * * @param objectName - Name of the deserialized object * * @param options - Controls behavior of XML parser and builder. * * @returns A valid deserialized Javascript object */ deserialize(mapper, responseBody, objectName, options = { xml: {} }) { var _a$2, _b$1, _c$1, _d$1; const updatedOptions = { xml: { rootName: (_a$2 = options.xml.rootName) !== null && _a$2 !== void 0 ? _a$2 : "", includeRoot: (_b$1 = options.xml.includeRoot) !== null && _b$1 !== void 0 ? _b$1 : false, xmlCharKey: (_c$1 = options.xml.xmlCharKey) !== null && _c$1 !== void 0 ? _c$1 : interfaces_js_1$3.XML_CHARKEY }, ignoreUnknownProperties: (_d$1 = options.ignoreUnknownProperties) !== null && _d$1 !== void 0 ? 
_d$1 : false }; if (responseBody === void 0 || responseBody === null) { if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) responseBody = []; if (mapper.defaultValue !== void 0) responseBody = mapper.defaultValue; return responseBody; } let payload; const mapperType = mapper.type.name; if (!objectName) objectName = mapper.serializedName; if (mapperType.match(/^Composite$/i) !== null) payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); else { if (this.isXML) { const xmlCharKey = updatedOptions.xml.xmlCharKey; /** * If the mapper specifies this as a non-composite type value but the responseBody contains * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property. */ if (responseBody[interfaces_js_1$3.XML_ATTRKEY] !== void 0 && responseBody[xmlCharKey] !== void 0) responseBody = responseBody[xmlCharKey]; } if (mapperType.match(/^Number$/i) !== null) { payload = parseFloat(responseBody); if (isNaN(payload)) payload = responseBody; } else if (mapperType.match(/^Boolean$/i) !== null) if (responseBody === "true") payload = true; else if (responseBody === "false") payload = false; else payload = responseBody; else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) payload = responseBody; else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) payload = new Date(responseBody); else if (mapperType.match(/^UnixTime$/i) !== null) payload = unixTimeToDate(responseBody); else if (mapperType.match(/^ByteArray$/i) !== null) payload = base64.decodeString(responseBody); else if (mapperType.match(/^Base64Url$/i) !== null) payload = base64UrlToByteArray(responseBody); else if (mapperType.match(/^Sequence$/i) !== null) payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); else if (mapperType.match(/^Dictionary$/i) !== null) payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); } if (mapper.isConstant) payload = mapper.defaultValue; return payload; } }; /** * Method that creates and returns a Serializer. 
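 *
 * A minimal usage sketch (hedged: `WidgetMapper` is a hypothetical composite mapper of the
 * kind emitted by generated client code, not something defined in this module):
 *
 *     const serializer = createSerializer({ Widget: WidgetMapper }, false);
 *     const wire = serializer.serialize(WidgetMapper, { name: "w1" }, "widget");
 *     const model = serializer.deserialize(WidgetMapper, wire, "widget");
 *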
* @param modelMappers - Known models to map * @param isXML - If XML should be supported */ function createSerializer(modelMappers = {}, isXML = false) { return new SerializerImpl(modelMappers, isXML); } function trimEnd(str, ch) { let len = str.length; while (len - 1 >= 0 && str[len - 1] === ch) --len; return str.substr(0, len); } function bufferToBase64Url(buffer$2) { if (!buffer$2) return void 0; if (!(buffer$2 instanceof Uint8Array)) throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); const str = base64.encodeByteArray(buffer$2); return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); } function base64UrlToByteArray(str) { if (!str) return void 0; if (str && typeof str.valueOf() !== "string") throw new Error("Please provide an input of type string for converting to Uint8Array"); str = str.replace(/-/g, "+").replace(/_/g, "/"); return base64.decodeString(str); } function splitSerializeName(prop) { const classes = []; let partialclass = ""; if (prop) { const subwords = prop.split("."); for (const item of subwords) if (item.charAt(item.length - 1) === "\\") partialclass += item.substr(0, item.length - 1) + "."; else { partialclass += item; classes.push(partialclass); partialclass = ""; } } return classes; } function dateToUnixTime(d$1) { if (!d$1) return void 0; if (typeof d$1.valueOf() === "string") d$1 = new Date(d$1); return Math.floor(d$1.getTime() / 1e3); } function unixTimeToDate(n) { if (!n) return void 0; return new Date(n * 1e3); } function serializeBasicTypes(typeName, objectName, value) { if (value !== null && value !== void 0) { if (typeName.match(/^Number$/i) !== null) { if (typeof value !== "number") throw new Error(`${objectName} with value ${value} must be of type number.`); } else if (typeName.match(/^String$/i) !== null) { if (typeof value.valueOf() !== "string") throw new Error(`${objectName} with value "${value}" must be of type string.`); } else if (typeName.match(/^Uuid$/i) !== null) { if (!(typeof value.valueOf() === "string" && (0, utils_js_1$1.isValidUuid)(value))) throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); } else if (typeName.match(/^Boolean$/i) !== null) { if (typeof value !== "boolean") throw new Error(`${objectName} with value ${value} must be of type boolean.`); } else if (typeName.match(/^Stream$/i) !== null) { const objectType = typeof value; if (objectType !== "string" && typeof value.pipe !== "function" && typeof value.tee !== "function" && !(value instanceof ArrayBuffer) && !ArrayBuffer.isView(value) && !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob) && objectType !== "function") throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, ReadableStream, or () => ReadableStream.`); } } return value; } function serializeEnumType(objectName, allowedValues, value) { if (!allowedValues) throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); const isPresent = allowedValues.some((item) => { if (typeof item.valueOf() === "string") return item.toLowerCase() === value.toLowerCase(); return item === value; }); if (!isPresent) throw new Error(`${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify(allowedValues)}.`); return value; } function serializeByteArrayType(objectName, value) { if (value !== void 0 && value !== null) { if (!(value instanceof Uint8Array)) throw new Error(`${objectName} must be of type Uint8Array.`); value = base64.encodeByteArray(value); } return value; } function serializeBase64UrlType(objectName, value) { if (value !== void 0 && value !== null) { if (!(value instanceof Uint8Array)) throw new Error(`${objectName} must be of type Uint8Array.`); value = bufferToBase64Url(value); } return value; } function serializeDateTypes(typeName, value, objectName) { if (value !== void 0 && value !== null) { if (typeName.match(/^Date$/i) !== null) { if (!(value instanceof Date || typeof value.valueOf() === "string" && !isNaN(Date.parse(value)))) throw new Error(`${objectName} must be an instance of Date or a string in ISO8601 format.`); value = value instanceof Date ? value.toISOString().substring(0, 10) : new Date(value).toISOString().substring(0, 10); } else if (typeName.match(/^DateTime$/i) !== null) { if (!(value instanceof Date || typeof value.valueOf() === "string" && !isNaN(Date.parse(value)))) throw new Error(`${objectName} must be an instance of Date or a string in ISO8601 format.`); value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); } else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { if (!(value instanceof Date || typeof value.valueOf() === "string" && !isNaN(Date.parse(value)))) throw new Error(`${objectName} must be an instance of Date or a string in RFC-1123 format.`); value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); } else if (typeName.match(/^UnixTime$/i) !== null) { if (!(value instanceof Date || typeof value.valueOf() === "string" && !isNaN(Date.parse(value)))) throw new Error(`${objectName} must be an instance of Date or a string in RFC-1123/ISO8601 format for it to be serialized in UnixTime/Epoch format.`); value = dateToUnixTime(value); } else if (typeName.match(/^TimeSpan$/i) !== null) { if (!(0, utils_js_1$1.isDuration)(value)) throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`); } } return value; } function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { var _a$2; if (!Array.isArray(object)) throw new Error(`${objectName} must be of type Array.`); let elementType = mapper.type.element; if (!elementType || typeof elementType !== "object") throw new Error(`"element" metadata for an Array must be defined in the mapper and it must be of type "object" in ${objectName}.`); if (elementType.type.name === "Composite" && elementType.type.className) elementType = (_a$2 = serializer.modelMappers[elementType.type.className]) !== null && _a$2 !== void 0 ? _a$2 : elementType; const tempArray = []; for (let i = 0; i < object.length; i++) { const serializedValue = serializer.serialize(elementType, object[i], objectName, options); if (isXml && elementType.xmlNamespace) { const xmlnsKey = elementType.xmlNamespacePrefix ?
`xmlns:${elementType.xmlNamespacePrefix}` : "xmlns"; if (elementType.type.name === "Composite") { tempArray[i] = Object.assign({}, serializedValue); tempArray[i][interfaces_js_1$3.XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; } else { tempArray[i] = {}; tempArray[i][options.xml.xmlCharKey] = serializedValue; tempArray[i][interfaces_js_1$3.XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; } } else tempArray[i] = serializedValue; } return tempArray; } function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { if (typeof object !== "object") throw new Error(`${objectName} must be of type object.`); const valueType = mapper.type.value; if (!valueType || typeof valueType !== "object") throw new Error(`"value" metadata for a Dictionary must be defined in the mapper and it must be of type "object" in ${objectName}.`); const tempDictionary = {}; for (const key of Object.keys(object)) { const serializedValue = serializer.serialize(valueType, object[key], objectName, options); tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); } if (isXml && mapper.xmlNamespace) { const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; const result = tempDictionary; result[interfaces_js_1$3.XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; return result; } return tempDictionary; } /** * Resolves the additionalProperties property from a referenced mapper * @param serializer - the serializer containing the entire set of mappers * @param mapper - the composite mapper to resolve * @param objectName - name of the object being serialized */ function resolveAdditionalProperties(serializer, mapper, objectName) { const additionalProperties = mapper.type.additionalProperties; if (!additionalProperties && mapper.type.className) { const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; } return additionalProperties; } /** * Finds the mapper referenced by className * @param serializer - the serializer containing the entire set of mappers * @param mapper - the composite mapper to resolve * @param objectName - name of the object being serialized */ function resolveReferencedMapper(serializer, mapper, objectName) { const className = mapper.type.className; if (!className) throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, void 0, 2)}".`); return serializer.modelMappers[className]; } /** * Resolves a composite mapper's modelProperties. * @param serializer - the serializer containing the entire set of mappers * @param mapper - the composite mapper to resolve */ function resolveModelProperties(serializer, mapper, objectName) { let modelProps = mapper.type.modelProperties; if (!modelProps) { const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); if (!modelMapper) throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); modelProps = modelMapper === null || modelMapper === void 0 ?
void 0 : modelMapper.type.modelProperties; if (!modelProps) throw new Error(`modelProperties cannot be null or undefined in the mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); } return modelProps; } function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); if (object !== void 0 && object !== null) { const payload = {}; const modelProps = resolveModelProperties(serializer, mapper, objectName); for (const key of Object.keys(modelProps)) { const propertyMapper = modelProps[key]; if (propertyMapper.readOnly) continue; let propName; let parentObject = payload; if (serializer.isXML) if (propertyMapper.xmlIsWrapped) propName = propertyMapper.xmlName; else propName = propertyMapper.xmlElementName || propertyMapper.xmlName; else { const paths = splitSerializeName(propertyMapper.serializedName); propName = paths.pop(); for (const pathName of paths) { const childObject = parentObject[pathName]; if ((childObject === void 0 || childObject === null) && (object[key] !== void 0 && object[key] !== null || propertyMapper.defaultValue !== void 0)) parentObject[pathName] = {}; parentObject = parentObject[pathName]; } } if (parentObject !== void 0 && parentObject !== null) { if (isXml && mapper.xmlNamespace) { const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; parentObject[interfaces_js_1$3.XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[interfaces_js_1$3.XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); } const propertyObjectName = propertyMapper.serializedName !== "" ? objectName + "." + propertyMapper.serializedName : objectName; let toSerialize = object[key]; const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); if (polymorphicDiscriminator && polymorphicDiscriminator.clientName === key && (toSerialize === void 0 || toSerialize === null)) toSerialize = mapper.serializedName; const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); if (serializedValue !== void 0 && propName !== void 0 && propName !== null) { const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); if (isXml && propertyMapper.xmlIsAttribute) { parentObject[interfaces_js_1$3.XML_ATTRKEY] = parentObject[interfaces_js_1$3.XML_ATTRKEY] || {}; parentObject[interfaces_js_1$3.XML_ATTRKEY][propName] = serializedValue; } else if (isXml && propertyMapper.xmlIsWrapped) parentObject[propName] = { [propertyMapper.xmlElementName]: value }; else parentObject[propName] = value; } } } const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); if (additionalPropertiesMapper) { const propNames = Object.keys(modelProps); for (const clientPropName in object) { const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); if (isAdditionalProperty) payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + "[\"" + clientPropName + "\"]", options); } } return payload; } return object; } function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { if (!isXml || !propertyMapper.xmlNamespace) return serializedValue; const xmlnsKey = propertyMapper.xmlNamespacePrefix ? 
`xmlns:${propertyMapper.xmlNamespacePrefix}` : "xmlns"; const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; if (["Composite"].includes(propertyMapper.type.name)) if (serializedValue[interfaces_js_1$3.XML_ATTRKEY]) return serializedValue; else { const result$1 = Object.assign({}, serializedValue); result$1[interfaces_js_1$3.XML_ATTRKEY] = xmlNamespace; return result$1; } const result = {}; result[options.xml.xmlCharKey] = serializedValue; result[interfaces_js_1$3.XML_ATTRKEY] = xmlNamespace; return result; } function isSpecialXmlProperty(propertyName, options) { return [interfaces_js_1$3.XML_ATTRKEY, options.xml.xmlCharKey].includes(propertyName); } function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { var _a$2, _b$1; const xmlCharKey = (_a$2 = options.xml.xmlCharKey) !== null && _a$2 !== void 0 ? _a$2 : interfaces_js_1$3.XML_CHARKEY; if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); const modelProps = resolveModelProperties(serializer, mapper, objectName); let instance = {}; const handledPropertyNames = []; for (const key of Object.keys(modelProps)) { const propertyMapper = modelProps[key]; const paths = splitSerializeName(modelProps[key].serializedName); handledPropertyNames.push(paths[0]); const { serializedName, xmlName, xmlElementName } = propertyMapper; let propertyObjectName = objectName; if (serializedName !== "" && serializedName !== void 0) propertyObjectName = objectName + "." + serializedName; const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; if (headerCollectionPrefix) { const dictionary = {}; for (const headerKey of Object.keys(responseBody)) { if (headerKey.startsWith(headerCollectionPrefix)) dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); handledPropertyNames.push(headerKey); } instance[key] = dictionary; } else if (serializer.isXML) if (propertyMapper.xmlIsAttribute && responseBody[interfaces_js_1$3.XML_ATTRKEY]) instance[key] = serializer.deserialize(propertyMapper, responseBody[interfaces_js_1$3.XML_ATTRKEY][xmlName], propertyObjectName, options); else if (propertyMapper.xmlIsMsText) { if (responseBody[xmlCharKey] !== void 0) instance[key] = responseBody[xmlCharKey]; else if (typeof responseBody === "string") instance[key] = responseBody; } else { const propertyName = xmlElementName || xmlName || serializedName; if (propertyMapper.xmlIsWrapped) { const wrapped = responseBody[xmlName]; const elementList = (_b$1 = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _b$1 !== void 0 ? 
_b$1 : []; instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); handledPropertyNames.push(xmlName); } else { const property = responseBody[propertyName]; instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); handledPropertyNames.push(propertyName); } } else { let propertyInstance; let res = responseBody; let steps = 0; for (const item of paths) { if (!res) break; steps++; res = res[item]; } if (res === null && steps < paths.length) res = void 0; propertyInstance = res; const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; if (polymorphicDiscriminator && key === polymorphicDiscriminator.clientName && (propertyInstance === void 0 || propertyInstance === null)) propertyInstance = mapper.serializedName; let serializedValue; if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { propertyInstance = responseBody[key]; const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); for (const [k, v] of Object.entries(instance)) if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) arrayInstance[k] = v; instance = arrayInstance; } else if (propertyInstance !== void 0 || propertyMapper.defaultValue !== void 0) { serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); instance[key] = serializedValue; } } } const additionalPropertiesMapper = mapper.type.additionalProperties; if (additionalPropertiesMapper) { const isAdditionalProperty = (responsePropName) => { for (const clientPropName in modelProps) { const paths = splitSerializeName(modelProps[clientPropName].serializedName); if (paths[0] === responsePropName) return false; } return true; }; for (const responsePropName in responseBody) if (isAdditionalProperty(responsePropName)) instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + "[\"" + responsePropName + "\"]", options); } else if (responseBody && !options.ignoreUnknownProperties) { for (const key of Object.keys(responseBody)) if (instance[key] === void 0 && !handledPropertyNames.includes(key) && !isSpecialXmlProperty(key, options)) instance[key] = responseBody[key]; } return instance; } function deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { const value = mapper.type.value; if (!value || typeof value !== "object") throw new Error(`"value" metadata for a Dictionary must be defined in the mapper and it must be of type "object" in ${objectName}`); if (responseBody) { const tempDictionary = {}; for (const key of Object.keys(responseBody)) tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); return tempDictionary; } return responseBody; } function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { var _a$2; let element = mapper.type.element; if (!element || typeof element !== "object") throw new Error(`"element" metadata for an Array must be defined in the mapper and it must be of type "object" in ${objectName}`); if (responseBody) { if (!Array.isArray(responseBody)) responseBody = [responseBody]; if (element.type.name === "Composite" && element.type.className) element = (_a$2 = serializer.modelMappers[element.type.className]) !== null && _a$2 !== void 0 ?
_a$2 : element; const tempArray = []; for (let i = 0; i < responseBody.length; i++) tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); return tempArray; } return responseBody; } function getIndexDiscriminator(discriminators, discriminatorValue, typeName) { const typeNamesToCheck = [typeName]; while (typeNamesToCheck.length) { const currentName = typeNamesToCheck.shift(); const indexDiscriminator = discriminatorValue === currentName ? discriminatorValue : currentName + "." + discriminatorValue; if (Object.prototype.hasOwnProperty.call(discriminators, indexDiscriminator)) return discriminators[indexDiscriminator]; else for (const [name, mapper] of Object.entries(discriminators)) if (name.startsWith(currentName + ".") && mapper.type.uberParent === currentName && mapper.type.className) typeNamesToCheck.push(mapper.type.className); } return void 0; } function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { var _a$2; const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); if (polymorphicDiscriminator) { let discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; if (discriminatorName) { if (polymorphicPropertyName === "serializedName") discriminatorName = discriminatorName.replace(/\\/gi, ""); const discriminatorValue = object[discriminatorName]; const typeName = (_a$2 = mapper.type.uberParent) !== null && _a$2 !== void 0 ? _a$2 : mapper.type.className; if (typeof discriminatorValue === "string" && typeName) { const polymorphicMapper = getIndexDiscriminator(serializer.modelMappers.discriminators, discriminatorValue, typeName); if (polymorphicMapper) mapper = polymorphicMapper; } } } return mapper; } function getPolymorphicDiscriminatorRecursively(serializer, mapper) { return mapper.type.polymorphicDiscriminator || getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || getPolymorphicDiscriminatorSafely(serializer, mapper.type.className); } function getPolymorphicDiscriminatorSafely(serializer, typeName) { return typeName && serializer.modelMappers[typeName] && serializer.modelMappers[typeName].type.polymorphicDiscriminator; } /** * Known types of Mappers */ exports.MapperTypeNames = { Base64Url: "Base64Url", Boolean: "Boolean", ByteArray: "ByteArray", Composite: "Composite", Date: "Date", DateTime: "DateTime", DateTimeRfc1123: "DateTimeRfc1123", Dictionary: "Dictionary", Enum: "Enum", Number: "Number", Object: "Object", Sequence: "Sequence", String: "String", Stream: "Stream", TimeSpan: "TimeSpan", UnixTime: "UnixTime" }; } }); //#endregion //#region node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/state.js var require_state = __commonJS({ "node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/state.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.state = void 0; /** * Holds the singleton operationRequestMap, to be shared across CJS and ESM imports. 
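 *
 * Shape sketch (hedged; `request` stands for any pipeline request used as the WeakMap key,
 * and `spec` for its OperationSpec):
 *
 *     state.operationRequestMap.set(request, { operationSpec: spec });
 *     state.operationRequestMap.get(request); // => { operationSpec: spec }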
*/ exports.state = { operationRequestMap: new WeakMap() }; } }); //#endregion //#region node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/operationHelpers.js var require_operationHelpers = __commonJS({ "node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/operationHelpers.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.getOperationArgumentValueFromParameter = getOperationArgumentValueFromParameter; exports.getOperationRequestInfo = getOperationRequestInfo; const state_js_1 = require_state(); /** * @internal * Retrieves the value to use for a given operation argument * @param operationArguments - The arguments passed from the generated client * @param parameter - The parameter description * @param fallbackObject - If something isn't found in the arguments bag, look here. * Generally used to look at the service client properties. */ function getOperationArgumentValueFromParameter(operationArguments, parameter, fallbackObject) { let parameterPath = parameter.parameterPath; const parameterMapper = parameter.mapper; let value; if (typeof parameterPath === "string") parameterPath = [parameterPath]; if (Array.isArray(parameterPath)) { if (parameterPath.length > 0) if (parameterMapper.isConstant) value = parameterMapper.defaultValue; else { let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); if (!propertySearchResult.propertyFound && fallbackObject) propertySearchResult = getPropertyFromParameterPath(fallbackObject, parameterPath); let useDefaultValue = false; if (!propertySearchResult.propertyFound) useDefaultValue = parameterMapper.required || parameterPath[0] === "options" && parameterPath.length === 2; value = useDefaultValue ? 
parameterMapper.defaultValue : propertySearchResult.propertyValue; } } else { if (parameterMapper.required) value = {}; for (const propertyName in parameterPath) { const propertyMapper = parameterMapper.type.modelProperties[propertyName]; const propertyPath = parameterPath[propertyName]; const propertyValue = getOperationArgumentValueFromParameter(operationArguments, { parameterPath: propertyPath, mapper: propertyMapper }, fallbackObject); if (propertyValue !== void 0) { if (!value) value = {}; value[propertyName] = propertyValue; } } } return value; } function getPropertyFromParameterPath(parent, parameterPath) { const result = { propertyFound: false }; let i = 0; for (; i < parameterPath.length; ++i) { const parameterPathPart = parameterPath[i]; if (parent && parameterPathPart in parent) parent = parent[parameterPathPart]; else break; } if (i === parameterPath.length) { result.propertyValue = parent; result.propertyFound = true; } return result; } const originalRequestSymbol$1 = Symbol.for("@azure/core-client original request"); function hasOriginalRequest(request) { return originalRequestSymbol$1 in request; } function getOperationRequestInfo(request) { if (hasOriginalRequest(request)) return getOperationRequestInfo(request[originalRequestSymbol$1]); let info = state_js_1.state.operationRequestMap.get(request); if (!info) { info = {}; state_js_1.state.operationRequestMap.set(request, info); } return info; } } }); //#endregion //#region node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/deserializationPolicy.js var require_deserializationPolicy = __commonJS({ "node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/deserializationPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.deserializationPolicyName = void 0; exports.deserializationPolicy = deserializationPolicy; const interfaces_js_1$2 = require_interfaces(); const core_rest_pipeline_1$6 = require_commonjs$7(); const serializer_js_1$3 = require_serializer(); const operationHelpers_js_1$3 = require_operationHelpers(); const defaultJsonContentTypes = ["application/json", "text/json"]; const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; /** * The programmatic identifier of the deserializationPolicy. */ exports.deserializationPolicyName = "deserializationPolicy"; /** * This policy handles parsing out responses according to OperationSpecs on the request. */ function deserializationPolicy(options = {}) { var _a$2, _b$1, _c$1, _d$1, _e, _f, _g; const jsonContentTypes = (_b$1 = (_a$2 = options.expectedContentTypes) === null || _a$2 === void 0 ? void 0 : _a$2.json) !== null && _b$1 !== void 0 ? _b$1 : defaultJsonContentTypes; const xmlContentTypes = (_d$1 = (_c$1 = options.expectedContentTypes) === null || _c$1 === void 0 ? void 0 : _c$1.xml) !== null && _d$1 !== void 0 ? _d$1 : defaultXmlContentTypes; const parseXML$1 = options.parseXML; const serializerOptions = options.serializerOptions; const updatedOptions = { xml: { rootName: (_e = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.rootName) !== null && _e !== void 0 ? _e : "", includeRoot: (_f = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.includeRoot) !== null && _f !== void 0 ? _f : false, xmlCharKey: (_g = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.xmlCharKey) !== null && _g !== void 0 ? 
_g : interfaces_js_1$2.XML_CHARKEY } }; return { name: exports.deserializationPolicyName, async sendRequest(request, next) { const response = await next(request); return deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, updatedOptions, parseXML$1); } }; } function getOperationResponseMap(parsedResponse) { let result; const request = parsedResponse.request; const operationInfo = (0, operationHelpers_js_1$3.getOperationRequestInfo)(request); const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; if (operationSpec) if (!(operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationResponseGetter)) result = operationSpec.responses[parsedResponse.status]; else result = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationResponseGetter(operationSpec, parsedResponse); return result; } function shouldDeserializeResponse(parsedResponse) { const request = parsedResponse.request; const operationInfo = (0, operationHelpers_js_1$3.getOperationRequestInfo)(request); const shouldDeserialize = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.shouldDeserialize; let result; if (shouldDeserialize === void 0) result = true; else if (typeof shouldDeserialize === "boolean") result = shouldDeserialize; else result = shouldDeserialize(parsedResponse); return result; } async function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options, parseXML$1) { const parsedResponse = await parse(jsonContentTypes, xmlContentTypes, response, options, parseXML$1); if (!shouldDeserializeResponse(parsedResponse)) return parsedResponse; const operationInfo = (0, operationHelpers_js_1$3.getOperationRequestInfo)(parsedResponse.request); const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; if (!operationSpec || !operationSpec.responses) return parsedResponse; const responseSpec = getOperationResponseMap(parsedResponse); const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec, options); if (error) throw error; else if (shouldReturnResponse) return parsedResponse; if (responseSpec) { if (responseSpec.bodyMapper) { let valueToDeserialize = parsedResponse.parsedBody; if (operationSpec.isXML && responseSpec.bodyMapper.type.name === serializer_js_1$3.MapperTypeNames.Sequence) valueToDeserialize = typeof valueToDeserialize === "object" ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] : []; try { parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); } catch (deserializeError) { const restError = new core_rest_pipeline_1$6.RestError(`Error ${deserializeError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, { statusCode: parsedResponse.status, request: parsedResponse.request, response: parsedResponse }); throw restError; } } else if (operationSpec.httpMethod === "HEAD") parsedResponse.parsedBody = response.status >= 200 && response.status < 300; if (responseSpec.headersMapper) parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders", { xml: {}, ignoreUnknownProperties: true }); } return parsedResponse; } function isOperationSpecEmpty(operationSpec) { const expectedStatusCodes = Object.keys(operationSpec.responses); return expectedStatusCodes.length === 0 || expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default"; } function handleErrorResponse(parsedResponse, operationSpec, responseSpec, options) { var _a$2, _b$1, _c$1, _d$1, _e; const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) ? isSuccessByStatus : !!responseSpec; if (isExpectedStatusCode) if (responseSpec) { if (!responseSpec.isError) return { error: null, shouldReturnResponse: false }; } else return { error: null, shouldReturnResponse: false }; const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? responseSpec : operationSpec.responses.default; const initialErrorMessage = ((_a$2 = parsedResponse.request.streamResponseStatusCodes) === null || _a$2 === void 0 ? void 0 : _a$2.has(parsedResponse.status)) ? `Unexpected status code: ${parsedResponse.status}` : parsedResponse.bodyAsText; const error = new core_rest_pipeline_1$6.RestError(initialErrorMessage, { statusCode: parsedResponse.status, request: parsedResponse.request, response: parsedResponse }); if (!errorResponseSpec && !(((_c$1 = (_b$1 = parsedResponse.parsedBody) === null || _b$1 === void 0 ? void 0 : _b$1.error) === null || _c$1 === void 0 ? void 0 : _c$1.code) && ((_e = (_d$1 = parsedResponse.parsedBody) === null || _d$1 === void 0 ? void 0 : _d$1.error) === null || _e === void 0 ? void 0 : _e.message))) throw error; const defaultBodyMapper = errorResponseSpec === null || errorResponseSpec === void 0 ? void 0 : errorResponseSpec.bodyMapper; const defaultHeadersMapper = errorResponseSpec === null || errorResponseSpec === void 0 ? 
void 0 : errorResponseSpec.headersMapper; try { if (parsedResponse.parsedBody) { const parsedBody = parsedResponse.parsedBody; let deserializedError; if (defaultBodyMapper) { let valueToDeserialize = parsedBody; if (operationSpec.isXML && defaultBodyMapper.type.name === serializer_js_1$3.MapperTypeNames.Sequence) { valueToDeserialize = []; const elementName = defaultBodyMapper.xmlElementName; if (typeof parsedBody === "object" && elementName) valueToDeserialize = parsedBody[elementName]; } deserializedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody", options); } const internalError = parsedBody.error || deserializedError || parsedBody; error.code = internalError.code; if (internalError.message) error.message = internalError.message; if (defaultBodyMapper) error.response.parsedBody = deserializedError; } if (parsedResponse.headers && defaultHeadersMapper) error.response.parsedHeaders = operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders"); } catch (defaultError) { error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; } return { error, shouldReturnResponse: false }; } async function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts, parseXML$1) { var _a$2; if (!((_a$2 = operationResponse.request.streamResponseStatusCodes) === null || _a$2 === void 0 ? void 0 : _a$2.has(operationResponse.status)) && operationResponse.bodyAsText) { const text = operationResponse.bodyAsText; const contentType$1 = operationResponse.headers.get("Content-Type") || ""; const contentComponents = !contentType$1 ? [] : contentType$1.split(";").map((component) => component.toLowerCase()); try { if (contentComponents.length === 0 || contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { operationResponse.parsedBody = JSON.parse(text); return operationResponse; } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { if (!parseXML$1) throw new Error("Parsing XML not supported."); const body$1 = await parseXML$1(text, opts.xml); operationResponse.parsedBody = body$1; return operationResponse; } } catch (err) { const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; const errCode = err.code || core_rest_pipeline_1$6.RestError.PARSE_ERROR; const e = new core_rest_pipeline_1$6.RestError(msg, { code: errCode, statusCode: operationResponse.status, request: operationResponse.request, response: operationResponse }); throw e; } } return operationResponse; } } }); //#endregion //#region node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/interfaceHelpers.js var require_interfaceHelpers = __commonJS({ "node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/interfaceHelpers.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.getStreamingResponseStatusCodes = getStreamingResponseStatusCodes; exports.getPathStringFromParameter = getPathStringFromParameter; const serializer_js_1$2 = require_serializer(); /** * Gets the list of status codes for streaming responses. 
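 *
 * A hedged illustration (not part of the original source): the hypothetical spec
 * below marks its 200 response with a Stream bodyMapper, so this helper would
 * return a Set containing 200.
 *
 * ```ts
 * import { createSerializer } from "@azure/core-client";
 * import type { OperationSpec } from "@azure/core-client";
 *
 * const downloadSpec: OperationSpec = {
 *   httpMethod: "GET",
 *   path: "/blob",
 *   responses: {
 *     200: { bodyMapper: { type: { name: "Stream" } } },
 *     default: {},
 *   },
 *   serializer: createSerializer(),
 * };
 * // getStreamingResponseStatusCodes(downloadSpec) -> Set { 200 }; ServiceClient
 * // calls this internal helper when request.streamResponseStatusCodes is unset.
 * ```
 *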
* @internal */ function getStreamingResponseStatusCodes(operationSpec) { const result = new Set(); for (const statusCode in operationSpec.responses) { const operationResponse = operationSpec.responses[statusCode]; if (operationResponse.bodyMapper && operationResponse.bodyMapper.type.name === serializer_js_1$2.MapperTypeNames.Stream) result.add(Number(statusCode)); } return result; } /** * Get the path to this parameter's value as a dotted string (a.b.c). * @param parameter - The parameter to get the path string for. * @returns The path to this parameter's value as a dotted string. * @internal */ function getPathStringFromParameter(parameter) { const { parameterPath, mapper } = parameter; let result; if (typeof parameterPath === "string") result = parameterPath; else if (Array.isArray(parameterPath)) result = parameterPath.join("."); else result = mapper.serializedName; return result; } } }); //#endregion //#region node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/serializationPolicy.js var require_serializationPolicy = __commonJS({ "node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/serializationPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.serializationPolicyName = void 0; exports.serializationPolicy = serializationPolicy; exports.serializeHeaders = serializeHeaders; exports.serializeRequestBody = serializeRequestBody; const interfaces_js_1$1 = require_interfaces(); const operationHelpers_js_1$2 = require_operationHelpers(); const serializer_js_1$1 = require_serializer(); const interfaceHelpers_js_1$2 = require_interfaceHelpers(); /** * The programmatic identifier of the serializationPolicy. */ exports.serializationPolicyName = "serializationPolicy"; /** * This policy handles assembling the request body and headers using * an OperationSpec and OperationArguments on the request. */ function serializationPolicy(options = {}) { const stringifyXML$1 = options.stringifyXML; return { name: exports.serializationPolicyName, async sendRequest(request, next) { const operationInfo = (0, operationHelpers_js_1$2.getOperationRequestInfo)(request); const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; const operationArguments = operationInfo === null || operationInfo === void 0 ? 
void 0 : operationInfo.operationArguments; if (operationSpec && operationArguments) { serializeHeaders(request, operationArguments, operationSpec); serializeRequestBody(request, operationArguments, operationSpec, stringifyXML$1); } return next(request); } }; } /** * @internal */ function serializeHeaders(request, operationArguments, operationSpec) { var _a$2, _b$1; if (operationSpec.headerParameters) for (const headerParameter of operationSpec.headerParameters) { let headerValue = (0, operationHelpers_js_1$2.getOperationArgumentValueFromParameter)(operationArguments, headerParameter); if (headerValue !== null && headerValue !== void 0 || headerParameter.mapper.required) { headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, (0, interfaceHelpers_js_1$2.getPathStringFromParameter)(headerParameter)); const headerCollectionPrefix = headerParameter.mapper.headerCollectionPrefix; if (headerCollectionPrefix) for (const key of Object.keys(headerValue)) request.headers.set(headerCollectionPrefix + key, headerValue[key]); else request.headers.set(headerParameter.mapper.serializedName || (0, interfaceHelpers_js_1$2.getPathStringFromParameter)(headerParameter), headerValue); } } const customHeaders = (_b$1 = (_a$2 = operationArguments.options) === null || _a$2 === void 0 ? void 0 : _a$2.requestOptions) === null || _b$1 === void 0 ? void 0 : _b$1.customHeaders; if (customHeaders) for (const customHeaderName of Object.keys(customHeaders)) request.headers.set(customHeaderName, customHeaders[customHeaderName]); } /** * @internal */ function serializeRequestBody(request, operationArguments, operationSpec, stringifyXML$1 = function() { throw new Error("XML serialization unsupported!"); }) { var _a$2, _b$1, _c$1, _d$1, _e; const serializerOptions = (_a$2 = operationArguments.options) === null || _a$2 === void 0 ? void 0 : _a$2.serializerOptions; const updatedOptions = { xml: { rootName: (_b$1 = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.rootName) !== null && _b$1 !== void 0 ? _b$1 : "", includeRoot: (_c$1 = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.includeRoot) !== null && _c$1 !== void 0 ? _c$1 : false, xmlCharKey: (_d$1 = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.xmlCharKey) !== null && _d$1 !== void 0 ? _d$1 : interfaces_js_1$1.XML_CHARKEY } }; const xmlCharKey = updatedOptions.xml.xmlCharKey; if (operationSpec.requestBody && operationSpec.requestBody.mapper) { request.body = (0, operationHelpers_js_1$2.getOperationArgumentValueFromParameter)(operationArguments, operationSpec.requestBody); const bodyMapper = operationSpec.requestBody.mapper; const { required, serializedName, xmlName, xmlElementName, xmlNamespace, xmlNamespacePrefix, nullable } = bodyMapper; const typeName = bodyMapper.type.name; try { if (request.body !== void 0 && request.body !== null || nullable && request.body === null || required) { const requestBodyParameterPathString = (0, interfaceHelpers_js_1$2.getPathStringFromParameter)(operationSpec.requestBody); request.body = operationSpec.serializer.serialize(bodyMapper, request.body, requestBodyParameterPathString, updatedOptions); const isStream = typeName === serializer_js_1$1.MapperTypeNames.Stream; if (operationSpec.isXML) { const xmlnsKey = xmlNamespacePrefix ? 
`xmlns:${xmlNamespacePrefix}` : "xmlns"; const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, request.body, updatedOptions); if (typeName === serializer_js_1$1.MapperTypeNames.Sequence) request.body = stringifyXML$1(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName, xmlCharKey }); else if (!isStream) request.body = stringifyXML$1(value, { rootName: xmlName || serializedName, xmlCharKey }); } else if (typeName === serializer_js_1$1.MapperTypeNames.String && (((_e = operationSpec.contentType) === null || _e === void 0 ? void 0 : _e.match("text/plain")) || operationSpec.mediaType === "text")) return; else if (!isStream) request.body = JSON.stringify(request.body); } } catch (error) { throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, void 0, " ")}.`); } } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { request.formData = {}; for (const formDataParameter of operationSpec.formDataParameters) { const formDataParameterValue = (0, operationHelpers_js_1$2.getOperationArgumentValueFromParameter)(operationArguments, formDataParameter); if (formDataParameterValue !== void 0 && formDataParameterValue !== null) { const formDataParameterPropertyName = formDataParameter.mapper.serializedName || (0, interfaceHelpers_js_1$2.getPathStringFromParameter)(formDataParameter); request.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, (0, interfaceHelpers_js_1$2.getPathStringFromParameter)(formDataParameter), updatedOptions); } } } } /** * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself */ function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { if (xmlNamespace && ![ "Composite", "Sequence", "Dictionary" ].includes(typeName)) { const result = {}; result[options.xml.xmlCharKey] = serializedValue; result[interfaces_js_1$1.XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; return result; } return serializedValue; } function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { if (!Array.isArray(obj)) obj = [obj]; if (!xmlNamespaceKey || !xmlNamespace) return { [elementName]: obj }; const result = { [elementName]: obj }; result[interfaces_js_1$1.XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; return result; } } }); //#endregion //#region node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/pipeline.js var require_pipeline = __commonJS({ "node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/pipeline.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createClientPipeline = createClientPipeline; const deserializationPolicy_js_1$1 = require_deserializationPolicy(); const core_rest_pipeline_1$5 = require_commonjs$7(); const serializationPolicy_js_1$1 = require_serializationPolicy(); /** * Creates a new Pipeline for use with a Service Client. * Adds in deserializationPolicy by default. * Also adds in bearerTokenAuthenticationPolicy if passed a TokenCredential. * @param options - Options to customize the created pipeline. */ function createClientPipeline(options = {}) { const pipeline = (0, core_rest_pipeline_1$5.createPipelineFromOptions)(options !== null && options !== void 0 ? 
options : {}); if (options.credentialOptions) pipeline.addPolicy((0, core_rest_pipeline_1$5.bearerTokenAuthenticationPolicy)({ credential: options.credentialOptions.credential, scopes: options.credentialOptions.credentialScopes })); pipeline.addPolicy((0, serializationPolicy_js_1$1.serializationPolicy)(options.serializationOptions), { phase: "Serialize" }); pipeline.addPolicy((0, deserializationPolicy_js_1$1.deserializationPolicy)(options.deserializationOptions), { phase: "Deserialize" }); return pipeline; } } }); //#endregion //#region node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/httpClientCache.js var require_httpClientCache = __commonJS({ "node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/httpClientCache.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.getCachedDefaultHttpClient = getCachedDefaultHttpClient$1; const core_rest_pipeline_1$4 = require_commonjs$7(); let cachedHttpClient; function getCachedDefaultHttpClient$1() { if (!cachedHttpClient) cachedHttpClient = (0, core_rest_pipeline_1$4.createDefaultHttpClient)(); return cachedHttpClient; } } }); //#endregion //#region node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/urlHelpers.js var require_urlHelpers = __commonJS({ "node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/urlHelpers.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.getRequestUrl = getRequestUrl; exports.appendQueryParams = appendQueryParams; const CollectionFormatToDelimiterMap = { CSV: ",", SSV: " ", Multi: "Multi", TSV: "\t", Pipes: "|" }; const operationHelpers_js_1$1 = require_operationHelpers(); const interfaceHelpers_js_1$1 = require_interfaceHelpers(); function getRequestUrl(baseUri, operationSpec, operationArguments, fallbackObject) { const urlReplacements = calculateUrlReplacements(operationSpec, operationArguments, fallbackObject); let isAbsolutePath = false; let requestUrl = replaceAll(baseUri, urlReplacements); if (operationSpec.path) { let path$13 = replaceAll(operationSpec.path, urlReplacements); if (operationSpec.path === "/{nextLink}" && path$13.startsWith("/")) path$13 = path$13.substring(1); if (isAbsoluteUrl(path$13)) { requestUrl = path$13; isAbsolutePath = true; } else requestUrl = appendPath(requestUrl, path$13); } const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); /** * Notice that this call sets the `noOverwrite` parameter to true if the `requestUrl` * is an absolute path. This ensures that existing query parameter values in `requestUrl` * do not get overwritten. On the other hand, when `requestUrl` is not an absolute path, it * is still being built, so there is nothing to overwrite. */ requestUrl = appendQueryParams(requestUrl, queryParams, sequenceParams, isAbsolutePath); return requestUrl; } function replaceAll(input, replacements) { let result = input; for (const [searchValue, replaceValue] of replacements) result = result.split(searchValue).join(replaceValue); return result; } function calculateUrlReplacements(operationSpec, operationArguments, fallbackObject) { var _a$2; const result = new Map(); if ((_a$2 = operationSpec.urlParameters) === null || _a$2 === void 0 ? 
void 0 : _a$2.length) for (const urlParameter of operationSpec.urlParameters) { let urlParameterValue = (0, operationHelpers_js_1$1.getOperationArgumentValueFromParameter)(operationArguments, urlParameter, fallbackObject); const parameterPathString = (0, interfaceHelpers_js_1$1.getPathStringFromParameter)(urlParameter); urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, parameterPathString); if (!urlParameter.skipEncoding) urlParameterValue = encodeURIComponent(urlParameterValue); result.set(`{${urlParameter.mapper.serializedName || parameterPathString}}`, urlParameterValue); } return result; } function isAbsoluteUrl(url$1) { return url$1.includes("://"); } function appendPath(url$1, pathToAppend) { if (!pathToAppend) return url$1; const parsedUrl = new URL(url$1); let newPath = parsedUrl.pathname; if (!newPath.endsWith("/")) newPath = `${newPath}/`; if (pathToAppend.startsWith("/")) pathToAppend = pathToAppend.substring(1); const searchStart = pathToAppend.indexOf("?"); if (searchStart !== -1) { const path$13 = pathToAppend.substring(0, searchStart); const search = pathToAppend.substring(searchStart + 1); newPath = newPath + path$13; if (search) parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search; } else newPath = newPath + pathToAppend; parsedUrl.pathname = newPath; return parsedUrl.toString(); } function calculateQueryParameters(operationSpec, operationArguments, fallbackObject) { var _a$2; const result = new Map(); const sequenceParams = new Set(); if ((_a$2 = operationSpec.queryParameters) === null || _a$2 === void 0 ? void 0 : _a$2.length) for (const queryParameter of operationSpec.queryParameters) { if (queryParameter.mapper.type.name === "Sequence" && queryParameter.mapper.serializedName) sequenceParams.add(queryParameter.mapper.serializedName); let queryParameterValue = (0, operationHelpers_js_1$1.getOperationArgumentValueFromParameter)(operationArguments, queryParameter, fallbackObject); if (queryParameterValue !== void 0 && queryParameterValue !== null || queryParameter.mapper.required) { queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, (0, interfaceHelpers_js_1$1.getPathStringFromParameter)(queryParameter)); const delimiter$1 = queryParameter.collectionFormat ? 
CollectionFormatToDelimiterMap[queryParameter.collectionFormat] : ""; if (Array.isArray(queryParameterValue)) queryParameterValue = queryParameterValue.map((item) => { if (item === null || item === void 0) return ""; return item; }); if (queryParameter.collectionFormat === "Multi" && queryParameterValue.length === 0) continue; else if (Array.isArray(queryParameterValue) && (queryParameter.collectionFormat === "SSV" || queryParameter.collectionFormat === "TSV")) queryParameterValue = queryParameterValue.join(delimiter$1); if (!queryParameter.skipEncoding) if (Array.isArray(queryParameterValue)) queryParameterValue = queryParameterValue.map((item) => { return encodeURIComponent(item); }); else queryParameterValue = encodeURIComponent(queryParameterValue); if (Array.isArray(queryParameterValue) && (queryParameter.collectionFormat === "CSV" || queryParameter.collectionFormat === "Pipes")) queryParameterValue = queryParameterValue.join(delimiter$1); result.set(queryParameter.mapper.serializedName || (0, interfaceHelpers_js_1$1.getPathStringFromParameter)(queryParameter), queryParameterValue); } } return { queryParams: result, sequenceParams }; } function simpleParseQueryParams(queryString) { const result = new Map(); if (!queryString || queryString[0] !== "?") return result; queryString = queryString.slice(1); const pairs = queryString.split("&"); for (const pair of pairs) { const [name, value] = pair.split("=", 2); const existingValue = result.get(name); if (existingValue) if (Array.isArray(existingValue)) existingValue.push(value); else result.set(name, [existingValue, value]); else result.set(name, value); } return result; } /** @internal */ function appendQueryParams(url$1, queryParams, sequenceParams, noOverwrite = false) { if (queryParams.size === 0) return url$1; const parsedUrl = new URL(url$1); const combinedParams = simpleParseQueryParams(parsedUrl.search); for (const [name, value] of queryParams) { const existingValue = combinedParams.get(name); if (Array.isArray(existingValue)) if (Array.isArray(value)) { existingValue.push(...value); const valueSet = new Set(existingValue); combinedParams.set(name, Array.from(valueSet)); } else existingValue.push(value); else if (existingValue) { if (Array.isArray(value)) value.unshift(existingValue); else if (sequenceParams.has(name)) combinedParams.set(name, [existingValue, value]); if (!noOverwrite) combinedParams.set(name, value); } else combinedParams.set(name, value); } const searchPieces = []; for (const [name, value] of combinedParams) if (typeof value === "string") searchPieces.push(`${name}=${value}`); else if (Array.isArray(value)) for (const subValue of value) searchPieces.push(`${name}=${subValue}`); else searchPieces.push(`${name}=${value}`); parsedUrl.search = searchPieces.length ? 
`?${searchPieces.join("&")}` : ""; return parsedUrl.toString(); } } }); //#endregion //#region node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/log.js var require_log = __commonJS({ "node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/log.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.logger = void 0; const logger_1$1 = require_commonjs$11(); exports.logger = (0, logger_1$1.createClientLogger)("core-client"); } }); //#endregion //#region node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/serviceClient.js var require_serviceClient = __commonJS({ "node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/serviceClient.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.ServiceClient = void 0; const core_rest_pipeline_1$3 = require_commonjs$7(); const pipeline_js_1$1 = require_pipeline(); const utils_js_1 = require_utils(); const httpClientCache_js_1 = require_httpClientCache(); const operationHelpers_js_1 = require_operationHelpers(); const urlHelpers_js_1 = require_urlHelpers(); const interfaceHelpers_js_1 = require_interfaceHelpers(); const log_js_1$1 = require_log(); /** * Initializes a new instance of the ServiceClient. */ var ServiceClient = class { /** * The ServiceClient constructor * @param options - The service client options that govern the behavior of the client. */ constructor(options = {}) { var _a$2, _b$1; this._requestContentType = options.requestContentType; this._endpoint = (_a$2 = options.endpoint) !== null && _a$2 !== void 0 ? _a$2 : options.baseUri; if (options.baseUri) log_js_1$1.logger.warning("The baseUri option for SDK Clients has been deprecated, please use endpoint instead."); this._allowInsecureConnection = options.allowInsecureConnection; this._httpClient = options.httpClient || (0, httpClientCache_js_1.getCachedDefaultHttpClient)(); this.pipeline = options.pipeline || createDefaultPipeline(options); if ((_b$1 = options.additionalPolicies) === null || _b$1 === void 0 ? void 0 : _b$1.length) for (const { policy, position } of options.additionalPolicies) { const afterPhase = position === "perRetry" ? "Sign" : void 0; this.pipeline.addPolicy(policy, { afterPhase }); } } /** * Send the provided httpRequest. */ async sendRequest(request) { return this.pipeline.sendRequest(this._httpClient, request); } /** * Send an HTTP request that is populated using the provided OperationSpec. * @typeParam T - The typed result of the request, based on the OperationSpec. * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. * @param operationSpec - The OperationSpec to use to populate the httpRequest. 
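 *
 * A hedged usage sketch (the client, spec, and argument names below are
 * hypothetical, not taken from this file):
 *
 * ```ts
 * import { ServiceClient, createSerializer } from "@azure/core-client";
 * import type { OperationSpec } from "@azure/core-client";
 *
 * const getWidgetSpec: OperationSpec = {
 *   httpMethod: "GET",
 *   path: "/widgets/{widgetName}",
 *   urlParameters: [
 *     {
 *       parameterPath: "widgetName",
 *       mapper: { serializedName: "widgetName", required: true, type: { name: "String" } },
 *     },
 *   ],
 *   responses: { 200: { bodyMapper: { type: { name: "String" } } }, default: {} },
 *   serializer: createSerializer(),
 * };
 *
 * const client = new ServiceClient({ endpoint: "https://example.org" });
 * // The templated {widgetName} in `path` is filled in from the operation arguments.
 * const widget = await client.sendOperationRequest({ widgetName: "w1" }, getWidgetSpec);
 * ```
 *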
*/ async sendOperationRequest(operationArguments, operationSpec) { const endpoint = operationSpec.baseUrl || this._endpoint; if (!endpoint) throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a endpoint string property that contains the base URL to use."); const url$1 = (0, urlHelpers_js_1.getRequestUrl)(endpoint, operationSpec, operationArguments, this); const request = (0, core_rest_pipeline_1$3.createPipelineRequest)({ url: url$1 }); request.method = operationSpec.httpMethod; const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request); operationInfo.operationSpec = operationSpec; operationInfo.operationArguments = operationArguments; const contentType$1 = operationSpec.contentType || this._requestContentType; if (contentType$1 && operationSpec.requestBody) request.headers.set("Content-Type", contentType$1); const options = operationArguments.options; if (options) { const requestOptions = options.requestOptions; if (requestOptions) { if (requestOptions.timeout) request.timeout = requestOptions.timeout; if (requestOptions.onUploadProgress) request.onUploadProgress = requestOptions.onUploadProgress; if (requestOptions.onDownloadProgress) request.onDownloadProgress = requestOptions.onDownloadProgress; if (requestOptions.shouldDeserialize !== void 0) operationInfo.shouldDeserialize = requestOptions.shouldDeserialize; if (requestOptions.allowInsecureConnection) request.allowInsecureConnection = true; } if (options.abortSignal) request.abortSignal = options.abortSignal; if (options.tracingOptions) request.tracingOptions = options.tracingOptions; } if (this._allowInsecureConnection) request.allowInsecureConnection = true; if (request.streamResponseStatusCodes === void 0) request.streamResponseStatusCodes = (0, interfaceHelpers_js_1.getStreamingResponseStatusCodes)(operationSpec); try { const rawResponse = await this.sendRequest(request); const flatResponse = (0, utils_js_1.flattenResponse)(rawResponse, operationSpec.responses[rawResponse.status]); if (options === null || options === void 0 ? void 0 : options.onResponse) options.onResponse(rawResponse, flatResponse); return flatResponse; } catch (error) { if (typeof error === "object" && (error === null || error === void 0 ? void 0 : error.response)) { const rawResponse = error.response; const flatResponse = (0, utils_js_1.flattenResponse)(rawResponse, operationSpec.responses[error.statusCode] || operationSpec.responses["default"]); error.details = flatResponse; if (options === null || options === void 0 ? void 0 : options.onResponse) options.onResponse(rawResponse, flatResponse, error); } throw error; } } }; exports.ServiceClient = ServiceClient; function createDefaultPipeline(options) { const credentialScopes = getCredentialScopes(options); const credentialOptions = options.credential && credentialScopes ? { credentialScopes, credential: options.credential } : void 0; return (0, pipeline_js_1$1.createClientPipeline)(Object.assign(Object.assign({}, options), { credentialOptions })); } function getCredentialScopes(options) { if (options.credentialScopes) return options.credentialScopes; if (options.endpoint) return `${options.endpoint}/.default`; if (options.baseUri) return `${options.baseUri}/.default`; if (options.credential && !options.credentialScopes) throw new Error(`When using credentials, the ServiceClientOptions must contain either a endpoint or a credentialScopes. 
Unable to create a bearerTokenAuthenticationPolicy`); return void 0; } } }); //#endregion //#region node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/authorizeRequestOnClaimChallenge.js var require_authorizeRequestOnClaimChallenge = __commonJS({ "node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/authorizeRequestOnClaimChallenge.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.parseCAEChallenge = parseCAEChallenge; exports.authorizeRequestOnClaimChallenge = authorizeRequestOnClaimChallenge; const log_js_1 = require_log(); const base64_js_1 = require_base64$1(); /** * Converts: `Bearer a="b", c="d", Bearer d="e", f="g"`. * Into: `[ { a: 'b', c: 'd' }, { d: 'e', f: 'g' } ]`. * * @internal */ function parseCAEChallenge(challenges) { const bearerChallenges = `, ${challenges.trim()}`.split(", Bearer ").filter((x) => x); return bearerChallenges.map((challenge) => { const challengeParts = `${challenge.trim()}, `.split("\", ").filter((x) => x); const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("=\""))); return keyValuePairs.reduce((a, b) => Object.assign(Object.assign({}, a), b), {}); }); } /** * This function can be used as a callback for the `bearerTokenAuthenticationPolicy` of `@azure/core-rest-pipeline`, to support CAE challenges: * [Continuous Access Evaluation](https://learn.microsoft.com/azure/active-directory/conditional-access/concept-continuous-access-evaluation). * * Call the `bearerTokenAuthenticationPolicy` with the following options: * * ```ts snippet:AuthorizeRequestOnClaimChallenge * import { bearerTokenAuthenticationPolicy } from "@azure/core-rest-pipeline"; * import { authorizeRequestOnClaimChallenge } from "@azure/core-client"; * * const policy = bearerTokenAuthenticationPolicy({ * challengeCallbacks: { * authorizeRequestOnChallenge: authorizeRequestOnClaimChallenge, * }, * scopes: ["https://service/.default"], * }); * ``` * * Once provided, the `bearerTokenAuthenticationPolicy` policy will internally handle Continuous Access Evaluation (CAE) challenges. * When it can't complete a challenge it will return the 401 (unauthorized) response from ARM. * * Example challenge with claims: * * ``` * Bearer authorization_uri="https://login.windows-ppe.net/", error="invalid_token", * error_description="User session has been revoked", * claims="eyJhY2Nlc3NfdG9rZW4iOnsibmJmIjp7ImVzc2VudGlhbCI6dHJ1ZSwgInZhbHVlIjoiMTYwMzc0MjgwMCJ9fX0=" * ``` */ async function authorizeRequestOnClaimChallenge(onChallengeOptions) { var _a$2; const { scopes, response } = onChallengeOptions; const logger$2 = onChallengeOptions.logger || log_js_1.logger; const challenge = response.headers.get("WWW-Authenticate"); if (!challenge) { logger$2.info(`The WWW-Authenticate header was missing. Failed to perform the Continuous Access Evaluation authentication flow.`); return false; } const challenges = parseCAEChallenge(challenge) || []; const parsedChallenge = challenges.find((x) => x.claims); if (!parsedChallenge) { logger$2.info(`The WWW-Authenticate header was missing the necessary "claims" to perform the Continuous Access Evaluation authentication flow.`); return false; } const accessToken = await onChallengeOptions.getAccessToken(parsedChallenge.scope ? 
[parsedChallenge.scope] : scopes, { claims: (0, base64_js_1.decodeStringToString)(parsedChallenge.claims) }); if (!accessToken) return false; onChallengeOptions.request.headers.set("Authorization", `${(_a$2 = accessToken.tokenType) !== null && _a$2 !== void 0 ? _a$2 : "Bearer"} ${accessToken.token}`); return true; } } }); //#endregion //#region node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/authorizeRequestOnTenantChallenge.js var require_authorizeRequestOnTenantChallenge = __commonJS({ "node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/authorizeRequestOnTenantChallenge.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.authorizeRequestOnTenantChallenge = void 0; /** * A set of constants used internally when processing requests. */ const Constants = { DefaultScope: "/.default", HeaderConstants: { AUTHORIZATION: "authorization" } }; function isUuid(text) { return /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/.test(text); } /** * Defines a callback to handle auth challenge for Storage APIs. * This implements the bearer challenge process described here: https://learn.microsoft.com/rest/api/storageservices/authorize-with-azure-active-directory#bearer-challenge * Handling has specific features for storage that depart from the general AAD challenge docs. **/ const authorizeRequestOnTenantChallenge = async (challengeOptions) => { var _a$2; const requestOptions = requestToOptions(challengeOptions.request); const challenge = getChallenge(challengeOptions.response); if (challenge) { const challengeInfo = parseChallenge(challenge); const challengeScopes = buildScopes(challengeOptions, challengeInfo); const tenantId = extractTenantId(challengeInfo); if (!tenantId) return false; const accessToken = await challengeOptions.getAccessToken(challengeScopes, Object.assign(Object.assign({}, requestOptions), { tenantId })); if (!accessToken) return false; challengeOptions.request.headers.set(Constants.HeaderConstants.AUTHORIZATION, `${(_a$2 = accessToken.tokenType) !== null && _a$2 !== void 0 ? _a$2 : "Bearer"} ${accessToken.token}`); return true; } return false; }; exports.authorizeRequestOnTenantChallenge = authorizeRequestOnTenantChallenge; /** * Extracts the tenant id from the challenge information. * The tenant id is contained in the authorization_uri as the first * path part. */ function extractTenantId(challengeInfo) { const parsedAuthUri = new URL(challengeInfo.authorization_uri); const pathSegments = parsedAuthUri.pathname.split("/"); const tenantId = pathSegments[1]; if (tenantId && isUuid(tenantId)) return tenantId; return void 0; } /** * Builds the authentication scopes based on the information that comes in the * challenge information. The scopes URL is present in the resource_id; if it is empty, * we keep using the original scopes. */ function buildScopes(challengeOptions, challengeInfo) { if (!challengeInfo.resource_id) return challengeOptions.scopes; const challengeScopes = new URL(challengeInfo.resource_id); challengeScopes.pathname = Constants.DefaultScope; let scope = challengeScopes.toString(); if (scope === "https://disk.azure.com/.default") scope = "https://disk.azure.com//.default"; return [scope]; } /** * We will retrieve the challenge only if the response status code was 401, * and if the response contained the header "WWW-Authenticate" with a non-empty value. 
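 *
 * The challenge consumed here is produced for the `authorizeRequestOnTenantChallenge`
 * callback above, which is typically wired into a pipeline as in this hedged sketch,
 * parallel to the CAE example earlier in this bundle (the credential and scope are
 * placeholders):
 *
 * ```ts
 * import { bearerTokenAuthenticationPolicy } from "@azure/core-rest-pipeline";
 * import { authorizeRequestOnTenantChallenge } from "@azure/core-client";
 * import { DefaultAzureCredential } from "@azure/identity";
 *
 * const policy = bearerTokenAuthenticationPolicy({
 *   credential: new DefaultAzureCredential(),
 *   scopes: ["https://storage.azure.com/.default"],
 *   challengeCallbacks: {
 *     authorizeRequestOnChallenge: authorizeRequestOnTenantChallenge,
 *   },
 * });
 * ```
 *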
*/ function getChallenge(response) { const challenge = response.headers.get("WWW-Authenticate"); if (response.status === 401 && challenge) return challenge; return; } /** * Converts: `Bearer a="b" c="d"`. * Into: `[ { a: 'b', c: 'd' }]`. * * @internal */ function parseChallenge(challenge) { const bearerChallenge = challenge.slice(7); const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); return keyValuePairs.reduce((a, b) => Object.assign(Object.assign({}, a), b), {}); } /** * Extracts the options from a Pipeline Request for later re-use */ function requestToOptions(request) { return { abortSignal: request.abortSignal, requestOptions: { timeout: request.timeout }, tracingOptions: request.tracingOptions }; } } }); //#endregion //#region node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/index.js var require_commonjs$5 = __commonJS({ "node_modules/.deno/@azure+core-client@1.9.4/node_modules/@azure/core-client/dist/commonjs/index.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.authorizeRequestOnTenantChallenge = exports.authorizeRequestOnClaimChallenge = exports.serializationPolicyName = exports.serializationPolicy = exports.deserializationPolicyName = exports.deserializationPolicy = exports.XML_CHARKEY = exports.XML_ATTRKEY = exports.createClientPipeline = exports.ServiceClient = exports.MapperTypeNames = exports.createSerializer = void 0; var serializer_js_1 = require_serializer(); Object.defineProperty(exports, "createSerializer", { enumerable: true, get: function() { return serializer_js_1.createSerializer; } }); Object.defineProperty(exports, "MapperTypeNames", { enumerable: true, get: function() { return serializer_js_1.MapperTypeNames; } }); var serviceClient_js_1 = require_serviceClient(); Object.defineProperty(exports, "ServiceClient", { enumerable: true, get: function() { return serviceClient_js_1.ServiceClient; } }); var pipeline_js_1 = require_pipeline(); Object.defineProperty(exports, "createClientPipeline", { enumerable: true, get: function() { return pipeline_js_1.createClientPipeline; } }); var interfaces_js_1 = require_interfaces(); Object.defineProperty(exports, "XML_ATTRKEY", { enumerable: true, get: function() { return interfaces_js_1.XML_ATTRKEY; } }); Object.defineProperty(exports, "XML_CHARKEY", { enumerable: true, get: function() { return interfaces_js_1.XML_CHARKEY; } }); var deserializationPolicy_js_1 = require_deserializationPolicy(); Object.defineProperty(exports, "deserializationPolicy", { enumerable: true, get: function() { return deserializationPolicy_js_1.deserializationPolicy; } }); Object.defineProperty(exports, "deserializationPolicyName", { enumerable: true, get: function() { return deserializationPolicy_js_1.deserializationPolicyName; } }); var serializationPolicy_js_1 = require_serializationPolicy(); Object.defineProperty(exports, "serializationPolicy", { enumerable: true, get: function() { return serializationPolicy_js_1.serializationPolicy; } }); Object.defineProperty(exports, "serializationPolicyName", { enumerable: true, get: function() { return serializationPolicy_js_1.serializationPolicyName; } }); var authorizeRequestOnClaimChallenge_js_1 = require_authorizeRequestOnClaimChallenge(); Object.defineProperty(exports, "authorizeRequestOnClaimChallenge", { enumerable: true, get: function() { return 
authorizeRequestOnClaimChallenge_js_1.authorizeRequestOnClaimChallenge; } }); var authorizeRequestOnTenantChallenge_js_1 = require_authorizeRequestOnTenantChallenge(); Object.defineProperty(exports, "authorizeRequestOnTenantChallenge", { enumerable: true, get: function() { return authorizeRequestOnTenantChallenge_js_1.authorizeRequestOnTenantChallenge; } }); } }); //#endregion //#region node_modules/.deno/@azure+core-http-compat@2.3.0/node_modules/@azure/core-http-compat/dist/commonjs/util.js var require_util$1 = __commonJS({ "node_modules/.deno/@azure+core-http-compat@2.3.0/node_modules/@azure/core-http-compat/dist/commonjs/util.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.HttpHeaders = void 0; exports.toPipelineRequest = toPipelineRequest; exports.toWebResourceLike = toWebResourceLike; exports.toHttpHeadersLike = toHttpHeadersLike; const core_rest_pipeline_1$2 = require_commonjs$7(); const originalRequestSymbol = Symbol("Original PipelineRequest"); const originalClientRequestSymbol = Symbol.for("@azure/core-client original request"); function toPipelineRequest(webResource, options = {}) { const compatWebResource = webResource; const request = compatWebResource[originalRequestSymbol]; const headers = (0, core_rest_pipeline_1$2.createHttpHeaders)(webResource.headers.toJson({ preserveCase: true })); if (request) { request.headers = headers; return request; } else { const newRequest = (0, core_rest_pipeline_1$2.createPipelineRequest)({ url: webResource.url, method: webResource.method, headers, withCredentials: webResource.withCredentials, timeout: webResource.timeout, requestId: webResource.requestId, abortSignal: webResource.abortSignal, body: webResource.body, formData: webResource.formData, disableKeepAlive: !!webResource.keepAlive, onDownloadProgress: webResource.onDownloadProgress, onUploadProgress: webResource.onUploadProgress, proxySettings: webResource.proxySettings, streamResponseStatusCodes: webResource.streamResponseStatusCodes, agent: webResource.agent, requestOverrides: webResource.requestOverrides }); if (options.originalRequest) newRequest[originalClientRequestSymbol] = options.originalRequest; return newRequest; } } function toWebResourceLike(request, options) { var _a$2; const originalRequest = (_a$2 = options === null || options === void 0 ? void 0 : options.originalRequest) !== null && _a$2 !== void 0 ? _a$2 : request; const webResource = { url: request.url, method: request.method, headers: toHttpHeadersLike(request.headers), withCredentials: request.withCredentials, timeout: request.timeout, requestId: request.headers.get("x-ms-client-request-id") || request.requestId, abortSignal: request.abortSignal, body: request.body, formData: request.formData, keepAlive: !!request.disableKeepAlive, onDownloadProgress: request.onDownloadProgress, onUploadProgress: request.onUploadProgress, proxySettings: request.proxySettings, streamResponseStatusCodes: request.streamResponseStatusCodes, agent: request.agent, requestOverrides: request.requestOverrides, clone() { throw new Error("Cannot clone a non-proxied WebResourceLike"); }, prepare() { throw new Error("WebResourceLike.prepare() is not supported by @azure/core-http-compat"); }, validateRequestProperties() { /** do nothing */ } }; if (options === null || options === void 0 ? 
void 0 : options.createProxy) return new Proxy(webResource, { get(target, prop, receiver) { if (prop === originalRequestSymbol) return request; else if (prop === "clone") return () => { return toWebResourceLike(toPipelineRequest(webResource, { originalRequest }), { createProxy: true, originalRequest }); }; return Reflect.get(target, prop, receiver); }, set(target, prop, value, receiver) { if (prop === "keepAlive") request.disableKeepAlive = !value; const passThroughProps = [ "url", "method", "withCredentials", "timeout", "requestId", "abortSignal", "body", "formData", "onDownloadProgress", "onUploadProgress", "proxySettings", "streamResponseStatusCodes", "agent", "requestOverrides" ]; if (typeof prop === "string" && passThroughProps.includes(prop)) request[prop] = value; return Reflect.set(target, prop, value, receiver); } }); else return webResource; } /** * Converts HttpHeaders from core-rest-pipeline to look like * HttpHeaders from core-http. * @param headers - HttpHeaders from core-rest-pipeline * @returns HttpHeaders as they looked in core-http */ function toHttpHeadersLike(headers) { return new HttpHeaders(headers.toJSON({ preserveCase: true })); } /** * A collection of HttpHeaders that can be sent with a HTTP request. */ function getHeaderKey(headerName) { return headerName.toLowerCase(); } /** * A collection of HTTP header key/value pairs. */ var HttpHeaders = class HttpHeaders { constructor(rawHeaders) { this._headersMap = {}; if (rawHeaders) for (const headerName in rawHeaders) this.set(headerName, rawHeaders[headerName]); } /** * Set a header in this collection with the provided name and value. The name is * case-insensitive. * @param headerName - The name of the header to set. This value is case-insensitive. * @param headerValue - The value of the header to set. */ set(headerName, headerValue) { this._headersMap[getHeaderKey(headerName)] = { name: headerName, value: headerValue.toString() }; } /** * Get the header value for the provided header name, or undefined if no header exists in this * collection with the provided name. * @param headerName - The name of the header. */ get(headerName) { const header = this._headersMap[getHeaderKey(headerName)]; return !header ? void 0 : header.value; } /** * Get whether or not this header collection contains a header entry for the provided header name. */ contains(headerName) { return !!this._headersMap[getHeaderKey(headerName)]; } /** * Remove the header with the provided headerName. Return whether or not the header existed and * was removed. * @param headerName - The name of the header to remove. */ remove(headerName) { const result = this.contains(headerName); delete this._headersMap[getHeaderKey(headerName)]; return result; } /** * Get the headers that are contained this collection as an object. */ rawHeaders() { return this.toJson({ preserveCase: true }); } /** * Get the headers that are contained in this collection as an array. */ headersArray() { const headers = []; for (const headerKey in this._headersMap) headers.push(this._headersMap[headerKey]); return headers; } /** * Get the header names that are contained in this collection. */ headerNames() { const headerNames = []; const headers = this.headersArray(); for (let i = 0; i < headers.length; ++i) headerNames.push(headers[i].name); return headerNames; } /** * Get the header values that are contained in this collection. 
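 *
 * A hedged sketch of this collection's accessors (the header values are
 * illustrative only):
 *
 * ```ts
 * import { toHttpHeadersLike } from "@azure/core-http-compat";
 * import { createHttpHeaders } from "@azure/core-rest-pipeline";
 *
 * const headers = toHttpHeadersLike(createHttpHeaders({ "Content-Type": "application/json" }));
 * headers.set("x-ms-version", "2023-01-01");
 * headers.headerNames();  // ["Content-Type", "x-ms-version"]
 * headers.headerValues(); // ["application/json", "2023-01-01"]
 * ```
 *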
*/ headerValues() { const headerValues = []; const headers = this.headersArray(); for (let i = 0; i < headers.length; ++i) headerValues.push(headers[i].value); return headerValues; } /** * Get the JSON object representation of this HTTP header collection. */ toJson(options = {}) { const result = {}; if (options.preserveCase) for (const headerKey in this._headersMap) { const header = this._headersMap[headerKey]; result[header.name] = header.value; } else for (const headerKey in this._headersMap) { const header = this._headersMap[headerKey]; result[getHeaderKey(header.name)] = header.value; } return result; } /** * Get the string representation of this HTTP header collection. */ toString() { return JSON.stringify(this.toJson({ preserveCase: true })); } /** * Create a deep clone/copy of this HttpHeaders collection. */ clone() { const resultPreservingCasing = {}; for (const headerKey in this._headersMap) { const header = this._headersMap[headerKey]; resultPreservingCasing[header.name] = header.value; } return new HttpHeaders(resultPreservingCasing); } }; exports.HttpHeaders = HttpHeaders; } }); //#endregion //#region node_modules/.deno/@azure+core-http-compat@2.3.0/node_modules/@azure/core-http-compat/dist/commonjs/response.js var require_response = __commonJS({ "node_modules/.deno/@azure+core-http-compat@2.3.0/node_modules/@azure/core-http-compat/dist/commonjs/response.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.toCompatResponse = toCompatResponse; exports.toPipelineResponse = toPipelineResponse; const core_rest_pipeline_1$1 = require_commonjs$7(); const util_js_1$3 = require_util$1(); const originalResponse = Symbol("Original FullOperationResponse"); /** * A helper to convert response objects from the new pipeline back to the old one. * @param response - A response object from core-client. * @returns A response compatible with `HttpOperationResponse` from core-http. */ function toCompatResponse(response, options) { let request = (0, util_js_1$3.toWebResourceLike)(response.request); let headers = (0, util_js_1$3.toHttpHeadersLike)(response.headers); if (options === null || options === void 0 ? void 0 : options.createProxy) return new Proxy(response, { get(target, prop, receiver) { if (prop === "headers") return headers; else if (prop === "request") return request; else if (prop === originalResponse) return response; return Reflect.get(target, prop, receiver); }, set(target, prop, value, receiver) { if (prop === "headers") headers = value; else if (prop === "request") request = value; return Reflect.set(target, prop, value, receiver); } }); else return Object.assign(Object.assign({}, response), { request, headers }); } /** * A helper to convert back to a PipelineResponse * @param compatResponse - A response compatible with `HttpOperationResponse` from core-http. 
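 *
 * A hedged sketch of the header conversion step this module performs internally
 * (`rebuildHeaders` is a hypothetical stand-in; `toPipelineResponse` itself is
 * module-private):
 *
 * ```ts
 * import { createHttpHeaders } from "@azure/core-rest-pipeline";
 * import { toHttpHeadersLike } from "@azure/core-http-compat";
 *
 * type CompatHeaders = ReturnType<typeof toHttpHeadersLike>;
 *
 * // Mirrors the fallback branch: compat headers are serialized with their
 * // original casing and re-wrapped as core-rest-pipeline headers.
 * function rebuildHeaders(compatHeaders: CompatHeaders) {
 *   return createHttpHeaders(compatHeaders.toJson({ preserveCase: true }));
 * }
 * ```
 *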
*/ function toPipelineResponse(compatResponse) { const extendedCompatResponse = compatResponse; const response = extendedCompatResponse[originalResponse]; const headers = (0, core_rest_pipeline_1$1.createHttpHeaders)(compatResponse.headers.toJson({ preserveCase: true })); if (response) { response.headers = headers; return response; } else return Object.assign(Object.assign({}, compatResponse), { headers, request: (0, util_js_1$3.toPipelineRequest)(compatResponse.request) }); } } }); //#endregion //#region node_modules/.deno/@azure+core-http-compat@2.3.0/node_modules/@azure/core-http-compat/dist/commonjs/extendedClient.js var require_extendedClient = __commonJS({ "node_modules/.deno/@azure+core-http-compat@2.3.0/node_modules/@azure/core-http-compat/dist/commonjs/extendedClient.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.ExtendedServiceClient = void 0; const disableKeepAlivePolicy_js_1$1 = require_disableKeepAlivePolicy(); const core_rest_pipeline_1 = require_commonjs$7(); const core_client_1 = require_commonjs$5(); const response_js_1$2 = require_response(); /** * Client to provide compatibility between core V1 & V2. */ var ExtendedServiceClient = class extends core_client_1.ServiceClient { constructor(options) { var _a$2, _b$1; super(options); if (((_a$2 = options.keepAliveOptions) === null || _a$2 === void 0 ? void 0 : _a$2.enable) === false && !(0, disableKeepAlivePolicy_js_1$1.pipelineContainsDisableKeepAlivePolicy)(this.pipeline)) this.pipeline.addPolicy((0, disableKeepAlivePolicy_js_1$1.createDisableKeepAlivePolicy)()); if (((_b$1 = options.redirectOptions) === null || _b$1 === void 0 ? void 0 : _b$1.handleRedirects) === false) this.pipeline.removePolicy({ name: core_rest_pipeline_1.redirectPolicyName }); } /** * Compatible send operation request function. * * @param operationArguments - Operation arguments * @param operationSpec - Operation Spec * @returns */ async sendOperationRequest(operationArguments, operationSpec) { var _a$2; const userProvidedCallBack = (_a$2 = operationArguments === null || operationArguments === void 0 ? void 0 : operationArguments.options) === null || _a$2 === void 0 ? 
void 0 : _a$2.onResponse; let lastResponse; function onResponse(rawResponse, flatResponse, error) { lastResponse = rawResponse; if (userProvidedCallBack) userProvidedCallBack(rawResponse, flatResponse, error); } operationArguments.options = Object.assign(Object.assign({}, operationArguments.options), { onResponse }); const result = await super.sendOperationRequest(operationArguments, operationSpec); if (lastResponse) Object.defineProperty(result, "_response", { value: (0, response_js_1$2.toCompatResponse)(lastResponse) }); return result; } }; exports.ExtendedServiceClient = ExtendedServiceClient; } }); //#endregion //#region node_modules/.deno/@azure+core-http-compat@2.3.0/node_modules/@azure/core-http-compat/dist/commonjs/policies/requestPolicyFactoryPolicy.js var require_requestPolicyFactoryPolicy = __commonJS({ "node_modules/.deno/@azure+core-http-compat@2.3.0/node_modules/@azure/core-http-compat/dist/commonjs/policies/requestPolicyFactoryPolicy.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.requestPolicyFactoryPolicyName = exports.HttpPipelineLogLevel = void 0; exports.createRequestPolicyFactoryPolicy = createRequestPolicyFactoryPolicy; const util_js_1$2 = require_util$1(); const response_js_1$1 = require_response(); /** * An enum for compatibility with RequestPolicy */ var HttpPipelineLogLevel; (function(HttpPipelineLogLevel$1) { HttpPipelineLogLevel$1[HttpPipelineLogLevel$1["ERROR"] = 1] = "ERROR"; HttpPipelineLogLevel$1[HttpPipelineLogLevel$1["INFO"] = 3] = "INFO"; HttpPipelineLogLevel$1[HttpPipelineLogLevel$1["OFF"] = 0] = "OFF"; HttpPipelineLogLevel$1[HttpPipelineLogLevel$1["WARNING"] = 2] = "WARNING"; })(HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = HttpPipelineLogLevel = {})); const mockRequestPolicyOptions = { log(_logLevel, _message) {}, shouldLog(_logLevel) { return false; } }; /** * The name of the RequestPolicyFactoryPolicy */ exports.requestPolicyFactoryPolicyName = "RequestPolicyFactoryPolicy"; /** * A policy that wraps policies written for core-http. * @param factories - An array of `RequestPolicyFactory` objects from a core-http pipeline */ function createRequestPolicyFactoryPolicy(factories) { const orderedFactories = factories.slice().reverse(); return { name: exports.requestPolicyFactoryPolicyName, async sendRequest(request, next) { let httpPipeline = { async sendRequest(httpRequest) { const response$1 = await next((0, util_js_1$2.toPipelineRequest)(httpRequest)); return (0, response_js_1$1.toCompatResponse)(response$1, { createProxy: true }); } }; for (const factory of orderedFactories) httpPipeline = factory.create(httpPipeline, mockRequestPolicyOptions); const webResourceLike = (0, util_js_1$2.toWebResourceLike)(request, { createProxy: true }); const response = await httpPipeline.sendRequest(webResourceLike); return (0, response_js_1$1.toPipelineResponse)(response); } }; } } }); //#endregion //#region node_modules/.deno/@azure+core-http-compat@2.3.0/node_modules/@azure/core-http-compat/dist/commonjs/httpClientAdapter.js var require_httpClientAdapter = __commonJS({ "node_modules/.deno/@azure+core-http-compat@2.3.0/node_modules/@azure/core-http-compat/dist/commonjs/httpClientAdapter.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.convertHttpClient = convertHttpClient; const response_js_1 = require_response(); const util_js_1$1 = require_util$1(); /** * Converts a RequestPolicy based HttpClient to a PipelineRequest based HttpClient. 
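 *
 * A hedged usage sketch (`legacyHttpClient` stands in for a real core-http
 * HttpClient implementation):
 *
 * ```ts
 * import { convertHttpClient, ExtendedServiceClient } from "@azure/core-http-compat";
 *
 * declare const legacyHttpClient: Parameters<typeof convertHttpClient>[0];
 *
 * const client = new ExtendedServiceClient({
 *   endpoint: "https://example.org",
 *   httpClient: convertHttpClient(legacyHttpClient),
 * });
 * ```
 *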
* @param requestPolicyClient - A HttpClient compatible with core-http * @returns A HttpClient compatible with core-rest-pipeline */ function convertHttpClient(requestPolicyClient) { return { sendRequest: async (request) => { const response = await requestPolicyClient.sendRequest((0, util_js_1$1.toWebResourceLike)(request, { createProxy: true })); return (0, response_js_1.toPipelineResponse)(response); } }; } } }); //#endregion //#region node_modules/.deno/@azure+core-http-compat@2.3.0/node_modules/@azure/core-http-compat/dist/commonjs/index.js var require_commonjs$4 = __commonJS({ "node_modules/.deno/@azure+core-http-compat@2.3.0/node_modules/@azure/core-http-compat/dist/commonjs/index.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.toHttpHeadersLike = exports.convertHttpClient = exports.disableKeepAlivePolicyName = exports.HttpPipelineLogLevel = exports.createRequestPolicyFactoryPolicy = exports.requestPolicyFactoryPolicyName = exports.ExtendedServiceClient = void 0; /** * A Shim Library that provides compatibility between Core V1 & V2 Packages. * * @packageDocumentation */ var extendedClient_js_1 = require_extendedClient(); Object.defineProperty(exports, "ExtendedServiceClient", { enumerable: true, get: function() { return extendedClient_js_1.ExtendedServiceClient; } }); var requestPolicyFactoryPolicy_js_1 = require_requestPolicyFactoryPolicy(); Object.defineProperty(exports, "requestPolicyFactoryPolicyName", { enumerable: true, get: function() { return requestPolicyFactoryPolicy_js_1.requestPolicyFactoryPolicyName; } }); Object.defineProperty(exports, "createRequestPolicyFactoryPolicy", { enumerable: true, get: function() { return requestPolicyFactoryPolicy_js_1.createRequestPolicyFactoryPolicy; } }); Object.defineProperty(exports, "HttpPipelineLogLevel", { enumerable: true, get: function() { return requestPolicyFactoryPolicy_js_1.HttpPipelineLogLevel; } }); var disableKeepAlivePolicy_js_1 = require_disableKeepAlivePolicy(); Object.defineProperty(exports, "disableKeepAlivePolicyName", { enumerable: true, get: function() { return disableKeepAlivePolicy_js_1.disableKeepAlivePolicyName; } }); var httpClientAdapter_js_1 = require_httpClientAdapter(); Object.defineProperty(exports, "convertHttpClient", { enumerable: true, get: function() { return httpClientAdapter_js_1.convertHttpClient; } }); var util_js_1 = require_util$1(); Object.defineProperty(exports, "toHttpHeadersLike", { enumerable: true, get: function() { return util_js_1.toHttpHeadersLike; } }); } }); //#endregion //#region node_modules/.deno/fast-xml-parser@5.2.3/node_modules/fast-xml-parser/lib/fxp.cjs var require_fxp = __commonJS({ "node_modules/.deno/fast-xml-parser@5.2.3/node_modules/fast-xml-parser/lib/fxp.cjs"(exports, module) { (() => { "use strict"; var t = { d: (e$1, n$1) => { for (var i$1 in n$1) t.o(n$1, i$1) && !t.o(e$1, i$1) && Object.defineProperty(e$1, i$1, { enumerable: !0, get: n$1[i$1] }); }, o: (t$1, e$1) => Object.prototype.hasOwnProperty.call(t$1, e$1), r: (t$1) => { "undefined" != typeof Symbol && Symbol.toStringTag && Object.defineProperty(t$1, Symbol.toStringTag, { value: "Module" }), Object.defineProperty(t$1, "__esModule", { value: !0 }); } }, e = {}; t.r(e), t.d(e, { XMLBuilder: () => ft, XMLParser: () => st, XMLValidator: () => mt }); const n = ":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD", i = new RegExp("^[" + n + "][" + n + 
"\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$"); function s$1(t$1, e$1) { const n$1 = []; let i$1 = e$1.exec(t$1); for (; i$1;) { const s$2 = []; s$2.startIndex = e$1.lastIndex - i$1[0].length; const r$1 = i$1.length; for (let t$2 = 0; t$2 < r$1; t$2++) s$2.push(i$1[t$2]); n$1.push(s$2), i$1 = e$1.exec(t$1); } return n$1; } const r = function(t$1) { return !(null == i.exec(t$1)); }, o = { allowBooleanAttributes: !1, unpairedTags: [] }; function a(t$1, e$1) { e$1 = Object.assign({}, o, e$1); const n$1 = []; let i$1 = !1, s$2 = !1; "" === t$1[0] && (t$1 = t$1.substr(1)); for (let o$1 = 0; o$1 < t$1.length; o$1++) if ("<" === t$1[o$1] && "?" === t$1[o$1 + 1]) { if (o$1 += 2, o$1 = u(t$1, o$1), o$1.err) return o$1; } else { if ("<" !== t$1[o$1]) { if (l(t$1[o$1])) continue; return x("InvalidChar", "char '" + t$1[o$1] + "' is not expected.", N(t$1, o$1)); } { let a$1 = o$1; if (o$1++, "!" === t$1[o$1]) { o$1 = h$1(t$1, o$1); continue; } { let d$2 = !1; "/" === t$1[o$1] && (d$2 = !0, o$1++); let f$1 = ""; for (; o$1 < t$1.length && ">" !== t$1[o$1] && " " !== t$1[o$1] && " " !== t$1[o$1] && "\n" !== t$1[o$1] && "\r" !== t$1[o$1]; o$1++) f$1 += t$1[o$1]; if (f$1 = f$1.trim(), "/" === f$1[f$1.length - 1] && (f$1 = f$1.substring(0, f$1.length - 1), o$1--), !r(f$1)) { let e$2; return e$2 = 0 === f$1.trim().length ? "Invalid space after '<'." : "Tag '" + f$1 + "' is an invalid name.", x("InvalidTag", e$2, N(t$1, o$1)); } const p$1 = c(t$1, o$1); if (!1 === p$1) return x("InvalidAttr", "Attributes for '" + f$1 + "' have open quote.", N(t$1, o$1)); let b$1 = p$1.value; if (o$1 = p$1.index, "/" === b$1[b$1.length - 1]) { const n$2 = o$1 - b$1.length; b$1 = b$1.substring(0, b$1.length - 1); const s$3 = g(b$1, e$1); if (!0 !== s$3) return x(s$3.err.code, s$3.err.msg, N(t$1, n$2 + s$3.err.line)); i$1 = !0; } else if (d$2) { if (!p$1.tagClosed) return x("InvalidTag", "Closing tag '" + f$1 + "' doesn't have proper closing.", N(t$1, o$1)); if (b$1.trim().length > 0) return x("InvalidTag", "Closing tag '" + f$1 + "' can't have attributes or invalid starting.", N(t$1, a$1)); if (0 === n$1.length) return x("InvalidTag", "Closing tag '" + f$1 + "' has not been opened.", N(t$1, a$1)); { const e$2 = n$1.pop(); if (f$1 !== e$2.tagName) { let n$2 = N(t$1, e$2.tagStartPos); return x("InvalidTag", "Expected closing tag '" + e$2.tagName + "' (opened in line " + n$2.line + ", col " + n$2.col + ") instead of closing tag '" + f$1 + "'.", N(t$1, a$1)); } 0 == n$1.length && (s$2 = !0); } } else { const r$1 = g(b$1, e$1); if (!0 !== r$1) return x(r$1.err.code, r$1.err.msg, N(t$1, o$1 - b$1.length + r$1.err.line)); if (!0 === s$2) return x("InvalidXml", "Multiple possible root nodes found.", N(t$1, o$1)); -1 !== e$1.unpairedTags.indexOf(f$1) || n$1.push({ tagName: f$1, tagStartPos: a$1 }), i$1 = !0; } for (o$1++; o$1 < t$1.length; o$1++) if ("<" === t$1[o$1]) { if ("!" === t$1[o$1 + 1]) { o$1++, o$1 = h$1(t$1, o$1); continue; } if ("?" !== t$1[o$1 + 1]) break; if (o$1 = u(t$1, ++o$1), o$1.err) return o$1; } else if ("&" === t$1[o$1]) { const e$2 = m$1(t$1, o$1); if (-1 == e$2) return x("InvalidChar", "char '&' is not expected.", N(t$1, o$1)); o$1 = e$2; } else if (!0 === s$2 && !l(t$1[o$1])) return x("InvalidXml", "Extra text at the end", N(t$1, o$1)); "<" === t$1[o$1] && o$1--; } } } return i$1 ? 1 == n$1.length ? 
x("InvalidTag", "Unclosed tag '" + n$1[0].tagName + "'.", N(t$1, n$1[0].tagStartPos)) : !(n$1.length > 0) || x("InvalidXml", "Invalid '" + JSON.stringify(n$1.map((t$2) => t$2.tagName), null, 4).replace(/\r?\n/g, "") + "' found.", { line: 1, col: 1 }) : x("InvalidXml", "Start tag expected.", 1); } function l(t$1) { return " " === t$1 || " " === t$1 || "\n" === t$1 || "\r" === t$1; } function u(t$1, e$1) { const n$1 = e$1; for (; e$1 < t$1.length; e$1++) if ("?" != t$1[e$1] && " " != t$1[e$1]); else { const i$1 = t$1.substr(n$1, e$1 - n$1); if (e$1 > 5 && "xml" === i$1) return x("InvalidXml", "XML declaration allowed only at the start of the document.", N(t$1, e$1)); if ("?" == t$1[e$1] && ">" == t$1[e$1 + 1]) { e$1++; break; } } return e$1; } function h$1(t$1, e$1) { if (t$1.length > e$1 + 5 && "-" === t$1[e$1 + 1] && "-" === t$1[e$1 + 2]) { for (e$1 += 3; e$1 < t$1.length; e$1++) if ("-" === t$1[e$1] && "-" === t$1[e$1 + 1] && ">" === t$1[e$1 + 2]) { e$1 += 2; break; } } else if (t$1.length > e$1 + 8 && "D" === t$1[e$1 + 1] && "O" === t$1[e$1 + 2] && "C" === t$1[e$1 + 3] && "T" === t$1[e$1 + 4] && "Y" === t$1[e$1 + 5] && "P" === t$1[e$1 + 6] && "E" === t$1[e$1 + 7]) { let n$1 = 1; for (e$1 += 8; e$1 < t$1.length; e$1++) if ("<" === t$1[e$1]) n$1++; else if (">" === t$1[e$1] && (n$1--, 0 === n$1)) break; } else if (t$1.length > e$1 + 9 && "[" === t$1[e$1 + 1] && "C" === t$1[e$1 + 2] && "D" === t$1[e$1 + 3] && "A" === t$1[e$1 + 4] && "T" === t$1[e$1 + 5] && "A" === t$1[e$1 + 6] && "[" === t$1[e$1 + 7]) { for (e$1 += 8; e$1 < t$1.length; e$1++) if ("]" === t$1[e$1] && "]" === t$1[e$1 + 1] && ">" === t$1[e$1 + 2]) { e$1 += 2; break; } } return e$1; } const d$1 = "\"", f = "'"; function c(t$1, e$1) { let n$1 = "", i$1 = "", s$2 = !1; for (; e$1 < t$1.length; e$1++) { if (t$1[e$1] === d$1 || t$1[e$1] === f) "" === i$1 ? 
i$1 = t$1[e$1] : i$1 !== t$1[e$1] || (i$1 = ""); else if (">" === t$1[e$1] && "" === i$1) { s$2 = !0; break; } n$1 += t$1[e$1]; } return "" === i$1 && { value: n$1, index: e$1, tagClosed: s$2 }; } const p = new RegExp("(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['\"])(([\\s\\S])*?)\\5)?", "g"); function g(t$1, e$1) { const n$1 = s$1(t$1, p), i$1 = {}; for (let t$2 = 0; t$2 < n$1.length; t$2++) { if (0 === n$1[t$2][1].length) return x("InvalidAttr", "Attribute '" + n$1[t$2][2] + "' has no space in starting.", E(n$1[t$2])); if (void 0 !== n$1[t$2][3] && void 0 === n$1[t$2][4]) return x("InvalidAttr", "Attribute '" + n$1[t$2][2] + "' is without value.", E(n$1[t$2])); if (void 0 === n$1[t$2][3] && !e$1.allowBooleanAttributes) return x("InvalidAttr", "boolean attribute '" + n$1[t$2][2] + "' is not allowed.", E(n$1[t$2])); const s$2 = n$1[t$2][2]; if (!b(s$2)) return x("InvalidAttr", "Attribute '" + s$2 + "' is an invalid name.", E(n$1[t$2])); if (i$1.hasOwnProperty(s$2)) return x("InvalidAttr", "Attribute '" + s$2 + "' is repeated.", E(n$1[t$2])); i$1[s$2] = 1; } return !0; } function m$1(t$1, e$1) { if (";" === t$1[++e$1]) return -1; if ("#" === t$1[e$1]) return function(t$2, e$2) { let n$2 = /\d/; for ("x" === t$2[e$2] && (e$2++, n$2 = /[\da-fA-F]/); e$2 < t$2.length; e$2++) { if (";" === t$2[e$2]) return e$2; if (!t$2[e$2].match(n$2)) break; } return -1; }(t$1, ++e$1); let n$1 = 0; for (; e$1 < t$1.length; e$1++, n$1++) if (!(t$1[e$1].match(/\w/) && n$1 < 20)) { if (";" === t$1[e$1]) break; return -1; } return e$1; } function x(t$1, e$1, n$1) { return { err: { code: t$1, msg: e$1, line: n$1.line || n$1, col: n$1.col } }; } function b(t$1) { return r(t$1); } function N(t$1, e$1) { const n$1 = t$1.substring(0, e$1).split(/\r?\n/); return { line: n$1.length, col: n$1[n$1.length - 1].length + 1 }; } function E(t$1) { return t$1.startIndex + t$1[1].length; } const v = { preserveOrder: !1, attributeNamePrefix: "@_", attributesGroupName: !1, textNodeName: "#text", ignoreAttributes: !0, removeNSPrefix: !1, allowBooleanAttributes: !1, parseTagValue: !0, parseAttributeValue: !1, trimValues: !0, cdataPropName: !1, numberParseOptions: { hex: !0, leadingZeros: !0, eNotation: !0 }, tagValueProcessor: function(t$1, e$1) { return e$1; }, attributeValueProcessor: function(t$1, e$1) { return e$1; }, stopNodes: [], alwaysCreateTextNode: !1, isArray: () => !1, commentPropName: !1, unpairedTags: [], processEntities: !0, htmlEntities: !1, ignoreDeclaration: !1, ignorePiTags: !1, transformTagName: !1, transformAttributeName: !1, updateTag: function(t$1, e$1, n$1) { return t$1; }, captureMetaData: !1 }; let y$1; y$1 = "function" != typeof Symbol ? "@@xmlMetadata" : Symbol("XML Node Metadata"); class T { constructor(t$1) { this.tagname = t$1, this.child = [], this[":@"] = {}; } add(t$1, e$1) { "__proto__" === t$1 && (t$1 = "#__proto__"), this.child.push({ [t$1]: e$1 }); } addChild(t$1, e$1) { "__proto__" === t$1.tagname && (t$1.tagname = "#__proto__"), t$1[":@"] && Object.keys(t$1[":@"]).length > 0 ? 
this.child.push({ [t$1.tagname]: t$1.child, ":@": t$1[":@"] }) : this.child.push({ [t$1.tagname]: t$1.child }), void 0 !== e$1 && (this.child[this.child.length - 1][y$1] = { startIndex: e$1 }); } static getMetaDataSymbol() { return y$1; } } function w$1(t$1, e$1) { const n$1 = {}; if ("O" !== t$1[e$1 + 3] || "C" !== t$1[e$1 + 4] || "T" !== t$1[e$1 + 5] || "Y" !== t$1[e$1 + 6] || "P" !== t$1[e$1 + 7] || "E" !== t$1[e$1 + 8]) throw new Error("Invalid Tag instead of DOCTYPE"); { e$1 += 9; let i$1 = 1, s$2 = !1, r$1 = !1, o$1 = ""; for (; e$1 < t$1.length; e$1++) if ("<" !== t$1[e$1] || r$1) if (">" === t$1[e$1]) { if (r$1 ? "-" === t$1[e$1 - 1] && "-" === t$1[e$1 - 2] && (r$1 = !1, i$1--) : i$1--, 0 === i$1) break; } else "[" === t$1[e$1] ? s$2 = !0 : o$1 += t$1[e$1]; else { if (s$2 && C(t$1, "!ENTITY", e$1)) { let i$2, s$3; e$1 += 7, [i$2, s$3, e$1] = O(t$1, e$1 + 1), -1 === s$3.indexOf("&") && (n$1[i$2] = { regx: RegExp(`&${i$2};`, "g"), val: s$3 }); } else if (s$2 && C(t$1, "!ELEMENT", e$1)) { e$1 += 8; const { index: n$2 } = S(t$1, e$1 + 1); e$1 = n$2; } else if (s$2 && C(t$1, "!ATTLIST", e$1)) e$1 += 8; else if (s$2 && C(t$1, "!NOTATION", e$1)) { e$1 += 9; const { index: n$2 } = A(t$1, e$1 + 1); e$1 = n$2; } else { if (!C(t$1, "!--", e$1)) throw new Error("Invalid DOCTYPE"); r$1 = !0; } i$1++, o$1 = ""; } if (0 !== i$1) throw new Error("Unclosed DOCTYPE"); } return { entities: n$1, i: e$1 }; } const P = (t$1, e$1) => { for (; e$1 < t$1.length && /\s/.test(t$1[e$1]);) e$1++; return e$1; }; function O(t$1, e$1) { e$1 = P(t$1, e$1); let n$1 = ""; for (; e$1 < t$1.length && !/\s/.test(t$1[e$1]) && "\"" !== t$1[e$1] && "'" !== t$1[e$1];) n$1 += t$1[e$1], e$1++; if ($(n$1), e$1 = P(t$1, e$1), "SYSTEM" === t$1.substring(e$1, e$1 + 6).toUpperCase()) throw new Error("External entities are not supported"); if ("%" === t$1[e$1]) throw new Error("Parameter entities are not supported"); let i$1 = ""; return [e$1, i$1] = I(t$1, e$1, "entity"), [ n$1, i$1, --e$1 ]; } function A(t$1, e$1) { e$1 = P(t$1, e$1); let n$1 = ""; for (; e$1 < t$1.length && !/\s/.test(t$1[e$1]);) n$1 += t$1[e$1], e$1++; $(n$1), e$1 = P(t$1, e$1); const i$1 = t$1.substring(e$1, e$1 + 6).toUpperCase(); if ("SYSTEM" !== i$1 && "PUBLIC" !== i$1) throw new Error(`Expected SYSTEM or PUBLIC, found "${i$1}"`); e$1 += i$1.length, e$1 = P(t$1, e$1); let s$2 = null, r$1 = null; if ("PUBLIC" === i$1) [e$1, s$2] = I(t$1, e$1, "publicIdentifier"), "\"" !== t$1[e$1 = P(t$1, e$1)] && "'" !== t$1[e$1] || ([e$1, r$1] = I(t$1, e$1, "systemIdentifier")); else if ("SYSTEM" === i$1 && ([e$1, r$1] = I(t$1, e$1, "systemIdentifier"), !r$1)) throw new Error("Missing mandatory system identifier for SYSTEM notation"); return { notationName: n$1, publicIdentifier: s$2, systemIdentifier: r$1, index: --e$1 }; } function I(t$1, e$1, n$1) { let i$1 = ""; const s$2 = t$1[e$1]; if ("\"" !== s$2 && "'" !== s$2) throw new Error(`Expected quoted string, found "${s$2}"`); for (e$1++; e$1 < t$1.length && t$1[e$1] !== s$2;) i$1 += t$1[e$1], e$1++; if (t$1[e$1] !== s$2) throw new Error(`Unterminated ${n$1} value`); return [++e$1, i$1]; } function S(t$1, e$1) { e$1 = P(t$1, e$1); let n$1 = ""; for (; e$1 < t$1.length && !/\s/.test(t$1[e$1]);) n$1 += t$1[e$1], e$1++; if (!$(n$1)) throw new Error(`Invalid element name: "${n$1}"`); let i$1 = ""; if ("E" === t$1[e$1 = P(t$1, e$1)] && C(t$1, "MPTY", e$1)) e$1 += 6; else if ("A" === t$1[e$1] && C(t$1, "NY", e$1)) e$1 += 4; else { if ("(" !== t$1[e$1]) throw new Error(`Invalid Element Expression, found "${t$1[e$1]}"`); for 
(e$1++; e$1 < t$1.length && ")" !== t$1[e$1];) i$1 += t$1[e$1], e$1++; if (")" !== t$1[e$1]) throw new Error("Unterminated content model"); } return { elementName: n$1, contentModel: i$1.trim(), index: e$1 }; } function C(t$1, e$1, n$1) { for (let i$1 = 0; i$1 < e$1.length; i$1++) if (e$1[i$1] !== t$1[n$1 + i$1 + 1]) return !1; return !0; } function $(t$1) { if (r(t$1)) return t$1; throw new Error(`Invalid entity name ${t$1}`); } const j = /^[-+]?0x[a-fA-F0-9]+$/, D = /^([\-\+])?(0*)([0-9]*(\.[0-9]*)?)$/, V = { hex: !0, leadingZeros: !0, decimalPoint: ".", eNotation: !0 }; const M = /^([-+])?(0*)(\d*(\.\d*)?[eE][-\+]?\d+)$/; function _(t$1) { return "function" == typeof t$1 ? t$1 : Array.isArray(t$1) ? (e$1) => { for (const n$1 of t$1) { if ("string" == typeof n$1 && e$1 === n$1) return !0; if (n$1 instanceof RegExp && n$1.test(e$1)) return !0; } } : () => !1; } class k { constructor(t$1) { this.options = t$1, this.currentNode = null, this.tagsNodeStack = [], this.docTypeEntities = {}, this.lastEntities = { apos: { regex: /&(apos|#39|#x27);/g, val: "'" }, gt: { regex: /&(gt|#62|#x3E);/g, val: ">" }, lt: { regex: /&(lt|#60|#x3C);/g, val: "<" }, quot: { regex: /&(quot|#34|#x22);/g, val: "\"" } }, this.ampEntity = { regex: /&(amp|#38|#x26);/g, val: "&" }, this.htmlEntities = { space: { regex: /&(nbsp|#160);/g, val: " " }, cent: { regex: /&(cent|#162);/g, val: "¢" }, pound: { regex: /&(pound|#163);/g, val: "£" }, yen: { regex: /&(yen|#165);/g, val: "¥" }, euro: { regex: /&(euro|#8364);/g, val: "€" }, copyright: { regex: /&(copy|#169);/g, val: "©" }, reg: { regex: /&(reg|#174);/g, val: "®" }, inr: { regex: /&(inr|#8377);/g, val: "₹" }, num_dec: { regex: /&#([0-9]{1,7});/g, val: (t$2, e$1) => String.fromCodePoint(Number.parseInt(e$1, 10)) }, num_hex: { regex: /&#x([0-9a-fA-F]{1,6});/g, val: (t$2, e$1) => String.fromCodePoint(Number.parseInt(e$1, 16)) } }, this.addExternalEntities = F, this.parseXml = X, this.parseTextData = L, this.resolveNameSpace = B, this.buildAttributesMap = G, this.isItStopNode = Z, this.replaceEntitiesValue = R, this.readStopNodeData = J, this.saveTextToParentTag = q, this.addChild = Y, this.ignoreAttributesFn = _(this.options.ignoreAttributes); } } function F(t$1) { const e$1 = Object.keys(t$1); for (let n$1 = 0; n$1 < e$1.length; n$1++) { const i$1 = e$1[n$1]; this.lastEntities[i$1] = { regex: new RegExp("&" + i$1 + ";", "g"), val: t$1[i$1] }; } } function L(t$1, e$1, n$1, i$1, s$2, r$1, o$1) { if (void 0 !== t$1 && (this.options.trimValues && !i$1 && (t$1 = t$1.trim()), t$1.length > 0)) { o$1 || (t$1 = this.replaceEntitiesValue(t$1)); const i$2 = this.options.tagValueProcessor(e$1, t$1, n$1, s$2, r$1); return null == i$2 ? t$1 : typeof i$2 != typeof t$1 || i$2 !== t$1 ? i$2 : this.options.trimValues || t$1.trim() === t$1 ? H(t$1, this.options.parseTagValue, this.options.numberParseOptions) : t$1; } } function B(t$1) { if (this.options.removeNSPrefix) { const e$1 = t$1.split(":"), n$1 = "/" === t$1.charAt(0) ? 
"/" : ""; if ("xmlns" === e$1[0]) return ""; 2 === e$1.length && (t$1 = n$1 + e$1[1]); } return t$1; } const U = new RegExp("([^\\s=]+)\\s*(=\\s*(['\"])([\\s\\S]*?)\\3)?", "gm"); function G(t$1, e$1, n$1) { if (!0 !== this.options.ignoreAttributes && "string" == typeof t$1) { const n$2 = s$1(t$1, U), i$1 = n$2.length, r$1 = {}; for (let t$2 = 0; t$2 < i$1; t$2++) { const i$2 = this.resolveNameSpace(n$2[t$2][1]); if (this.ignoreAttributesFn(i$2, e$1)) continue; let s$2 = n$2[t$2][4], o$1 = this.options.attributeNamePrefix + i$2; if (i$2.length) if (this.options.transformAttributeName && (o$1 = this.options.transformAttributeName(o$1)), "__proto__" === o$1 && (o$1 = "#__proto__"), void 0 !== s$2) { this.options.trimValues && (s$2 = s$2.trim()), s$2 = this.replaceEntitiesValue(s$2); const t$3 = this.options.attributeValueProcessor(i$2, s$2, e$1); r$1[o$1] = null == t$3 ? s$2 : typeof t$3 != typeof s$2 || t$3 !== s$2 ? t$3 : H(s$2, this.options.parseAttributeValue, this.options.numberParseOptions); } else this.options.allowBooleanAttributes && (r$1[o$1] = !0); } if (!Object.keys(r$1).length) return; if (this.options.attributesGroupName) { const t$2 = {}; return t$2[this.options.attributesGroupName] = r$1, t$2; } return r$1; } } const X = function(t$1) { t$1 = t$1.replace(/\r\n?/g, "\n"); const e$1 = new T("!xml"); let n$1 = e$1, i$1 = "", s$2 = ""; for (let r$1 = 0; r$1 < t$1.length; r$1++) if ("<" === t$1[r$1]) if ("/" === t$1[r$1 + 1]) { const e$2 = W(t$1, ">", r$1, "Closing Tag is not closed."); let o$1 = t$1.substring(r$1 + 2, e$2).trim(); if (this.options.removeNSPrefix) { const t$2 = o$1.indexOf(":"); -1 !== t$2 && (o$1 = o$1.substr(t$2 + 1)); } this.options.transformTagName && (o$1 = this.options.transformTagName(o$1)), n$1 && (i$1 = this.saveTextToParentTag(i$1, n$1, s$2)); const a$1 = s$2.substring(s$2.lastIndexOf(".") + 1); if (o$1 && -1 !== this.options.unpairedTags.indexOf(o$1)) throw new Error(`Unpaired tag can not be used as closing tag: `); let l$1 = 0; a$1 && -1 !== this.options.unpairedTags.indexOf(a$1) ? (l$1 = s$2.lastIndexOf(".", s$2.lastIndexOf(".") - 1), this.tagsNodeStack.pop()) : l$1 = s$2.lastIndexOf("."), s$2 = s$2.substring(0, l$1), n$1 = this.tagsNodeStack.pop(), i$1 = "", r$1 = e$2; } else if ("?" === t$1[r$1 + 1]) { let e$2 = z(t$1, r$1, !1, "?>"); if (!e$2) throw new Error("Pi Tag is not closed."); if (i$1 = this.saveTextToParentTag(i$1, n$1, s$2), this.options.ignoreDeclaration && "?xml" === e$2.tagName || this.options.ignorePiTags); else { const t$2 = new T(e$2.tagName); t$2.add(this.options.textNodeName, ""), e$2.tagName !== e$2.tagExp && e$2.attrExpPresent && (t$2[":@"] = this.buildAttributesMap(e$2.tagExp, s$2, e$2.tagName)), this.addChild(n$1, t$2, s$2, r$1); } r$1 = e$2.closeIndex + 1; } else if ("!--" === t$1.substr(r$1 + 1, 3)) { const e$2 = W(t$1, "-->", r$1 + 4, "Comment is not closed."); if (this.options.commentPropName) { const o$1 = t$1.substring(r$1 + 4, e$2 - 2); i$1 = this.saveTextToParentTag(i$1, n$1, s$2), n$1.add(this.options.commentPropName, [{ [this.options.textNodeName]: o$1 }]); } r$1 = e$2; } else if ("!D" === t$1.substr(r$1 + 1, 2)) { const e$2 = w$1(t$1, r$1); this.docTypeEntities = e$2.entities, r$1 = e$2.i; } else if ("![" === t$1.substr(r$1 + 1, 2)) { const e$2 = W(t$1, "]]>", r$1, "CDATA is not closed.") - 2, o$1 = t$1.substring(r$1 + 9, e$2); i$1 = this.saveTextToParentTag(i$1, n$1, s$2); let a$1 = this.parseTextData(o$1, n$1.tagname, s$2, !0, !1, !0, !0); a$1 ??= "", this.options.cdataPropName ? 
n$1.add(this.options.cdataPropName, [{ [this.options.textNodeName]: o$1 }]) : n$1.add(this.options.textNodeName, a$1), r$1 = e$2 + 2; } else { let o$1 = z(t$1, r$1, this.options.removeNSPrefix), a$1 = o$1.tagName; const l$1 = o$1.rawTagName; let u$1 = o$1.tagExp, h$2 = o$1.attrExpPresent, d$2 = o$1.closeIndex; this.options.transformTagName && (a$1 = this.options.transformTagName(a$1)), n$1 && i$1 && "!xml" !== n$1.tagname && (i$1 = this.saveTextToParentTag(i$1, n$1, s$2, !1)); const f$1 = n$1; f$1 && -1 !== this.options.unpairedTags.indexOf(f$1.tagname) && (n$1 = this.tagsNodeStack.pop(), s$2 = s$2.substring(0, s$2.lastIndexOf("."))), a$1 !== e$1.tagname && (s$2 += s$2 ? "." + a$1 : a$1); const c$1 = r$1; if (this.isItStopNode(this.options.stopNodes, s$2, a$1)) { let e$2 = ""; if (u$1.length > 0 && u$1.lastIndexOf("/") === u$1.length - 1) "/" === a$1[a$1.length - 1] ? (a$1 = a$1.substr(0, a$1.length - 1), s$2 = s$2.substr(0, s$2.length - 1), u$1 = a$1) : u$1 = u$1.substr(0, u$1.length - 1), r$1 = o$1.closeIndex; else if (-1 !== this.options.unpairedTags.indexOf(a$1)) r$1 = o$1.closeIndex; else { const n$2 = this.readStopNodeData(t$1, l$1, d$2 + 1); if (!n$2) throw new Error(`Unexpected end of ${l$1}`); r$1 = n$2.i, e$2 = n$2.tagContent; } const i$2 = new T(a$1); a$1 !== u$1 && h$2 && (i$2[":@"] = this.buildAttributesMap(u$1, s$2, a$1)), e$2 && (e$2 = this.parseTextData(e$2, a$1, s$2, !0, h$2, !0, !0)), s$2 = s$2.substr(0, s$2.lastIndexOf(".")), i$2.add(this.options.textNodeName, e$2), this.addChild(n$1, i$2, s$2, c$1); } else { if (u$1.length > 0 && u$1.lastIndexOf("/") === u$1.length - 1) { "/" === a$1[a$1.length - 1] ? (a$1 = a$1.substr(0, a$1.length - 1), s$2 = s$2.substr(0, s$2.length - 1), u$1 = a$1) : u$1 = u$1.substr(0, u$1.length - 1), this.options.transformTagName && (a$1 = this.options.transformTagName(a$1)); const t$2 = new T(a$1); a$1 !== u$1 && h$2 && (t$2[":@"] = this.buildAttributesMap(u$1, s$2, a$1)), this.addChild(n$1, t$2, s$2, c$1), s$2 = s$2.substr(0, s$2.lastIndexOf(".")); } else { const t$2 = new T(a$1); this.tagsNodeStack.push(n$1), a$1 !== u$1 && h$2 && (t$2[":@"] = this.buildAttributesMap(u$1, s$2, a$1)), this.addChild(n$1, t$2, s$2, c$1), n$1 = t$2; } i$1 = "", r$1 = d$2; } } else i$1 += t$1[r$1]; return e$1.child; }; function Y(t$1, e$1, n$1, i$1) { this.options.captureMetaData || (i$1 = void 0); const s$2 = this.options.updateTag(e$1.tagname, n$1, e$1[":@"]); !1 === s$2 || ("string" == typeof s$2 ? (e$1.tagname = s$2, t$1.addChild(e$1, i$1)) : t$1.addChild(e$1, i$1)); } const R = function(t$1) { if (this.options.processEntities) { for (let e$1 in this.docTypeEntities) { const n$1 = this.docTypeEntities[e$1]; t$1 = t$1.replace(n$1.regx, n$1.val); } for (let e$1 in this.lastEntities) { const n$1 = this.lastEntities[e$1]; t$1 = t$1.replace(n$1.regex, n$1.val); } if (this.options.htmlEntities) for (let e$1 in this.htmlEntities) { const n$1 = this.htmlEntities[e$1]; t$1 = t$1.replace(n$1.regex, n$1.val); } t$1 = t$1.replace(this.ampEntity.regex, this.ampEntity.val); } return t$1; }; function q(t$1, e$1, n$1, i$1) { return t$1 && (void 0 === i$1 && (i$1 = 0 === e$1.child.length), void 0 !== (t$1 = this.parseTextData(t$1, e$1.tagname, n$1, !1, !!e$1[":@"] && 0 !== Object.keys(e$1[":@"]).length, i$1)) && "" !== t$1 && e$1.add(this.options.textNodeName, t$1), t$1 = ""), t$1; } function Z(t$1, e$1, n$1) { const i$1 = "*." 
+ n$1; for (const n$2 in t$1) { const s$2 = t$1[n$2]; if (i$1 === s$2 || e$1 === s$2) return !0; } return !1; } function W(t$1, e$1, n$1, i$1) { const s$2 = t$1.indexOf(e$1, n$1); if (-1 === s$2) throw new Error(i$1); return s$2 + e$1.length - 1; } function z(t$1, e$1, n$1, i$1 = ">") { const s$2 = function(t$2, e$2, n$2 = ">") { let i$2, s$3 = ""; for (let r$2 = e$2; r$2 < t$2.length; r$2++) { let e$3 = t$2[r$2]; if (i$2) e$3 === i$2 && (i$2 = ""); else if ("\"" === e$3 || "'" === e$3) i$2 = e$3; else if (e$3 === n$2[0]) { if (!n$2[1]) return { data: s$3, index: r$2 }; if (t$2[r$2 + 1] === n$2[1]) return { data: s$3, index: r$2 }; } else "\t" === e$3 && (e$3 = " "); s$3 += e$3; } }(t$1, e$1 + 1, i$1); if (!s$2) return; let r$1 = s$2.data; const o$1 = s$2.index, a$1 = r$1.search(/\s/); let l$1 = r$1, u$1 = !0; -1 !== a$1 && (l$1 = r$1.substring(0, a$1), r$1 = r$1.substring(a$1 + 1).trimStart()); const h$2 = l$1; if (n$1) { const t$2 = l$1.indexOf(":"); -1 !== t$2 && (l$1 = l$1.substr(t$2 + 1), u$1 = l$1 !== s$2.data.substr(t$2 + 1)); } return { tagName: l$1, tagExp: r$1, closeIndex: o$1, attrExpPresent: u$1, rawTagName: h$2 }; } function J(t$1, e$1, n$1) { const i$1 = n$1; let s$2 = 1; for (; n$1 < t$1.length; n$1++) if ("<" === t$1[n$1]) if ("/" === t$1[n$1 + 1]) { const r$1 = W(t$1, ">", n$1, `${e$1} is not closed`); if (t$1.substring(n$1 + 2, r$1).trim() === e$1 && (s$2--, 0 === s$2)) return { tagContent: t$1.substring(i$1, n$1), i: r$1 }; n$1 = r$1; } else if ("?" === t$1[n$1 + 1]) n$1 = W(t$1, "?>", n$1 + 1, "StopNode is not closed."); else if ("!--" === t$1.substr(n$1 + 1, 3)) n$1 = W(t$1, "-->", n$1 + 3, "StopNode is not closed."); else if ("![" === t$1.substr(n$1 + 1, 2)) n$1 = W(t$1, "]]>", n$1, "StopNode is not closed.") - 2; else { const i$2 = z(t$1, n$1, ">"); i$2 && ((i$2 && i$2.tagName) === e$1 && "/" !== i$2.tagExp[i$2.tagExp.length - 1] && s$2++, n$1 = i$2.closeIndex); } } function H(t$1, e$1, n$1) { if (e$1 && "string" == typeof t$1) { const e$2 = t$1.trim(); return "true" === e$2 || "false" !== e$2 && function(t$2, e$3 = {}) { if (e$3 = Object.assign({}, V, e$3), !t$2 || "string" != typeof t$2) return t$2; let n$2 = t$2.trim(); if (void 0 !== e$3.skipLike && e$3.skipLike.test(n$2)) return t$2; if ("0" === t$2) return 0; if (e$3.hex && j.test(n$2)) return function(t$3) { if (parseInt) return parseInt(t$3, 16); if (Number.parseInt) return Number.parseInt(t$3, 16); if (window && window.parseInt) return window.parseInt(t$3, 16); throw new Error("parseInt, Number.parseInt, window.parseInt are not supported"); }(n$2); if (-1 !== n$2.search(/.+[eE].+/)) return function(t$3, e$4, n$3) { if (!n$3.eNotation) return t$3; const i$2 = e$4.match(M); if (i$2) { let s$2 = i$2[1] || ""; const r$1 = -1 === i$2[3].indexOf("e") ? "E" : "e", o$1 = i$2[2], a$1 = s$2 ? t$3[o$1.length + 1] === r$1 : t$3[o$1.length] === r$1; return o$1.length > 1 && a$1 ? t$3 : 1 !== o$1.length || !i$2[3].startsWith(`.${r$1}`) && i$2[3][0] !== r$1 ? n$3.leadingZeros && !a$1 ? (e$4 = (i$2[1] || "") + i$2[3], Number(e$4)) : t$3 : Number(e$4); } return t$3; }(t$2, n$2, e$3); { const s$2 = D.exec(n$2); if (s$2) { const r$1 = s$2[1] || "", o$1 = s$2[2]; let a$1 = (i$1 = s$2[3]) && -1 !== i$1.indexOf(".") ? ("." === (i$1 = i$1.replace(/0+$/, "")) ? i$1 = "0" : "." === i$1[0] ? i$1 = "0" + i$1 : "." === i$1[i$1.length - 1] && (i$1 = i$1.substring(0, i$1.length - 1)), i$1) : i$1; const l$1 = r$1 ? "." === t$2[o$1.length + 1] : "." 
=== t$2[o$1.length]; if (!e$3.leadingZeros && (o$1.length > 1 || 1 === o$1.length && !l$1)) return t$2; { const i$2 = Number(n$2), s$3 = String(i$2); if (0 === i$2 || -0 === i$2) return i$2; if (-1 !== s$3.search(/[eE]/)) return e$3.eNotation ? i$2 : t$2; if (-1 !== n$2.indexOf(".")) return "0" === s$3 || s$3 === a$1 || s$3 === `${r$1}${a$1}` ? i$2 : t$2; let l$2 = o$1 ? a$1 : n$2; return o$1 ? l$2 === s$3 || r$1 + l$2 === s$3 ? i$2 : t$2 : l$2 === s$3 || l$2 === r$1 + s$3 ? i$2 : t$2; } } return t$2; } var i$1; }(t$1, n$1); } return void 0 !== t$1 ? t$1 : ""; } const K = T.getMetaDataSymbol(); function Q(t$1, e$1) { return tt(t$1, e$1); } function tt(t$1, e$1, n$1) { let i$1; const s$2 = {}; for (let r$1 = 0; r$1 < t$1.length; r$1++) { const o$1 = t$1[r$1], a$1 = et(o$1); let l$1 = ""; if (l$1 = void 0 === n$1 ? a$1 : n$1 + "." + a$1, a$1 === e$1.textNodeName) void 0 === i$1 ? i$1 = o$1[a$1] : i$1 += "" + o$1[a$1]; else { if (void 0 === a$1) continue; if (o$1[a$1]) { let t$2 = tt(o$1[a$1], e$1, l$1); const n$2 = it(t$2, e$1); void 0 !== o$1[K] && (t$2[K] = o$1[K]), o$1[":@"] ? nt(t$2, o$1[":@"], l$1, e$1) : 1 !== Object.keys(t$2).length || void 0 === t$2[e$1.textNodeName] || e$1.alwaysCreateTextNode ? 0 === Object.keys(t$2).length && (e$1.alwaysCreateTextNode ? t$2[e$1.textNodeName] = "" : t$2 = "") : t$2 = t$2[e$1.textNodeName], void 0 !== s$2[a$1] && s$2.hasOwnProperty(a$1) ? (Array.isArray(s$2[a$1]) || (s$2[a$1] = [s$2[a$1]]), s$2[a$1].push(t$2)) : e$1.isArray(a$1, l$1, n$2) ? s$2[a$1] = [t$2] : s$2[a$1] = t$2; } } } return "string" == typeof i$1 ? i$1.length > 0 && (s$2[e$1.textNodeName] = i$1) : void 0 !== i$1 && (s$2[e$1.textNodeName] = i$1), s$2; } function et(t$1) { const e$1 = Object.keys(t$1); for (let t$2 = 0; t$2 < e$1.length; t$2++) { const n$1 = e$1[t$2]; if (":@" !== n$1) return n$1; } } function nt(t$1, e$1, n$1, i$1) { if (e$1) { const s$2 = Object.keys(e$1), r$1 = s$2.length; for (let o$1 = 0; o$1 < r$1; o$1++) { const r$2 = s$2[o$1]; i$1.isArray(r$2, n$1 + "." + r$2, !0, !0) ? t$1[r$2] = [e$1[r$2]] : t$1[r$2] = e$1[r$2]; } } } function it(t$1, e$1) { const { textNodeName: n$1 } = e$1, i$1 = Object.keys(t$1).length; return 0 === i$1 || !(1 !== i$1 || !t$1[n$1] && "boolean" != typeof t$1[n$1] && 0 !== t$1[n$1]); } class st { constructor(t$1) { this.externalEntities = {}, this.options = function(t$2) { return Object.assign({}, v, t$2); }(t$1); } parse(t$1, e$1) { if ("string" == typeof t$1); else { if (!t$1.toString) throw new Error("XML data is accepted in String or Bytes[] form."); t$1 = t$1.toString(); } if (e$1) { !0 === e$1 && (e$1 = {}); const n$2 = a(t$1, e$1); if (!0 !== n$2) throw Error(`${n$2.err.msg}:${n$2.err.line}:${n$2.err.col}`); } const n$1 = new k(this.options); n$1.addExternalEntities(this.externalEntities); const i$1 = n$1.parseXml(t$1); return this.options.preserveOrder || void 0 === i$1 ? i$1 : Q(i$1, this.options); } addEntity(t$1, e$1) { if (-1 !== e$1.indexOf("&")) throw new Error("Entity value can't have '&'"); if (-1 !== t$1.indexOf("&") || -1 !== t$1.indexOf(";")) throw new Error("An entity must be set without '&' and ';'. Eg. 
use '#xD' for '&#xD;'"); if ("&" === e$1) throw new Error("An entity with value '&' is not permitted"); this.externalEntities[t$1] = e$1; } static getMetaDataSymbol() { return T.getMetaDataSymbol(); } } function rt(t$1, e$1) { let n$1 = ""; return e$1.format && e$1.indentBy.length > 0 && (n$1 = "\n"), ot(t$1, e$1, "", n$1); } function ot(t$1, e$1, n$1, i$1) { let s$2 = "", r$1 = !1; for (let o$1 = 0; o$1 < t$1.length; o$1++) { const a$1 = t$1[o$1], l$1 = at(a$1); if (void 0 === l$1) continue; let u$1 = ""; if (u$1 = 0 === n$1.length ? l$1 : `${n$1}.${l$1}`, l$1 === e$1.textNodeName) { let t$2 = a$1[l$1]; ut(u$1, e$1) || (t$2 = e$1.tagValueProcessor(l$1, t$2), t$2 = ht(t$2, e$1)), r$1 && (s$2 += i$1), s$2 += t$2, r$1 = !1; continue; } if (l$1 === e$1.cdataPropName) { r$1 && (s$2 += i$1), s$2 += `<![CDATA[${a$1[l$1][0][e$1.textNodeName]}]]>`, r$1 = !1; continue; } if (l$1 === e$1.commentPropName) { s$2 += i$1 + `\x3c!--${a$1[l$1][0][e$1.textNodeName]}--\x3e`, r$1 = !0; continue; } if ("?" === l$1[0]) { const t$2 = lt(a$1[":@"], e$1), n$2 = "?xml" === l$1 ? "" : i$1; let o$2 = a$1[l$1][0][e$1.textNodeName]; o$2 = 0 !== o$2.length ? " " + o$2 : "", s$2 += n$2 + `<${l$1}${o$2}${t$2}?>`, r$1 = !0; continue; } let h$2 = i$1; "" !== h$2 && (h$2 += e$1.indentBy); const d$2 = i$1 + `<${l$1}${lt(a$1[":@"], e$1)}`, f$1 = ot(a$1[l$1], e$1, u$1, h$2); -1 !== e$1.unpairedTags.indexOf(l$1) ? e$1.suppressUnpairedNode ? s$2 += d$2 + ">" : s$2 += d$2 + "/>" : f$1 && 0 !== f$1.length || !e$1.suppressEmptyNode ? f$1 && f$1.endsWith(">") ? s$2 += d$2 + `>${f$1}${i$1}</${l$1}>` : (s$2 += d$2 + ">", f$1 && "" !== i$1 && (f$1.includes("/>") || f$1.includes("</")) ? s$2 += i$1 + e$1.indentBy + f$1 + i$1 : s$2 += f$1, s$2 += `</${l$1}>`) : s$2 += d$2 + "/>", r$1 = !0; } return s$2; } function at(t$1) { const e$1 = Object.keys(t$1); for (let n$1 = 0; n$1 < e$1.length; n$1++) { const i$1 = e$1[n$1]; if (t$1.hasOwnProperty(i$1) && ":@" !== i$1) return i$1; } } function lt(t$1, e$1) { let n$1 = ""; if (t$1 && !e$1.ignoreAttributes) for (let i$1 in t$1) { if (!t$1.hasOwnProperty(i$1)) continue; let s$2 = e$1.attributeValueProcessor(i$1, t$1[i$1]); s$2 = ht(s$2, e$1), !0 === s$2 && e$1.suppressBooleanAttributes ? n$1 += ` ${i$1.substr(e$1.attributeNamePrefix.length)}` : n$1 += ` ${i$1.substr(e$1.attributeNamePrefix.length)}="${s$2}"`; } return n$1; } function ut(t$1, e$1) { let n$1 = (t$1 = t$1.substr(0, t$1.length - e$1.textNodeName.length - 1)).substr(t$1.lastIndexOf(".") + 1); for (let i$1 in e$1.stopNodes) if (e$1.stopNodes[i$1] === t$1 || e$1.stopNodes[i$1] === "*." + n$1) return !0; return !1; } function ht(t$1, e$1) { if (t$1 && t$1.length > 0 && e$1.processEntities) for (let n$1 = 0; n$1 < e$1.entities.length; n$1++) { const i$1 = e$1.entities[n$1]; t$1 = t$1.replace(i$1.regex, i$1.val); } return t$1; } const dt = { attributeNamePrefix: "@_", attributesGroupName: !1, textNodeName: "#text", ignoreAttributes: !0, cdataPropName: !1, format: !1, indentBy: "  ", suppressEmptyNode: !1, suppressUnpairedNode: !0, suppressBooleanAttributes: !0, tagValueProcessor: function(t$1, e$1) { return e$1; }, attributeValueProcessor: function(t$1, e$1) { return e$1; }, preserveOrder: !1, commentPropName: !1, unpairedTags: [], entities: [ { regex: new RegExp("&", "g"), val: "&amp;" }, { regex: new RegExp(">", "g"), val: "&gt;" }, { regex: new RegExp("<", "g"), val: "&lt;" }, { regex: new RegExp("'", "g"), val: "&apos;" }, { regex: new RegExp("\"", "g"), val: "&quot;" } ], processEntities: !0, stopNodes: [], oneListGroup: !1 }; function ft(t$1) { this.options = Object.assign({}, dt, t$1), !0 === this.options.ignoreAttributes || this.options.attributesGroupName ? 
this.isAttribute = function() { return !1; } : (this.ignoreAttributesFn = _(this.options.ignoreAttributes), this.attrPrefixLen = this.options.attributeNamePrefix.length, this.isAttribute = gt), this.processTextOrObjNode = ct, this.options.format ? (this.indentate = pt, this.tagEndChar = ">\n", this.newLine = "\n") : (this.indentate = function() { return ""; }, this.tagEndChar = ">", this.newLine = ""); } function ct(t$1, e$1, n$1, i$1) { const s$2 = this.j2x(t$1, n$1 + 1, i$1.concat(e$1)); return void 0 !== t$1[this.options.textNodeName] && 1 === Object.keys(t$1).length ? this.buildTextValNode(t$1[this.options.textNodeName], e$1, s$2.attrStr, n$1) : this.buildObjectNode(s$2.val, e$1, s$2.attrStr, n$1); } function pt(t$1) { return this.options.indentBy.repeat(t$1); } function gt(t$1) { return !(!t$1.startsWith(this.options.attributeNamePrefix) || t$1 === this.options.textNodeName) && t$1.substr(this.attrPrefixLen); } ft.prototype.build = function(t$1) { return this.options.preserveOrder ? rt(t$1, this.options) : (Array.isArray(t$1) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1 && (t$1 = { [this.options.arrayNodeName]: t$1 }), this.j2x(t$1, 0, []).val); }, ft.prototype.j2x = function(t$1, e$1, n$1) { let i$1 = "", s$2 = ""; const r$1 = n$1.join("."); for (let o$1 in t$1) if (Object.prototype.hasOwnProperty.call(t$1, o$1)) if (void 0 === t$1[o$1]) this.isAttribute(o$1) && (s$2 += ""); else if (null === t$1[o$1]) this.isAttribute(o$1) || o$1 === this.options.cdataPropName ? s$2 += "" : "?" === o$1[0] ? s$2 += this.indentate(e$1) + "<" + o$1 + "?" + this.tagEndChar : s$2 += this.indentate(e$1) + "<" + o$1 + "/" + this.tagEndChar; else if (t$1[o$1] instanceof Date) s$2 += this.buildTextValNode(t$1[o$1], o$1, "", e$1); else if ("object" != typeof t$1[o$1]) { const n$2 = this.isAttribute(o$1); if (n$2 && !this.ignoreAttributesFn(n$2, r$1)) i$1 += this.buildAttrPairStr(n$2, "" + t$1[o$1]); else if (!n$2) if (o$1 === this.options.textNodeName) { let e$2 = this.options.tagValueProcessor(o$1, "" + t$1[o$1]); s$2 += this.replaceEntitiesValue(e$2); } else s$2 += this.buildTextValNode(t$1[o$1], o$1, "", e$1); } else if (Array.isArray(t$1[o$1])) { const i$2 = t$1[o$1].length; let r$2 = "", a$1 = ""; for (let l$1 = 0; l$1 < i$2; l$1++) { const i$3 = t$1[o$1][l$1]; if (void 0 === i$3); else if (null === i$3) "?" === o$1[0] ? s$2 += this.indentate(e$1) + "<" + o$1 + "?" 
+ this.tagEndChar : s$2 += this.indentate(e$1) + "<" + o$1 + "/" + this.tagEndChar; else if ("object" == typeof i$3) if (this.options.oneListGroup) { const t$2 = this.j2x(i$3, e$1 + 1, n$1.concat(o$1)); r$2 += t$2.val, this.options.attributesGroupName && i$3.hasOwnProperty(this.options.attributesGroupName) && (a$1 += t$2.attrStr); } else r$2 += this.processTextOrObjNode(i$3, o$1, e$1, n$1); else if (this.options.oneListGroup) { let t$2 = this.options.tagValueProcessor(o$1, i$3); t$2 = this.replaceEntitiesValue(t$2), r$2 += t$2; } else r$2 += this.buildTextValNode(i$3, o$1, "", e$1); } this.options.oneListGroup && (r$2 = this.buildObjectNode(r$2, o$1, a$1, e$1)), s$2 += r$2; } else if (this.options.attributesGroupName && o$1 === this.options.attributesGroupName) { const e$2 = Object.keys(t$1[o$1]), n$2 = e$2.length; for (let s$3 = 0; s$3 < n$2; s$3++) i$1 += this.buildAttrPairStr(e$2[s$3], "" + t$1[o$1][e$2[s$3]]); } else s$2 += this.processTextOrObjNode(t$1[o$1], o$1, e$1, n$1); return { attrStr: i$1, val: s$2 }; }, ft.prototype.buildAttrPairStr = function(t$1, e$1) { return e$1 = this.options.attributeValueProcessor(t$1, "" + e$1), e$1 = this.replaceEntitiesValue(e$1), this.options.suppressBooleanAttributes && "true" === e$1 ? " " + t$1 : " " + t$1 + "=\"" + e$1 + "\""; }, ft.prototype.buildObjectNode = function(t$1, e$1, n$1, i$1) { if ("" === t$1) return "?" === e$1[0] ? this.indentate(i$1) + "<" + e$1 + n$1 + "?" + this.tagEndChar : this.indentate(i$1) + "<" + e$1 + n$1 + this.closeTag(e$1) + this.tagEndChar; { let s$2 = "</" + e$1 + this.tagEndChar, r$1 = ""; return "?" === e$1[0] && (r$1 = "?", s$2 = ""), !n$1 && "" !== n$1 || -1 !== t$1.indexOf("<") ? !1 !== this.options.commentPropName && e$1 === this.options.commentPropName && 0 === r$1.length ? this.indentate(i$1) + `\x3c!--${t$1}--\x3e` + this.newLine : this.indentate(i$1) + "<" + e$1 + n$1 + r$1 + this.tagEndChar + t$1 + this.indentate(i$1) + s$2 : this.indentate(i$1) + "<" + e$1 + n$1 + r$1 + ">" + t$1 + s$2; } }, ft.prototype.closeTag = function(t$1) { let e$1 = ""; return -1 !== this.options.unpairedTags.indexOf(t$1) ? this.options.suppressUnpairedNode || (e$1 = "/") : e$1 = this.options.suppressEmptyNode ? "/" : `></${t$1}`, e$1; }, ft.prototype.buildTextValNode = function(t$1, e$1, n$1, i$1) { if (!1 !== this.options.cdataPropName && e$1 === this.options.cdataPropName) return this.indentate(i$1) + `<![CDATA[${t$1}]]>` + this.newLine; if (!1 !== this.options.commentPropName && e$1 === this.options.commentPropName) return this.indentate(i$1) + `\x3c!--${t$1}--\x3e` + this.newLine; if ("?" === e$1[0]) return this.indentate(i$1) + "<" + e$1 + n$1 + "?" + this.tagEndChar; { let s$2 = this.options.tagValueProcessor(e$1, t$1); return s$2 = this.replaceEntitiesValue(s$2), "" === s$2 ? this.indentate(i$1) + "<" + e$1 + n$1 + this.closeTag(e$1) + this.tagEndChar : this.indentate(i$1) + "<" + e$1 + n$1 + ">" + s$2 + "</" + e$1 + this.tagEndChar; } }, ft.prototype.replaceEntitiesValue = function(t$1) { if (t$1 && t$1.length > 0 && this.options.processEntities) for (let e$1 = 0; e$1 < this.options.entities.length; e$1++) { const n$1 = this.options.entities[e$1]; t$1 = t$1.replace(n$1.regex, n$1.val); } return t$1; }; const mt = { validate: a }; module.exports = e; })(); } }); //#endregion //#region node_modules/.deno/@azure+core-xml@1.4.5/node_modules/@azure/core-xml/dist/commonjs/xml.common.js var require_xml_common = __commonJS({ "node_modules/.deno/@azure+core-xml@1.4.5/node_modules/@azure/core-xml/dist/commonjs/xml.common.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.XML_CHARKEY = exports.XML_ATTRKEY = void 0; /** * Default key used to access the XML attributes. */ exports.XML_ATTRKEY = "$"; /** * Default key used to access the XML value content. 
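 *
 * Illustrative note (added): with the parser options used in xml.js below
 * (attributesGroupName = XML_ATTRKEY, textNodeName = XML_CHARKEY), an element such as
 * `<a b="c">text</a>` is surfaced roughly as `{ a: { $: { b: "c" }, _: "text" } }`.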
 */ exports.XML_CHARKEY = "_"; } }); //#endregion //#region node_modules/.deno/@azure+core-xml@1.4.5/node_modules/@azure/core-xml/dist/commonjs/xml.js var require_xml = __commonJS({ "node_modules/.deno/@azure+core-xml@1.4.5/node_modules/@azure/core-xml/dist/commonjs/xml.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.stringifyXML = stringifyXML; exports.parseXML = parseXML; const fast_xml_parser_1 = require_fxp(); const xml_common_js_1$1 = require_xml_common(); function getCommonOptions(options) { var _a$2; return { attributesGroupName: xml_common_js_1$1.XML_ATTRKEY, textNodeName: (_a$2 = options.xmlCharKey) !== null && _a$2 !== void 0 ? _a$2 : xml_common_js_1$1.XML_CHARKEY, ignoreAttributes: false, suppressBooleanAttributes: false }; } function getSerializerOptions(options = {}) { var _a$2, _b$1; return Object.assign(Object.assign({}, getCommonOptions(options)), { attributeNamePrefix: "@_", format: true, suppressEmptyNode: true, indentBy: "", rootNodeName: (_a$2 = options.rootName) !== null && _a$2 !== void 0 ? _a$2 : "root", cdataPropName: (_b$1 = options.cdataPropName) !== null && _b$1 !== void 0 ? _b$1 : "__cdata" }); } function getParserOptions(options = {}) { return Object.assign(Object.assign({}, getCommonOptions(options)), { parseAttributeValue: false, parseTagValue: false, attributeNamePrefix: "", stopNodes: options.stopNodes, processEntities: true, trimValues: false }); } /** * Converts given JSON object to XML string * @param obj - JSON object to be converted into XML string * @param opts - Options that govern the XML building of given JSON object * `rootName` indicates the name of the root element in the resulting XML */ function stringifyXML(obj, opts = {}) { const parserOptions = getSerializerOptions(opts); const j2x = new fast_xml_parser_1.XMLBuilder(parserOptions); const node = { [parserOptions.rootNodeName]: obj }; const xmlData = j2x.build(node); return `<?xml version="1.0" encoding="UTF-8" standalone="yes"?>${xmlData}`.replace(/\n/g, ""); } /** * Converts given XML string into JSON * @param str - String containing the XML content to be parsed into JSON * @param opts - Options that govern the parsing of given xml string * `includeRoot` indicates whether the root element is to be included or not in the output */ async function parseXML(str, opts = {}) { if (!str) throw new Error("Document is empty"); const validation = fast_xml_parser_1.XMLValidator.validate(str); if (validation !== true) throw validation; const parser = new fast_xml_parser_1.XMLParser(getParserOptions(opts)); const parsedXml = parser.parse(str); if (parsedXml["?xml"]) delete parsedXml["?xml"]; if (!opts.includeRoot) for (const key of Object.keys(parsedXml)) { const value = parsedXml[key]; return typeof value === "object" ? 
Object.assign({}, value) : value; } return parsedXml; } } }); //#endregion //#region node_modules/.deno/@azure+core-xml@1.4.5/node_modules/@azure/core-xml/dist/commonjs/index.js var require_commonjs$3 = __commonJS({ "node_modules/.deno/@azure+core-xml@1.4.5/node_modules/@azure/core-xml/dist/commonjs/index.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.XML_CHARKEY = exports.XML_ATTRKEY = exports.parseXML = exports.stringifyXML = void 0; var xml_js_1 = require_xml(); Object.defineProperty(exports, "stringifyXML", { enumerable: true, get: function() { return xml_js_1.stringifyXML; } }); Object.defineProperty(exports, "parseXML", { enumerable: true, get: function() { return xml_js_1.parseXML; } }); var xml_common_js_1 = require_xml_common(); Object.defineProperty(exports, "XML_ATTRKEY", { enumerable: true, get: function() { return xml_common_js_1.XML_ATTRKEY; } }); Object.defineProperty(exports, "XML_CHARKEY", { enumerable: true, get: function() { return xml_common_js_1.XML_CHARKEY; } }); } }); //#endregion //#region node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/logger.js var require_logger = __commonJS({ "node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/logger.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.logger = void 0; const logger_1 = require_commonjs$11(); /** * The `@azure/logger` configuration for this package. * @internal */ exports.logger = (0, logger_1.createClientLogger)("core-lro"); } }); //#endregion //#region node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/poller/constants.js var require_constants = __commonJS({ "node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/poller/constants.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.terminalStates = exports.POLL_INTERVAL_IN_MS = void 0; /** * The default time interval to wait before sending the next polling request. */ exports.POLL_INTERVAL_IN_MS = 2e3; /** * The closed set of terminal states. 
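 *
 * Illustrative use (added for clarity; mirrors the check in operation.js below):
 *
 *   const done = terminalStates.includes(status); // "running" keeps the poll loop going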
*/ exports.terminalStates = [ "succeeded", "canceled", "failed" ]; } }); //#endregion //#region node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/poller/operation.js var require_operation$2 = __commonJS({ "node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/poller/operation.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.pollOperation = exports.initOperation = exports.deserializeState = void 0; const logger_js_1$2 = require_logger(); const constants_js_1$2 = require_constants(); /** * Deserializes the state */ function deserializeState(serializedState) { try { return JSON.parse(serializedState).state; } catch (e) { throw new Error(`Unable to deserialize input state: ${serializedState}`); } } exports.deserializeState = deserializeState; function setStateError(inputs) { const { state: state$1, stateProxy, isOperationError: isOperationError$1 } = inputs; return (error) => { if (isOperationError$1(error)) { stateProxy.setError(state$1, error); stateProxy.setFailed(state$1); } throw error; }; } function appendReadableErrorMessage(currentMessage, innerMessage) { let message = currentMessage; if (message.slice(-1) !== ".") message = message + "."; return message + " " + innerMessage; } function simplifyError(err) { let message = err.message; let code = err.code; let curErr = err; while (curErr.innererror) { curErr = curErr.innererror; code = curErr.code; message = appendReadableErrorMessage(message, curErr.message); } return { code, message }; } function processOperationStatus(result) { const { state: state$1, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } = result; switch (status) { case "succeeded": { stateProxy.setSucceeded(state$1); break; } case "failed": { const err = getError === null || getError === void 0 ? void 0 : getError(response); let postfix = ""; if (err) { const { code, message } = simplifyError(err); postfix = `. ${code}. ${message}`; } const errStr = `The long-running operation has failed${postfix}`; stateProxy.setError(state$1, new Error(errStr)); stateProxy.setFailed(state$1); logger_js_1$2.logger.warning(errStr); break; } case "canceled": { stateProxy.setCanceled(state$1); break; } } if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state$1)) || isDone === void 0 && ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status)) stateProxy.setResult(state$1, buildResult({ response, state: state$1, processResult })); } function buildResult(inputs) { const { processResult, response, state: state$1 } = inputs; return processResult ? processResult(response, state$1) : response; } /** * Initiates the long-running operation. 
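 *
 * Shape sketch (inferred from the destructuring below; the names come from this file,
 * the literal values are illustrative only): `init` resolves with
 * `{ operationLocation?, resourceLocation?, metadata?, response }`, and `stateProxy`
 * is the accessor bundle that poller.js builds via `createStateProxy`. For example:
 *
 *   const state = await initOperation({
 *     init: async () => ({ response, operationLocation: "https://example.org/op/1" }),
 *     stateProxy,
 *     getOperationStatus: getStatusFromInitialResponse, // from http/operation.js
 *     setErrorAsResult: true,
 *   });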
*/ async function initOperation(inputs) { const { init: init$1, stateProxy, processResult, getOperationStatus: getOperationStatus$1, withOperationLocation, setErrorAsResult } = inputs; const { operationLocation, resourceLocation, metadata: metadata$1, response } = await init$1(); if (operationLocation) withOperationLocation === null || withOperationLocation === void 0 || withOperationLocation(operationLocation, false); const config = { metadata: metadata$1, operationLocation, resourceLocation }; logger_js_1$2.logger.verbose(`LRO: Operation description:`, config); const state$1 = stateProxy.initState(config); const status = getOperationStatus$1({ response, state: state$1, operationLocation }); processOperationStatus({ state: state$1, status, stateProxy, response, setErrorAsResult, processResult }); return state$1; } exports.initOperation = initOperation; async function pollOperationHelper(inputs) { const { poll, state: state$1, stateProxy, operationLocation, getOperationStatus: getOperationStatus$1, getResourceLocation: getResourceLocation$1, isOperationError: isOperationError$1, options } = inputs; const response = await poll(operationLocation, options).catch(setStateError({ state: state$1, stateProxy, isOperationError: isOperationError$1 })); const status = getOperationStatus$1(response, state$1); logger_js_1$2.logger.verbose(`LRO: Status:\n\tPolling from: ${state$1.config.operationLocation}\n\tOperation status: ${status}\n\tPolling status: ${constants_js_1$2.terminalStates.includes(status) ? "Stopped" : "Running"}`); if (status === "succeeded") { const resourceLocation = getResourceLocation$1(response, state$1); if (resourceLocation !== void 0) return { response: await poll(resourceLocation).catch(setStateError({ state: state$1, stateProxy, isOperationError: isOperationError$1 })), status }; } return { response, status }; } /** Polls the long-running operation. */ async function pollOperation(inputs) { const { poll, state: state$1, stateProxy, options, getOperationStatus: getOperationStatus$1, getResourceLocation: getResourceLocation$1, getOperationLocation: getOperationLocation$1, isOperationError: isOperationError$1, withOperationLocation, getPollingInterval, processResult, getError, updateState, setDelay, isDone, setErrorAsResult } = inputs; const { operationLocation } = state$1.config; if (operationLocation !== void 0) { const { response, status } = await pollOperationHelper({ poll, getOperationStatus: getOperationStatus$1, state: state$1, stateProxy, operationLocation, getResourceLocation: getResourceLocation$1, isOperationError: isOperationError$1, options }); processOperationStatus({ status, response, state: state$1, stateProxy, isDone, processResult, getError, setErrorAsResult }); if (!constants_js_1$2.terminalStates.includes(status)) { const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? void 0 : getPollingInterval(response); if (intervalInMs) setDelay(intervalInMs); const location = getOperationLocation$1 === null || getOperationLocation$1 === void 0 ? 
void 0 : getOperationLocation$1(response, state$1); if (location !== void 0) { const isUpdated = operationLocation !== location; state$1.config.operationLocation = location; withOperationLocation === null || withOperationLocation === void 0 || withOperationLocation(location, isUpdated); } else withOperationLocation === null || withOperationLocation === void 0 || withOperationLocation(operationLocation, false); } updateState === null || updateState === void 0 || updateState(state$1, response); } } exports.pollOperation = pollOperation; } }); //#endregion //#region node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/http/operation.js var require_operation$1 = __commonJS({ "node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/http/operation.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.pollHttpOperation = exports.isOperationError = exports.getResourceLocation = exports.getOperationStatus = exports.getOperationLocation = exports.initHttpOperation = exports.getStatusFromInitialResponse = exports.getErrorFromResponse = exports.parseRetryAfter = exports.inferLroMode = void 0; const operation_js_1$4 = require_operation$2(); const logger_js_1$1 = require_logger(); function getOperationLocationPollingUrl(inputs) { const { azureAsyncOperation, operationLocation } = inputs; return operationLocation !== null && operationLocation !== void 0 ? operationLocation : azureAsyncOperation; } function getLocationHeader(rawResponse) { return rawResponse.headers["location"]; } function getOperationLocationHeader(rawResponse) { return rawResponse.headers["operation-location"]; } function getAzureAsyncOperationHeader(rawResponse) { return rawResponse.headers["azure-asyncoperation"]; } function findResourceLocation(inputs) { var _a$2; const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; switch (requestMethod) { case "PUT": return requestPath; case "DELETE": return void 0; case "PATCH": return (_a$2 = getDefault()) !== null && _a$2 !== void 0 ? _a$2 : requestPath; default: return getDefault(); } function getDefault() { switch (resourceLocationConfig) { case "azure-async-operation": return void 0; case "original-uri": return requestPath; case "location": default: return location; } } } function inferLroMode(inputs) { const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs; const operationLocation = getOperationLocationHeader(rawResponse); const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); const location = getLocationHeader(rawResponse); const normalizedRequestMethod = requestMethod === null || requestMethod === void 0 ? void 0 : requestMethod.toLocaleUpperCase(); if (pollingUrl !== void 0) return { mode: "OperationLocation", operationLocation: pollingUrl, resourceLocation: findResourceLocation({ requestMethod: normalizedRequestMethod, location, requestPath, resourceLocationConfig }) }; else if (location !== void 0) return { mode: "ResourceLocation", operationLocation: location }; else if (normalizedRequestMethod === "PUT" && requestPath) return { mode: "Body", operationLocation: requestPath }; else return void 0; } exports.inferLroMode = inferLroMode; function transformStatus(inputs) { const { status, statusCode } = inputs; if (typeof status !== "string" && status !== void 0) throw new Error(`Polling was unsuccessful. 
Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`); switch (status === null || status === void 0 ? void 0 : status.toLocaleLowerCase()) { case void 0: return toOperationStatus(statusCode); case "succeeded": return "succeeded"; case "failed": return "failed"; case "running": case "accepted": case "started": case "canceling": case "cancelling": return "running"; case "canceled": case "cancelled": return "canceled"; default: { logger_js_1$1.logger.verbose(`LRO: unrecognized operation status: ${status}`); return status; } } } function getStatus(rawResponse) { var _a$2; const { status } = (_a$2 = rawResponse.body) !== null && _a$2 !== void 0 ? _a$2 : {}; return transformStatus({ status, statusCode: rawResponse.statusCode }); } function getProvisioningState(rawResponse) { var _a$2, _b$1; const { properties, provisioningState } = (_a$2 = rawResponse.body) !== null && _a$2 !== void 0 ? _a$2 : {}; const status = (_b$1 = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b$1 !== void 0 ? _b$1 : provisioningState; return transformStatus({ status, statusCode: rawResponse.statusCode }); } function toOperationStatus(statusCode) { if (statusCode === 202) return "running"; else if (statusCode < 300) return "succeeded"; else return "failed"; } function parseRetryAfter({ rawResponse }) { const retryAfter = rawResponse.headers["retry-after"]; if (retryAfter !== void 0) { const retryAfterInSeconds = parseInt(retryAfter); return isNaN(retryAfterInSeconds) ? calculatePollingIntervalFromDate(new Date(retryAfter)) : retryAfterInSeconds * 1e3; } return void 0; } exports.parseRetryAfter = parseRetryAfter; function getErrorFromResponse(response) { const error = accessBodyProperty(response, "error"); if (!error) { logger_js_1$1.logger.warning(`The long-running operation failed but there is no error property in the response's body`); return; } if (!error.code || !error.message) { logger_js_1$1.logger.warning(`The long-running operation failed but the error property in the response's body doesn't contain code or message`); return; } return error; } exports.getErrorFromResponse = getErrorFromResponse; function calculatePollingIntervalFromDate(retryAfterDate) { const timeNow = Math.floor(new Date().getTime()); const retryAfterTime = retryAfterDate.getTime(); if (timeNow < retryAfterTime) return retryAfterTime - timeNow; return void 0; } function getStatusFromInitialResponse(inputs) { const { response, state: state$1, operationLocation } = inputs; function helper() { var _a$2; const mode = (_a$2 = state$1.config.metadata) === null || _a$2 === void 0 ? void 0 : _a$2["mode"]; switch (mode) { case void 0: return toOperationStatus(response.rawResponse.statusCode); case "Body": return getOperationStatus(response, state$1); default: return "running"; } } const status = helper(); return status === "running" && operationLocation === void 0 ? "succeeded" : status; } exports.getStatusFromInitialResponse = getStatusFromInitialResponse; /** * Initiates the long-running operation. 
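 *
 * Hedged sketch of the `lro` argument (inferred from the calls below; the function
 * bodies are placeholders, not the Azure SDK's actual wiring): `sendInitialRequest()`
 * starts the operation, `sendPollRequest(url, options)` polls it, and
 * `requestMethod`/`requestPath` feed `inferLroMode`. For example:
 *
 *   const lro = {
 *     requestMethod: "PUT",
 *     requestPath: "/resource/1",
 *     sendInitialRequest: () => sendRequest(initialUrl),   // hypothetical helper
 *     sendPollRequest: (url, options) => sendRequest(url, options), // hypothetical helper
 *   };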
*/ async function initHttpOperation(inputs) { const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs; return (0, operation_js_1$4.initOperation)({ init: async () => { const response = await lro.sendInitialRequest(); const config = inferLroMode({ rawResponse: response.rawResponse, requestPath: lro.requestPath, requestMethod: lro.requestMethod, resourceLocationConfig }); return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, (config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}); }, stateProxy, processResult: processResult ? ({ flatResponse }, state$1) => processResult(flatResponse, state$1) : ({ flatResponse }) => flatResponse, getOperationStatus: getStatusFromInitialResponse, setErrorAsResult }); } exports.initHttpOperation = initHttpOperation; function getOperationLocation({ rawResponse }, state$1) { var _a$2; const mode = (_a$2 = state$1.config.metadata) === null || _a$2 === void 0 ? void 0 : _a$2["mode"]; switch (mode) { case "OperationLocation": return getOperationLocationPollingUrl({ operationLocation: getOperationLocationHeader(rawResponse), azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse) }); case "ResourceLocation": return getLocationHeader(rawResponse); case "Body": default: return void 0; } } exports.getOperationLocation = getOperationLocation; function getOperationStatus({ rawResponse }, state$1) { var _a$2; const mode = (_a$2 = state$1.config.metadata) === null || _a$2 === void 0 ? void 0 : _a$2["mode"]; switch (mode) { case "OperationLocation": return getStatus(rawResponse); case "ResourceLocation": return toOperationStatus(rawResponse.statusCode); case "Body": return getProvisioningState(rawResponse); default: throw new Error(`Internal error: Unexpected operation mode: ${mode}`); } } exports.getOperationStatus = getOperationStatus; function accessBodyProperty({ flatResponse, rawResponse }, prop) { var _a$2, _b$1; return (_a$2 = flatResponse === null || flatResponse === void 0 ? void 0 : flatResponse[prop]) !== null && _a$2 !== void 0 ? _a$2 : (_b$1 = rawResponse.body) === null || _b$1 === void 0 ? void 0 : _b$1[prop]; } function getResourceLocation(res, state$1) { const loc = accessBodyProperty(res, "resourceLocation"); if (loc && typeof loc === "string") state$1.config.resourceLocation = loc; return state$1.config.resourceLocation; } exports.getResourceLocation = getResourceLocation; function isOperationError(e) { return e.name === "RestError"; } exports.isOperationError = isOperationError; /** Polls the long-running operation. */ async function pollHttpOperation(inputs) { const { lro, stateProxy, options, processResult, updateState, setDelay, state: state$1, setErrorAsResult } = inputs; return (0, operation_js_1$4.pollOperation)({ state: state$1, stateProxy, setDelay, processResult: processResult ? 
({ flatResponse }, inputState) => processResult(flatResponse, inputState) : ({ flatResponse }) => flatResponse, getError: getErrorFromResponse, updateState, getPollingInterval: parseRetryAfter, getOperationLocation, getOperationStatus, isOperationError, getResourceLocation, options, poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions), setErrorAsResult }); } exports.pollHttpOperation = pollHttpOperation; } }); //#endregion //#region node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/poller/poller.js var require_poller$2 = __commonJS({ "node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/poller/poller.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.buildCreatePoller = void 0; const operation_js_1$3 = require_operation$2(); const constants_js_1$1 = require_constants(); const core_util_1 = require_commonjs$9(); const createStateProxy$1 = () => ({ initState: (config) => ({ status: "running", config }), setCanceled: (state$1) => state$1.status = "canceled", setError: (state$1, error) => state$1.error = error, setResult: (state$1, result) => state$1.result = result, setRunning: (state$1) => state$1.status = "running", setSucceeded: (state$1) => state$1.status = "succeeded", setFailed: (state$1) => state$1.status = "failed", getError: (state$1) => state$1.error, getResult: (state$1) => state$1.result, isCanceled: (state$1) => state$1.status === "canceled", isFailed: (state$1) => state$1.status === "failed", isRunning: (state$1) => state$1.status === "running", isSucceeded: (state$1) => state$1.status === "succeeded" }); /** * Returns a poller factory. */ function buildCreatePoller(inputs) { const { getOperationLocation: getOperationLocation$1, getStatusFromInitialResponse: getStatusFromInitialResponse$1, getStatusFromPollResponse, isOperationError: isOperationError$1, getResourceLocation: getResourceLocation$1, getPollingInterval, getError, resolveOnUnsuccessful } = inputs; return async ({ init: init$1, poll }, options) => { const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = constants_js_1$1.POLL_INTERVAL_IN_MS, restoreFrom } = options || {}; const stateProxy = createStateProxy$1(); const withOperationLocation = withOperationLocationCallback ? (() => { let called = false; return (operationLocation, isUpdated) => { if (isUpdated) withOperationLocationCallback(operationLocation); else if (!called) withOperationLocationCallback(operationLocation); called = true; }; })() : void 0; const state$1 = restoreFrom ? 
(0, operation_js_1$3.deserializeState)(restoreFrom) : await (0, operation_js_1$3.initOperation)({ init: init$1, stateProxy, processResult, getOperationStatus: getStatusFromInitialResponse$1, withOperationLocation, setErrorAsResult: !resolveOnUnsuccessful }); let resultPromise; const abortController$1 = new AbortController(); const handlers = new Map(); const handleProgressEvents = async () => handlers.forEach((h$1) => h$1(state$1)); const cancelErrMsg = "Operation was canceled"; let currentPollIntervalInMs = intervalInMs; const poller = { getOperationState: () => state$1, getResult: () => state$1.result, isDone: () => [ "succeeded", "failed", "canceled" ].includes(state$1.status), isStopped: () => resultPromise === void 0, stopPolling: () => { abortController$1.abort(); }, toString: () => JSON.stringify({ state: state$1 }), onProgress: (callback) => { const s$1 = Symbol(); handlers.set(s$1, callback); return () => handlers.delete(s$1); }, pollUntilDone: (pollOptions) => resultPromise !== null && resultPromise !== void 0 ? resultPromise : resultPromise = (async () => { const { abortSignal: inputAbortSignal } = pollOptions || {}; function abortListener() { abortController$1.abort(); } const abortSignal$1 = abortController$1.signal; if (inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.aborted) abortController$1.abort(); else if (!abortSignal$1.aborted) inputAbortSignal === null || inputAbortSignal === void 0 || inputAbortSignal.addEventListener("abort", abortListener, { once: true }); try { if (!poller.isDone()) { await poller.poll({ abortSignal: abortSignal$1 }); while (!poller.isDone()) { await (0, core_util_1.delay)(currentPollIntervalInMs, { abortSignal: abortSignal$1 }); await poller.poll({ abortSignal: abortSignal$1 }); } } } finally { inputAbortSignal === null || inputAbortSignal === void 0 || inputAbortSignal.removeEventListener("abort", abortListener); } if (resolveOnUnsuccessful) return poller.getResult(); else switch (state$1.status) { case "succeeded": return poller.getResult(); case "canceled": throw new Error(cancelErrMsg); case "failed": throw state$1.error; case "notStarted": case "running": throw new Error(`Polling completed without succeeding or failing`); } })().finally(() => { resultPromise = void 0; }), async poll(pollOptions) { if (resolveOnUnsuccessful) { if (poller.isDone()) return; } else switch (state$1.status) { case "succeeded": return; case "canceled": throw new Error(cancelErrMsg); case "failed": throw state$1.error; } await (0, operation_js_1$3.pollOperation)({ poll, state: state$1, stateProxy, getOperationLocation: getOperationLocation$1, isOperationError: isOperationError$1, withOperationLocation, getPollingInterval, getOperationStatus: getStatusFromPollResponse, getResourceLocation: getResourceLocation$1, processResult, getError, updateState, options: pollOptions, setDelay: (pollIntervalInMs) => { currentPollIntervalInMs = pollIntervalInMs; }, setErrorAsResult: !resolveOnUnsuccessful }); await handleProgressEvents(); if (!resolveOnUnsuccessful) switch (state$1.status) { case "canceled": throw new Error(cancelErrMsg); case "failed": throw state$1.error; } } }; return poller; }; } exports.buildCreatePoller = buildCreatePoller; } }); //#endregion //#region node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/http/poller.js var require_poller$1 = __commonJS({ "node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/http/poller.js"(exports) { 
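/*
 * Usage sketch (added for illustration, not part of the original bundle): `createHttpPoller`
 * defined in this module is the public entry point of @azure/core-lro v2. Given an assumed
 * `lro` object that implements `sendInitialRequest` and `sendPollRequest`, it is typically
 * driven like this:
 *
 * ```ts
 * const poller = await createHttpPoller(lro, { intervalInMs: 2000 });
 * const result = await poller.pollUntilDone();
 * ```
 */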
Object.defineProperty(exports, "__esModule", { value: true }); exports.createHttpPoller = void 0; const operation_js_1$2 = require_operation$1(); const poller_js_1$2 = require_poller$2(); /** * Creates a poller that can be used to poll a long-running operation. * @param lro - Description of the long-running operation * @param options - options to configure the poller * @returns an initialized poller */ async function createHttpPoller(lro, options) { const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false } = options || {}; return (0, poller_js_1$2.buildCreatePoller)({ getStatusFromInitialResponse: operation_js_1$2.getStatusFromInitialResponse, getStatusFromPollResponse: operation_js_1$2.getOperationStatus, isOperationError: operation_js_1$2.isOperationError, getOperationLocation: operation_js_1$2.getOperationLocation, getResourceLocation: operation_js_1$2.getResourceLocation, getPollingInterval: operation_js_1$2.parseRetryAfter, getError: operation_js_1$2.getErrorFromResponse, resolveOnUnsuccessful })({ init: async () => { const response = await lro.sendInitialRequest(); const config = (0, operation_js_1$2.inferLroMode)({ rawResponse: response.rawResponse, requestPath: lro.requestPath, requestMethod: lro.requestMethod, resourceLocationConfig }); return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, (config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {}); }, poll: lro.sendPollRequest }, { intervalInMs, withOperationLocation, restoreFrom, updateState, processResult: processResult ? 
({ flatResponse }, state$1) => processResult(flatResponse, state$1) : ({ flatResponse }) => flatResponse }); } exports.createHttpPoller = createHttpPoller; } }); //#endregion //#region node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js var require_operation = __commonJS({ "node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.GenericPollOperation = void 0; const operation_js_1$1 = require_operation$1(); const logger_js_1 = require_logger(); const createStateProxy = () => ({ initState: (config) => ({ config, isStarted: true }), setCanceled: (state$1) => state$1.isCancelled = true, setError: (state$1, error) => state$1.error = error, setResult: (state$1, result) => state$1.result = result, setRunning: (state$1) => state$1.isStarted = true, setSucceeded: (state$1) => state$1.isCompleted = true, setFailed: () => { /** empty body */ }, getError: (state$1) => state$1.error, getResult: (state$1) => state$1.result, isCanceled: (state$1) => !!state$1.isCancelled, isFailed: (state$1) => !!state$1.error, isRunning: (state$1) => !!state$1.isStarted, isSucceeded: (state$1) => Boolean(state$1.isCompleted && !state$1.isCancelled && !state$1.error) }); var GenericPollOperation = class { constructor(state$1, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { this.state = state$1; this.lro = lro; this.setErrorAsResult = setErrorAsResult; this.lroResourceLocationConfig = lroResourceLocationConfig; this.processResult = processResult; this.updateState = updateState; this.isDone = isDone; } setPollerConfig(pollerConfig) { this.pollerConfig = pollerConfig; } async update(options) { var _a$2; const stateProxy = createStateProxy(); if (!this.state.isStarted) this.state = Object.assign(Object.assign({}, this.state), await (0, operation_js_1$1.initHttpOperation)({ lro: this.lro, stateProxy, resourceLocationConfig: this.lroResourceLocationConfig, processResult: this.processResult, setErrorAsResult: this.setErrorAsResult })); const updateState = this.updateState; const isDone = this.isDone; if (!this.state.isCompleted && this.state.error === void 0) await (0, operation_js_1$1.pollHttpOperation)({ lro: this.lro, state: this.state, stateProxy, processResult: this.processResult, updateState: updateState ? (state$1, { rawResponse }) => updateState(state$1, rawResponse) : void 0, isDone: isDone ? ({ flatResponse }, state$1) => isDone(flatResponse, state$1) : void 0, options, setDelay: (intervalInMs) => { this.pollerConfig.intervalInMs = intervalInMs; }, setErrorAsResult: this.setErrorAsResult }); (_a$2 = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a$2 === void 0 || _a$2.call(options, this.state); return this; } async cancel() { logger_js_1.logger.error("`cancelOperation` is deprecated because it wasn't implemented"); return this; } /** * Serializes the Poller operation. 
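 *
 * A sketch of the intended round trip (client-side names assumed for illustration): the string
 * produced here can later be handed back through the `resumeFrom` option so a new poller
 * resumes where a previous one left off.
 *
 * ```ts
 * const serialized = poller.toString();
 * // later, possibly in another process:
 * const resumed = new LroEngine(lro, { resumeFrom: serialized });
 * await resumed.pollUntilDone();
 * ```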
*/ toString() { return JSON.stringify({ state: this.state }); } }; exports.GenericPollOperation = GenericPollOperation; } }); //#endregion //#region node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js var require_poller = __commonJS({ "node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.Poller = exports.PollerCancelledError = exports.PollerStoppedError = void 0; /** * When a poller is manually stopped through the `stopPolling` method, * the poller will be rejected with an instance of the PollerStoppedError. */ var PollerStoppedError = class PollerStoppedError extends Error { constructor(message) { super(message); this.name = "PollerStoppedError"; Object.setPrototypeOf(this, PollerStoppedError.prototype); } }; exports.PollerStoppedError = PollerStoppedError; /** * When the operation is cancelled, the poller will be rejected with an instance * of the PollerCancelledError. */ var PollerCancelledError = class PollerCancelledError extends Error { constructor(message) { super(message); this.name = "PollerCancelledError"; Object.setPrototypeOf(this, PollerCancelledError.prototype); } }; exports.PollerCancelledError = PollerCancelledError; /** * A class that represents the definition of a program that polls through consecutive requests * until it reaches a state of completion. * * A poller can be executed manually, request by request, by calling the `poll()` method repeatedly until its operation is completed. * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes. * Pollers can also request the cancellation of the ongoing process from whoever is providing the underlying long-running operation. * * ```ts * const poller = new MyPoller(); * * // Polling just once: * await poller.poll(); * * // We can try to cancel the request here, by calling: * // * // await poller.cancelOperation(); * // * * // Getting the final result: * const result = await poller.pollUntilDone(); * ``` * * The Poller is defined by two types, a type representing the state of the poller, which * must include a basic set of properties from `PollOperationState`, * and a return type defined by `TResult`, which can be anything. * * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type. * * ```ts * class Client { * public async makePoller(): Promise<PollerLike> { * const poller = new MyPoller({}); * // It might be preferred to return the poller after the first request is made, * // so that some information can be obtained right away. * await poller.poll(); * return poller; * } * } * * const poller: PollerLike = await myClient.makePoller(); * ``` * * A poller can be created through its constructor, then it can be polled until it's completed. * At any point in time, the state of the poller can be obtained without delay through the getOperationState method. * At any point in time, the intermediate forms of the result type can be requested without delay. * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned. 
* * ```ts * const poller = await myClient.makePoller(); * const state: MyOperationState = poller.getOperationState(); * * // The intermediate result can be obtained at any time. * const result: MyResult | undefined = poller.getResult(); * * // The final result can only be obtained after the poller finishes. * const finalResult: MyResult = await poller.pollUntilDone(); * ``` * */ var Poller = class { /** * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. * * When writing an implementation of a Poller, this implementation needs to deal with the initialization * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's * operation has already been defined, at least its basic properties. The code below shows how to approach * the definition of the constructor of a new custom poller. * * ```ts * export class MyPoller extends Poller { * constructor({ * // Anything you might need outside of the basics * }) { * let state: MyOperationState = { * privateProperty: "some private value", * publicProperty: "some public value", * }; * * const operation = { * state, * update, * cancel, * toString * } * * // Sending the operation to the parent's constructor. * super(operation); * * // You can assign more local properties here. * } * } * ``` * * Inside of this constructor, a new promise is created. This will be used to * tell the user when the poller finishes (see `pollUntilDone()`). The promise's * resolve and reject methods are also used internally to control when to resolve * or reject anyone waiting for the poller to finish. * * The constructor of a custom implementation of a poller is where any serialized version of * a previous poller's operation should be deserialized into the operation sent to the * base constructor. For example: * * ```ts * export class MyPoller extends Poller { * constructor( * baseOperation: string | undefined * ) { * let state: MyOperationState = {}; * if (baseOperation) { * state = { * ...JSON.parse(baseOperation).state, * ...state * }; * } * const operation = { * state, * // ... * } * super(operation); * } * } * ``` * * @param operation - Must contain the basic properties of `PollOperation`. */ constructor(operation) { /** controls whether to throw an error if the operation failed or was canceled. */ this.resolveOnUnsuccessful = false; this.stopped = true; this.pollProgressCallbacks = []; this.operation = operation; this.promise = new Promise((resolve, reject) => { this.resolve = resolve; this.reject = reject; }); this.promise.catch(() => {}); } /** * Starts a loop that will break only if the poller is done * or if the poller is stopped. */ async startPolling(pollOptions = {}) { if (this.stopped) this.stopped = false; while (!this.isStopped() && !this.isDone()) { await this.poll(pollOptions); await this.delay(); } } /** * pollOnce performs a single poll by calling the update method of the underlying * poll operation, making any relevant change effective. * * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. * * @param options - Optional properties passed to the operation's update method. */ async pollOnce(options = {}) { if (!this.isDone()) this.operation = await this.operation.update({ abortSignal: options.abortSignal, fireProgress: this.fireProgress.bind(this) }); this.processUpdatedState(); } /** * fireProgress calls the functions passed in via the poller's onProgress method. 
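 *
 * A usage sketch (illustration only): callbacks are registered through `onProgress`, which
 * returns an unsubscribe function.
 *
 * ```ts
 * const stop = poller.onProgress((state) => console.log(state));
 * // later, to stop receiving updates:
 * stop();
 * ```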
* * It loops over all of the callbacks received from onProgress, and executes them, sending them * the current operation state. * * @param state - The current operation state. */ fireProgress(state$1) { for (const callback of this.pollProgressCallbacks) callback(state$1); } /** * Invokes the underlying operation's cancel method. */ async cancelOnce(options = {}) { this.operation = await this.operation.cancel(options); } /** * Returns a promise that will resolve once a single polling request finishes. * It does this by calling the update method of the Poller's operation. * * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. * * @param options - Optional properties passed to the operation's update method. */ poll(options = {}) { if (!this.pollOncePromise) { this.pollOncePromise = this.pollOnce(options); const clearPollOncePromise = () => { this.pollOncePromise = void 0; }; this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); } return this.pollOncePromise; } processUpdatedState() { if (this.operation.state.error) { this.stopped = true; if (!this.resolveOnUnsuccessful) { this.reject(this.operation.state.error); throw this.operation.state.error; } } if (this.operation.state.isCancelled) { this.stopped = true; if (!this.resolveOnUnsuccessful) { const error = new PollerCancelledError("Operation was canceled"); this.reject(error); throw error; } } if (this.isDone() && this.resolve) this.resolve(this.getResult()); } /** * Returns a promise that will resolve once the underlying operation is completed. */ async pollUntilDone(pollOptions = {}) { if (this.stopped) this.startPolling(pollOptions).catch(this.reject); this.processUpdatedState(); return this.promise; } /** * Invokes the provided callback after each polling is completed, * sending the current state of the poller's operation. * * It returns a method that can be used to stop receiving updates on the given callback function. */ onProgress(callback) { this.pollProgressCallbacks.push(callback); return () => { this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); }; } /** * Returns true if the poller has finished polling. */ isDone() { const state$1 = this.operation.state; return Boolean(state$1.isCompleted || state$1.isCancelled || state$1.error); } /** * Stops the poller from continuing to poll. */ stopPolling() { if (!this.stopped) { this.stopped = true; if (this.reject) this.reject(new PollerStoppedError("This poller is already stopped")); } } /** * Returns true if the poller is stopped. */ isStopped() { return this.stopped; } /** * Attempts to cancel the underlying operation. * * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. * * If it's called again before it finishes, it will throw an error. * * @param options - Optional properties passed to the operation's update method. */ cancelOperation(options = {}) { if (!this.cancelPromise) this.cancelPromise = this.cancelOnce(options); else if (options.abortSignal) throw new Error("A cancel request is currently pending"); return this.cancelPromise; } /** * Returns the state of the operation. 
* * Even though TState will be the same type inside any of the methods of any extension of the Poller class, * implementations of the pollers can customize what's shared with the public by writing their own * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller * and a public type representing a safe-to-share subset of the properties of the internal state. * Their definition of getOperationState can then return their public type. * * Example: * * ```ts * // Let's say we have our poller's operation state defined as: * interface MyOperationState extends PollOperationState { * privateProperty?: string; * publicProperty?: string; * } * * // To allow us to have a true separation of public and private state, we have to define another interface: * interface PublicState extends PollOperationState { * publicProperty?: string; * } * * // Then, we define our Poller as follows: * export class MyPoller extends Poller { * // ... More content is needed here ... * * public getOperationState(): PublicState { * const state: PublicState = this.operation.state; * return { * // Properties from PollOperationState * isStarted: state.isStarted, * isCompleted: state.isCompleted, * isCancelled: state.isCancelled, * error: state.error, * result: state.result, * * // The only other property needed by PublicState. * publicProperty: state.publicProperty * } * } * } * ``` * * You can see this in the tests of this repository; go to the file: * `../test/utils/testPoller.ts` * and look for the getOperationState implementation. */ getOperationState() { return this.operation.state; } /** * Returns the result value of the operation, * regardless of the state of the poller. * It can return undefined or an incomplete form of the final TResult value * depending on the implementation. */ getResult() { const state$1 = this.operation.state; return state$1.result; } /** * Returns a serialized version of the poller's operation * by invoking the operation's toString method. */ toString() { return this.operation.toString(); } }; exports.Poller = Poller; } }); //#endregion //#region node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js var require_lroEngine$1 = __commonJS({ "node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.LroEngine = void 0; const operation_js_1 = require_operation(); const constants_js_1 = require_constants(); const poller_js_1$1 = require_poller(); const operation_js_2 = require_operation$2(); /** * The LRO Engine, a class that performs polling. */ var LroEngine = class extends poller_js_1$1.Poller { constructor(lro, options) { const { intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState } = options || {}; const state$1 = resumeFrom ? (0, operation_js_2.deserializeState)(resumeFrom) : {}; const operation = new operation_js_1.GenericPollOperation(state$1, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); super(operation); this.resolveOnUnsuccessful = resolveOnUnsuccessful; this.config = { intervalInMs }; operation.setPollerConfig(this.config); } /** * The method used by the poller to wait before attempting to update its operation. 
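 *
 * The wait length comes from `config.intervalInMs`, which defaults to the package's
 * POLL_INTERVAL_IN_MS and can be overridden at construction time (a sketch with an assumed
 * `lro` implementation):
 *
 * ```ts
 * const engine = new LroEngine(lro, { intervalInMs: 5000 });
 * ```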
*/ delay() { return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); } }; exports.LroEngine = LroEngine; } }); //#endregion //#region node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js var require_lroEngine = __commonJS({ "node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/index.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.LroEngine = void 0; var lroEngine_js_1 = require_lroEngine$1(); Object.defineProperty(exports, "LroEngine", { enumerable: true, get: function() { return lroEngine_js_1.LroEngine; } }); } }); //#endregion //#region node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js var require_pollOperation = __commonJS({ "node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/legacy/pollOperation.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); } }); //#endregion //#region node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/index.js var require_commonjs$2 = __commonJS({ "node_modules/.deno/@azure+core-lro@2.7.2/node_modules/@azure/core-lro/dist/commonjs/index.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.createHttpPoller = void 0; const tslib_1 = require_tslib(); var poller_js_1 = require_poller$1(); Object.defineProperty(exports, "createHttpPoller", { enumerable: true, get: function() { return poller_js_1.createHttpPoller; } }); /** * This can be uncommented to expose the protocol-agnostic poller */ /** legacy */ tslib_1.__exportStar(require_lroEngine(), exports); tslib_1.__exportStar(require_poller(), exports); tslib_1.__exportStar(require_pollOperation(), exports); } }); //#endregion //#region node_modules/.deno/@azure+storage-blob@12.27.0/node_modules/@azure/storage-blob/dist/index.js var require_dist$1 = __commonJS({ "node_modules/.deno/@azure+storage-blob@12.27.0/node_modules/@azure/storage-blob/dist/index.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); var coreRestPipeline = require_commonjs$7(); var tslib = require_tslib(); var coreAuth = require_commonjs$6(); var coreUtil = require_commonjs$9(); var coreHttpCompat = require_commonjs$4(); var coreClient = require_commonjs$5(); var coreXml = require_commonjs$3(); var logger$1 = require_commonjs$11(); var abortController = require_commonjs$10(); var crypto$1 = __require("crypto"); var coreTracing = require_commonjs$8(); var stream$2 = __require("stream"); var coreLro = require_commonjs$2(); var events = __require("events"); var fs$4 = __require("fs"); var util$2 = __require("util"); var buffer$1 = __require("buffer"); function _interopNamespaceDefault(e) { var n = Object.create(null); if (e) Object.keys(e).forEach(function(k) { if (k !== "default") { var d$1 = Object.getOwnPropertyDescriptor(e, k); Object.defineProperty(n, k, d$1.get ? d$1 : { enumerable: true, get: function() { return e[k]; } }); } }); n.default = e; return Object.freeze(n); } var coreHttpCompat__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreHttpCompat); var coreClient__namespace = /* @__PURE__ */ _interopNamespaceDefault(coreClient); var fs__namespace = /* @__PURE__ */ _interopNamespaceDefault(fs$4); var util__namespace = /* @__PURE__ */ _interopNamespaceDefault(util$2); /** * The `@azure/logger` configuration for this package. 
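 *
 * A hedged usage note: with @azure/logger, client-library logging can be enabled by setting
 * the AZURE_LOG_LEVEL environment variable (for example to "info"), or programmatically:
 *
 * ```ts
 * import { setLogLevel } from "@azure/logger";
 * setLogLevel("info");
 * ```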
*/ const logger = logger$1.createClientLogger("storage-blob"); /** * The base class from which all request policies derive. */ var BaseRequestPolicy = class { /** * The main method to implement that manipulates a request/response. */ constructor(_nextPolicy, _options) { this._nextPolicy = _nextPolicy; this._options = _options; } /** * Get whether or not a log with the provided log level should be logged. * @param logLevel - The log level of the log that will be logged. * @returns Whether or not a log with the provided log level should be logged. */ shouldLog(logLevel) { return this._options.shouldLog(logLevel); } /** * Attempt to log the provided message to the provided logger. If no logger was provided or if * the log level does not meet the logger's threshold, then nothing will be logged. * @param logLevel - The log level of this log. * @param message - The message of this log. */ log(logLevel, message) { this._options.log(logLevel, message); } }; const SDK_VERSION = "12.27.0"; const SERVICE_VERSION = "2025-05-05"; const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4e3 * 1024 * 1024; const BLOCK_BLOB_MAX_BLOCKS = 5e4; const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; const REQUEST_TIMEOUT = 100 * 1e3; /** * The OAuth scope to use with Azure Storage. */ const StorageOAuthScopes = "https://storage.azure.com/.default"; const URLConstants = { Parameters: { FORCE_BROWSER_NO_CACHE: "_", SNAPSHOT: "snapshot", VERSIONID: "versionid", TIMEOUT: "timeout" } }; const HTTPURLConnection = { HTTP_ACCEPTED: 202 }; const HeaderConstants = { AUTHORIZATION: "Authorization", CONTENT_ENCODING: "Content-Encoding", CONTENT_ID: "Content-ID", CONTENT_LANGUAGE: "Content-Language", CONTENT_LENGTH: "Content-Length", CONTENT_MD5: "Content-Md5", CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", CONTENT_TYPE: "Content-Type", COOKIE: "Cookie", DATE: "date", IF_MATCH: "if-match", IF_MODIFIED_SINCE: "if-modified-since", IF_NONE_MATCH: "if-none-match", IF_UNMODIFIED_SINCE: "if-unmodified-since", PREFIX_FOR_STORAGE: "x-ms-", RANGE: "Range", X_MS_DATE: "x-ms-date", X_MS_ERROR_CODE: "x-ms-error-code", X_MS_VERSION: "x-ms-version" }; const ETagNone = ""; const ETagAny = "*"; const SIZE_1_MB = 1 * 1024 * 1024; const BATCH_MAX_REQUEST = 256; const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; const HTTP_LINE_ENDING = "\r\n"; const HTTP_VERSION_1_1 = "HTTP/1.1"; const EncryptionAlgorithmAES25 = "AES256"; const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; const StorageBlobLoggingAllowedHeaderNames = [ "Access-Control-Allow-Origin", "Cache-Control", "Content-Length", "Content-Type", "Date", "Request-Id", "traceparent", "Transfer-Encoding", "User-Agent", "x-ms-client-request-id", "x-ms-date", "x-ms-error-code", "x-ms-request-id", "x-ms-return-client-request-id", "x-ms-version", "Accept-Ranges", "Content-Disposition", "Content-Encoding", "Content-Language", "Content-MD5", "Content-Range", "ETag", "Last-Modified", "Server", "Vary", "x-ms-content-crc64", "x-ms-copy-action", "x-ms-copy-completion-time", "x-ms-copy-id", "x-ms-copy-progress", "x-ms-copy-status", "x-ms-has-immutability-policy", "x-ms-has-legal-hold", "x-ms-lease-state", "x-ms-lease-status", "x-ms-range", 
"x-ms-request-server-encrypted", "x-ms-server-encrypted", "x-ms-snapshot", "x-ms-source-range", "If-Match", "If-Modified-Since", "If-None-Match", "If-Unmodified-Since", "x-ms-access-tier", "x-ms-access-tier-change-time", "x-ms-access-tier-inferred", "x-ms-account-kind", "x-ms-archive-status", "x-ms-blob-append-offset", "x-ms-blob-cache-control", "x-ms-blob-committed-block-count", "x-ms-blob-condition-appendpos", "x-ms-blob-condition-maxsize", "x-ms-blob-content-disposition", "x-ms-blob-content-encoding", "x-ms-blob-content-language", "x-ms-blob-content-length", "x-ms-blob-content-md5", "x-ms-blob-content-type", "x-ms-blob-public-access", "x-ms-blob-sequence-number", "x-ms-blob-type", "x-ms-copy-destination-snapshot", "x-ms-creation-time", "x-ms-default-encryption-scope", "x-ms-delete-snapshots", "x-ms-delete-type-permanent", "x-ms-deny-encryption-scope-override", "x-ms-encryption-algorithm", "x-ms-if-sequence-number-eq", "x-ms-if-sequence-number-le", "x-ms-if-sequence-number-lt", "x-ms-incremental-copy", "x-ms-lease-action", "x-ms-lease-break-period", "x-ms-lease-duration", "x-ms-lease-id", "x-ms-lease-time", "x-ms-page-write", "x-ms-proposed-lease-id", "x-ms-range-get-content-md5", "x-ms-rehydrate-priority", "x-ms-sequence-number-action", "x-ms-sku-name", "x-ms-source-content-md5", "x-ms-source-if-match", "x-ms-source-if-modified-since", "x-ms-source-if-none-match", "x-ms-source-if-unmodified-since", "x-ms-tag-count", "x-ms-encryption-key-sha256", "x-ms-copy-source-error-code", "x-ms-copy-source-status-code", "x-ms-if-tags", "x-ms-source-if-tags" ]; const StorageBlobLoggingAllowedQueryParameters = [ "comp", "maxresults", "rscc", "rscd", "rsce", "rscl", "rsct", "se", "si", "sip", "sp", "spr", "sr", "srt", "ss", "st", "sv", "include", "marker", "prefix", "copyid", "restype", "blockid", "blocklisttype", "delimiter", "prevsnapshot", "ske", "skoid", "sks", "skt", "sktid", "skv", "snapshot" ]; const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; const BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; const PathStylePorts = [ "10000", "10001", "10002", "10003", "10004", "10100", "10101", "10102", "10103", "10104", "11000", "11001", "11002", "11003", "11004", "11100", "11101", "11102", "11103", "11104" ]; /** * Reserved URL characters must be properly escaped for Storage services like Blob or File. * * ## URL encode and escape strategy for JS SDKs * * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not. * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors. * * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK. * * This is what legacy V2 SDK does, simple and works for most of the cases. * - When customer URL string is "http://account.blob.core.windows.net/con/b:", * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created. * * But this strategy will make it not possible to create a blob with "?" in it's name. 
because when the customer URL string is * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as a URL parameter instead of part of the blob name. * If the customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created. * The V2 SDK doesn't have this issue because it doesn't allow customers to pass in a full URL; it accepts a separate blob name and calls encodeURIComponent on it. * We cannot accept an SDK that cannot create a blob with "?" in its name. So we implement Strategy Two: * * ### Strategy Two: The SDK doesn't assume whether or not the URL has been encoded. It will just escape the special characters. * * This is what the V10 Blob Go SDK does. It accepts a URL type in Go, and calls url.EscapedPath() to escape the special chars that are unescaped. * - When the customer URL string is "http://account.blob.core.windows.net/con/b:", * the SDK will escape ":" to produce "http://account.blob.core.windows.net/con/b%3A" and send that to the server. A blob named "b:" will be created. * - When the customer URL string is "http://account.blob.core.windows.net/con/b%3A", * there are no unescaped special characters, so "http://account.blob.core.windows.net/con/b%3A" is sent to the server. A blob named "b:" will be created. * - When the customer URL string is "http://account.blob.core.windows.net/con/b%253A", * there are no unescaped special characters, so "http://account.blob.core.windows.net/con/b%253A" is sent to the server. A blob named "b%3A" will be created. * * This strategy gives us the flexibility to create blobs with any special characters. But "%" will be treated as a special character: if the URL string * is not encoded, there shouldn't be a "%" in it, otherwise the URL is not a valid URL. * If a customer needs to create a blob with "%" in its name, use "%25" instead of "%", just like the third sample above. * And the following URL strings are invalid: * - "http://account.blob.core.windows.net/con/b%" * - "http://account.blob.core.windows.net/con/b%2" * - "http://account.blob.core.windows.net/con/b%G" * * Another special character is "?"; use "%3F" to represent a blob name containing "?" in a URL string. * * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)` * * We will apply Strategy One, and call encodeURIComponent for these parameters like blobName, because what customers pass in is a plain name instead of a URL. 
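 *
 * A small illustration of Strategy Two using the `escapeURLPath` helper defined below
 * (example inputs invented):
 *
 * ```ts
 * escapeURLPath("http://account.blob.core.windows.net/con/b:");
 * // => "http://account.blob.core.windows.net/con/b%3A" (the ":" gets escaped)
 * escapeURLPath("http://account.blob.core.windows.net/con/b%3A");
 * // => "http://account.blob.core.windows.net/con/b%3A" (already escaped, left unchanged)
 * ```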
* * @see https://learn.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * @see https://learn.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata * * @param url - */ function escapeURLPath(url$1) { const urlParsed = new URL(url$1); let path$13 = urlParsed.pathname; path$13 = path$13 || "/"; path$13 = escape(path$13); urlParsed.pathname = path$13; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { let proxyUri = ""; if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { const matchCredentials = connectionString.split(";"); for (const element of matchCredentials) if (element.trim().startsWith("DevelopmentStorageProxyUri=")) proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; } return proxyUri; } function getValueInConnString(connectionString, argument) { const elements = connectionString.split(";"); for (const element of elements) if (element.trim().startsWith(argument)) return element.trim().match(argument + "=(.*)")[1]; return ""; } /** * Extracts the parts of an Azure Storage account connection string. * * @param connectionString - Connection string. * @returns String key value pairs of the storage account's url and credentials. */ function extractConnectionStringParts(connectionString) { let proxyUri = ""; if (connectionString.startsWith("UseDevelopmentStorage=true")) { proxyUri = getProxyUriFromDevConnString(connectionString); connectionString = DevelopmentConnectionString; } let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && connectionString.search("AccountKey=") !== -1) { let defaultEndpointsProtocol = ""; let accountName = ""; let accountKey = Buffer.from("accountKey", "base64"); let endpointSuffix = ""; accountName = getValueInConnString(connectionString, "AccountName"); accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); if (!blobEndpoint) { defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); const protocol = defaultEndpointsProtocol.toLowerCase(); if (protocol !== "https" && protocol !== "http") throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. 
Expecting 'https' or 'http'"); endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); if (!endpointSuffix) throw new Error("Invalid EndpointSuffix in the provided Connection String"); blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; } if (!accountName) throw new Error("Invalid AccountName in the provided Connection String"); else if (accountKey.length === 0) throw new Error("Invalid AccountKey in the provided Connection String"); return { kind: "AccountConnString", url: blobEndpoint, accountName, accountKey, proxyUri }; } else { let accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); let accountName = getValueInConnString(connectionString, "AccountName"); if (!accountName) accountName = getAccountNameFromUrl(blobEndpoint); if (!blobEndpoint) throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); else if (!accountSas) throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); if (accountSas.startsWith("?")) accountSas = accountSas.substring(1); return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; } } /** * Internal escape method implemented Strategy Two mentioned in escapeURL() description. * * @param text - */ function escape(text) { return encodeURIComponent(text).replace(/%2F/g, "/").replace(/'/g, "%27").replace(/\+/g, "%20").replace(/%25/g, "%"); } /** * Append a string to URL path. Will remove duplicated "/" in front of the string * when URL path ends with a "/". * * @param url - Source URL string * @param name - String to be appended to URL * @returns An updated URL string */ function appendToURLPath(url$1, name) { const urlParsed = new URL(url$1); let path$13 = urlParsed.pathname; path$13 = path$13 ? path$13.endsWith("/") ? `${path$13}${name}` : `${path$13}/${name}` : name; urlParsed.pathname = path$13; return urlParsed.toString(); } /** * Set URL parameter name and value. If name exists in URL parameters, old value * will be replaced by name key. If not provide value, the parameter will be deleted. * * @param url - Source URL string * @param name - Parameter name * @param value - Parameter value * @returns An updated URL string */ function setURLParameter(url$1, name, value) { const urlParsed = new URL(url$1); const encodedName = encodeURIComponent(name); const encodedValue = value ? encodeURIComponent(value) : void 0; const searchString = urlParsed.search === "" ? "?" : urlParsed.search; const searchPieces = []; for (const pair of searchString.slice(1).split("&")) if (pair) { const [key] = pair.split("=", 2); if (key !== encodedName) searchPieces.push(pair); } if (encodedValue) searchPieces.push(`${encodedName}=${encodedValue}`); urlParsed.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; return urlParsed.toString(); } /** * Get URL parameter by name. * * @param url - * @param name - */ function getURLParameter(url$1, name) { var _a$2; const urlParsed = new URL(url$1); return (_a$2 = urlParsed.searchParams.get(name)) !== null && _a$2 !== void 0 ? _a$2 : void 0; } /** * Set URL host. * * @param url - Source URL string * @param host - New host string * @returns An updated URL string */ function setURLHost(url$1, host) { const urlParsed = new URL(url$1); urlParsed.hostname = host; return urlParsed.toString(); } /** * Get URL path from an URL string. 
* * @param url - Source URL string */ function getURLPath(url$1) { try { const urlParsed = new URL(url$1); return urlParsed.pathname; } catch (e) { return void 0; } } /** * Get URL scheme from a URL string. * * @param url - Source URL string */ function getURLScheme(url$1) { try { const urlParsed = new URL(url$1); return urlParsed.protocol.endsWith(":") ? urlParsed.protocol.slice(0, -1) : urlParsed.protocol; } catch (e) { return void 0; } } /** * Get URL path and query from a URL string. * * @param url - Source URL string */ function getURLPathAndQuery(url$1) { const urlParsed = new URL(url$1); const pathString = urlParsed.pathname; if (!pathString) throw new RangeError("Invalid url without valid path."); let queryString = urlParsed.search || ""; queryString = queryString.trim(); if (queryString !== "") queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; return `${pathString}${queryString}`; } /** * Get URL query key-value pairs from a URL string. * * @param url - */ function getURLQueries(url$1) { let queryString = new URL(url$1).search; if (!queryString) return {}; queryString = queryString.trim(); queryString = queryString.startsWith("?") ? queryString.substring(1) : queryString; let querySubStrings = queryString.split("&"); querySubStrings = querySubStrings.filter((value) => { const indexOfEqual = value.indexOf("="); const lastIndexOfEqual = value.lastIndexOf("="); return indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1; }); const queries = {}; for (const querySubString of querySubStrings) { const splitResults = querySubString.split("="); const key = splitResults[0]; const value = splitResults[1]; queries[key] = value; } return queries; } /** * Append a string to the URL query. * * @param url - Source URL string. * @param queryParts - String to be appended to the URL query. * @returns An updated URL string. */ function appendToURLQuery(url$1, queryParts) { const urlParsed = new URL(url$1); let query = urlParsed.search; if (query) query += "&" + queryParts; else query = queryParts; urlParsed.search = query; return urlParsed.toString(); } /** * Rounds a date off to seconds. * * @param date - * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned; * If false, YYYY-MM-DDThh:mm:ssZ will be returned. * @returns Date string in ISO 8601 format, with or without the 7-digit fractional-seconds component */ function truncatedISO8061Date(date, withMilliseconds = true) { const dateString = date.toISOString(); return withMilliseconds ? dateString.substring(0, dateString.length - 1) + "0000Z" : dateString.substring(0, dateString.length - 5) + "Z"; } /** * Base64 encode. * * @param content - */ function base64encode$1(content) { return !coreUtil.isNode ? btoa(content) : Buffer.from(content).toString("base64"); } /** * Generate a 64-byte base64 block ID string. * * @param blockIndex - */ function generateBlockID(blockIDPrefix, blockIndex) { const maxSourceStringLength = 48; const maxBlockIndexLength = 6; const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); const res = blockIDPrefix + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); return base64encode$1(res); } /** * Delay for the specified time interval. 
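 *
 * A usage sketch (caller-side values assumed): the optional aborter cancels a pending delay,
 * rejecting the promise with the provided abortError.
 *
 * ```ts
 * const controller = new AbortController();
 * const pending = delay$1(30000, controller.signal, new Error("aborted"));
 * controller.abort(); // `pending` now rejects with the given error
 * ```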
* * @param timeInMs - * @param aborter - * @param abortError - */ async function delay$1(timeInMs, aborter, abortError) { return new Promise((resolve, reject) => { let timeout; const abortHandler = () => { if (timeout !== void 0) clearTimeout(timeout); reject(abortError); }; const resolveHandler = () => { if (aborter !== void 0) aborter.removeEventListener("abort", abortHandler); resolve(); }; timeout = setTimeout(resolveHandler, timeInMs); if (aborter !== void 0) aborter.addEventListener("abort", abortHandler); }); } /** * String.prototype.padStart() * * @param currentString - * @param targetLength - * @param padString - */ function padStart(currentString, targetLength, padString = " ") { if (String.prototype.padStart) return currentString.padStart(targetLength, padString); padString = padString || " "; if (currentString.length > targetLength) return currentString; else { targetLength = targetLength - currentString.length; if (targetLength > padString.length) padString += padString.repeat(targetLength / padString.length); return padString.slice(0, targetLength) + currentString; } } /** * Whether two strings are equal when compared case-insensitively. * * @param str1 - * @param str2 - */ function iEqual(str1, str2) { return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); } /** * Extracts the account name from the url * @param url - url to extract the account name from * @returns The account name */ function getAccountNameFromUrl(url$1) { const parsedUrl = new URL(url$1); let accountName; try { if (parsedUrl.hostname.split(".")[1] === "blob") accountName = parsedUrl.hostname.split(".")[0]; else if (isIpEndpointStyle(parsedUrl)) accountName = parsedUrl.pathname.split("/")[1]; else accountName = ""; return accountName; } catch (error) { throw new Error("Unable to extract accountName with provided information."); } } function isIpEndpointStyle(parsedUrl) { const host = parsedUrl.host; return /^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || Boolean(parsedUrl.port) && PathStylePorts.includes(parsedUrl.port); } /** * Convert Tags to encoded string. * * @param tags - */ function toBlobTagsString(tags$1) { if (tags$1 === void 0) return void 0; const tagPairs = []; for (const key in tags$1) if (Object.prototype.hasOwnProperty.call(tags$1, key)) { const value = tags$1[key]; tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); } return tagPairs.join("&"); } /** * Convert Tags type to BlobTags. * * @param tags - */ function toBlobTags(tags$1) { if (tags$1 === void 0) return void 0; const res = { blobTagSet: [] }; for (const key in tags$1) if (Object.prototype.hasOwnProperty.call(tags$1, key)) { const value = tags$1[key]; res.blobTagSet.push({ key, value }); } return res; } /** * Convert BlobTags to Tags type. * * @param tags - */ function toTags(tags$1) { if (tags$1 === void 0) return void 0; const res = {}; for (const blobTag of tags$1.blobTagSet) res[blobTag.key] = blobTag.value; return res; } /** * Convert BlobQueryTextConfiguration to QuerySerialization type. 
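 *
 * For example (an invented CSV configuration), the mapping below yields:
 *
 * ```ts
 * toQuerySerialization({ kind: "csv", recordSeparator: "\n", hasHeaders: true });
 * // => { format: { type: "delimited", delimitedTextConfiguration: {
 * //      columnSeparator: ",", fieldQuote: "", recordSeparator: "\n",
 * //      escapeChar: "", headersPresent: true } } }
 * ```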
* * @param textConfiguration - */ function toQuerySerialization(textConfiguration) { if (textConfiguration === void 0) return void 0; switch (textConfiguration.kind) { case "csv": return { format: { type: "delimited", delimitedTextConfiguration: { columnSeparator: textConfiguration.columnSeparator || ",", fieldQuote: textConfiguration.fieldQuote || "", recordSeparator: textConfiguration.recordSeparator, escapeChar: textConfiguration.escapeCharacter || "", headersPresent: textConfiguration.hasHeaders || false } } }; case "json": return { format: { type: "json", jsonTextConfiguration: { recordSeparator: textConfiguration.recordSeparator } } }; case "arrow": return { format: { type: "arrow", arrowConfiguration: { schema: textConfiguration.schema } } }; case "parquet": return { format: { type: "parquet" } }; default: throw Error("Invalid BlobQueryTextConfiguration."); } } function parseObjectReplicationRecord(objectReplicationRecord) { if (!objectReplicationRecord) return void 0; if ("policy-id" in objectReplicationRecord) return void 0; const orProperties = []; for (const key in objectReplicationRecord) { const ids = key.split("_"); const policyPrefix = "or-"; if (ids[0].startsWith(policyPrefix)) ids[0] = ids[0].substring(policyPrefix.length); const rule = { ruleId: ids[1], replicationStatus: objectReplicationRecord[key] }; const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); if (policyIndex > -1) orProperties[policyIndex].rules.push(rule); else orProperties.push({ policyId: ids[0], rules: [rule] }); } return orProperties; } function httpAuthorizationToString(httpAuthorization) { return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : void 0; } function BlobNameToString(name) { if (name.encoded) return decodeURIComponent(name.content); else return name.content; } function ConvertInternalResponseOfListBlobFlat(internalResponse) { return Object.assign(Object.assign({}, internalResponse), { segment: { blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); return blobItem; }) } }); } function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { var _a$2; return Object.assign(Object.assign({}, internalResponse), { segment: { blobPrefixes: (_a$2 = internalResponse.segment.blobPrefixes) === null || _a$2 === void 0 ? 
void 0 : _a$2.map((blobPrefixInternal) => { const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); return blobPrefix; }), blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); return blobItem; }) } }); } function* ExtractPageRangeInfoItems(getPageRangesSegment) { let pageRange = []; let clearRange = []; if (getPageRangesSegment.pageRange) pageRange = getPageRangesSegment.pageRange; if (getPageRangesSegment.clearRange) clearRange = getPageRangesSegment.clearRange; let pageRangeIndex = 0; let clearRangeIndex = 0; while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { yield { start: pageRange[pageRangeIndex].start, end: pageRange[pageRangeIndex].end, isClear: false }; ++pageRangeIndex; } else { yield { start: clearRange[clearRangeIndex].start, end: clearRange[clearRangeIndex].end, isClear: true }; ++clearRangeIndex; } for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) yield { start: pageRange[pageRangeIndex].start, end: pageRange[pageRangeIndex].end, isClear: false }; for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) yield { start: clearRange[clearRangeIndex].start, end: clearRange[clearRangeIndex].end, isClear: true }; } /** * Escape the blobName but keep path separator ('/'). */ function EscapePath(blobName) { const split = blobName.split("/"); for (let i = 0; i < split.length; i++) split[i] = encodeURIComponent(split[i]); return split.join("/"); } /** * A typesafe helper for ensuring that a given response object has * the original _response attached. * @param response - A response object from calling a client operation * @returns The same object, but with known _response property */ function assertResponse(response) { if (`_response` in response) return response; throw new TypeError(`Unexpected response object ${response}`); } /** * RetryPolicy types. */ exports.StorageRetryPolicyType = void 0; (function(StorageRetryPolicyType$1) { /** * Exponential retry. Retry time delay grows exponentially. */ StorageRetryPolicyType$1[StorageRetryPolicyType$1["EXPONENTIAL"] = 0] = "EXPONENTIAL"; /** * Linear retry. Retry time delay grows linearly. */ StorageRetryPolicyType$1[StorageRetryPolicyType$1["FIXED"] = 1] = "FIXED"; })(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {})); const DEFAULT_RETRY_OPTIONS$1 = { maxRetryDelayInMs: 120 * 1e3, maxTries: 4, retryDelayInMs: 4 * 1e3, retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL, secondaryHost: "", tryTimeoutInMs: void 0 }; const RETRY_ABORT_ERROR$1 = new abortController.AbortError("The operation was aborted."); /** * Retry policy with exponential retry and linear retry implemented. */ var StorageRetryPolicy = class extends BaseRequestPolicy { /** * Creates an instance of RetryPolicy. * * @param nextPolicy - * @param options - * @param retryOptions - */ constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS$1) { super(nextPolicy, options); this.retryOptions = { retryPolicyType: retryOptions.retryPolicyType ? retryOptions.retryPolicyType : DEFAULT_RETRY_OPTIONS$1.retryPolicyType, maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 ? 
Math.floor(retryOptions.maxTries) : DEFAULT_RETRY_OPTIONS$1.maxTries, tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 ? retryOptions.tryTimeoutInMs : DEFAULT_RETRY_OPTIONS$1.tryTimeoutInMs, retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs) : DEFAULT_RETRY_OPTIONS$1.retryDelayInMs, maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs, secondaryHost: retryOptions.secondaryHost ? retryOptions.secondaryHost : DEFAULT_RETRY_OPTIONS$1.secondaryHost }; } /** * Sends request. * * @param request - */ async sendRequest(request) { return this.attemptSendRequest(request, false, 1); } /** * Decide on and perform the next retry. Won't mutate the request parameter. * * @param request - * @param secondaryHas404 - If the attempt was against the secondary & it returned a StatusNotFound (404), then * the resource was not found. This may be due to replication delay. So, in this * case, we'll never try the secondary again for this operation. * @param attempt - How many attempts have been performed, starting from 1, including * the attempt that will be performed by this method call. */ async attemptSendRequest(request, secondaryHas404, attempt) { const newRequest = request.clone(); const isPrimaryRetry = secondaryHas404 || !this.retryOptions.secondaryHost || !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || attempt % 2 === 1; if (!isPrimaryRetry) newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); if (this.retryOptions.tryTimeoutInMs) newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1e3).toString()); let response; try { logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); response = await this._nextPolicy.sendRequest(newRequest); if (!this.shouldRetry(isPrimaryRetry, attempt, response)) return response; secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; } catch (err) { logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) throw err; } await this.delay(isPrimaryRetry, attempt, request.abortSignal); return this.attemptSendRequest(request, secondaryHas404, ++attempt); } /** * Decide whether to retry according to the last HTTP response and retry counters. * * @param isPrimaryRetry - * @param attempt - * @param response - * @param err - */ shouldRetry(isPrimaryRetry, attempt, response, err) { if (attempt >= this.retryOptions.maxTries) { logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions.maxTries}, no further try.`); return false; } const retriableErrors$1 = [ "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET", "ENOENT", "ENOTFOUND", "TIMEOUT", "EPIPE", "REQUEST_SEND_ERROR" ]; if (err) { for (const retriableError of retriableErrors$1) if (err.name.toUpperCase().includes(retriableError) || err.message.toUpperCase().includes(retriableError) || err.code && err.code.toString().toUpperCase() === retriableError) { logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); return true; } } if (response || err) { const statusCode = response ? 
response.status : err ? err.statusCode : 0; if (!isPrimaryRetry && statusCode === 404) { logger.info(`RetryPolicy: Secondary access with 404, will retry.`); return true; } if (statusCode === 503 || statusCode === 500) { logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); return true; } } if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); return true; } return false; } /** * Delay a calculated time between retries. With the default exponential policy * and 4s base delay, primary retries wait 0 ms, 4 s, 12 s, and so on, capped at * maxRetryDelayInMs; secondary retries wait a random interval under 1 s. * * @param isPrimaryRetry - * @param attempt - * @param abortSignal - */ async delay(isPrimaryRetry, attempt, abortSignal$1) { let delayTimeInMs = 0; if (isPrimaryRetry) switch (this.retryOptions.retryPolicyType) { case exports.StorageRetryPolicyType.EXPONENTIAL: delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); break; case exports.StorageRetryPolicyType.FIXED: delayTimeInMs = this.retryOptions.retryDelayInMs; break; } else delayTimeInMs = Math.random() * 1e3; logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); return delay$1(delayTimeInMs, abortSignal$1, RETRY_ABORT_ERROR$1); } }; /** * StorageRetryPolicyFactory is a factory class that helps generate {@link StorageRetryPolicy} objects. */ var StorageRetryPolicyFactory = class { /** * Creates an instance of StorageRetryPolicyFactory. * @param retryOptions - */ constructor(retryOptions) { this.retryOptions = retryOptions; } /** * Creates a StorageRetryPolicy object. * * @param nextPolicy - * @param options - */ create(nextPolicy, options) { return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); } }; /** * Credential policy used to sign HTTP(S) requests before sending. This is an * abstract class. */ var CredentialPolicy = class extends BaseRequestPolicy { /** * Sends out request. * * @param request - */ sendRequest(request) { return this._nextPolicy.sendRequest(this.signRequest(request)); } /** * Child classes must implement this method with request signing. This method * will be executed in {@link sendRequest}.
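*
* As a rough sketch (a hypothetical subclass, not part of this bundle), an
* override could attach an Authorization header and return the request:
*
* @example
* class StaticTokenCredentialPolicy extends CredentialPolicy {
*   signRequest(request) {
*     // Hypothetical: attach a pre-fetched bearer token before sending.
*     request.headers.set("Authorization", "Bearer <token>");
*     return request;
*   }
* }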
* * @param request - */ signRequest(request) { return request; } }; const table_lv0 = new Uint32Array([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1820, 0, 1823, 1825, 1827, 1829, 0, 0, 0, 1837, 2051, 0, 0, 1843, 0, 3331, 3354, 3356, 3358, 3360, 3362, 3364, 3366, 3368, 3370, 0, 0, 0, 0, 0, 0, 0, 3586, 3593, 3594, 3610, 3617, 3619, 3621, 3628, 3634, 3637, 3638, 3656, 3665, 3696, 3708, 3710, 3721, 3722, 3729, 3737, 3743, 3746, 3748, 3750, 3751, 3753, 0, 0, 0, 1859, 1860, 1864, 3586, 3593, 3594, 3610, 3617, 3619, 3621, 3628, 3634, 3637, 3638, 3656, 3665, 3696, 3708, 3710, 3721, 3722, 3729, 3737, 3743, 3746, 3748, 3750, 3751, 3753, 0, 1868, 0, 1872, 0 ]); const table_lv2 = new Uint32Array([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]); const table_lv4 = new Uint32Array([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32786, 0, 0, 0, 0, 0, 33298, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]); function compareHeader(lhs, rhs) { if (isLessThan(lhs, rhs)) return -1; return 1; } function isLessThan(lhs, rhs) { const tables = [ table_lv0, table_lv2, table_lv4 ]; let curr_level = 0; let i = 0; let j = 0; while (curr_level < tables.length) { if (curr_level === tables.length - 1 && i !== j) return i > j; const weight1 = i < lhs.length ? tables[curr_level][lhs[i].charCodeAt(0)] : 1; const weight2 = j < rhs.length ? tables[curr_level][rhs[j].charCodeAt(0)] : 1; if (weight1 === 1 && weight2 === 1) { i = 0; j = 0; ++curr_level; } else if (weight1 === weight2) { ++i; ++j; } else if (weight1 === 0) ++i; else if (weight2 === 0) ++j; else return weight1 < weight2; } return false; } /** * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. */ var StorageSharedKeyCredentialPolicy = class extends CredentialPolicy { /** * Creates an instance of StorageSharedKeyCredentialPolicy. * @param nextPolicy - * @param options - * @param factory - */ constructor(nextPolicy, options, factory) { super(nextPolicy, options); this.factory = factory; } /** * Signs request. 
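*
* The string-to-sign assembled below follows the Shared Key format: the HTTP
* verb, eleven standard header slots separated by newlines, then the
* canonicalized x-ms-* headers and the canonicalized resource. Roughly, and
* with placeholder values:
*
* @example
* // GET\n\n\n\n\n\n\n\n\n\n\n\n
* // x-ms-date:Tue, 07 Jan 2030 00:00:00 GMT\nx-ms-version:<version>\n
* // /myaccount/mycontainer\ncomp:list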
* * @param request - */ signRequest(request) { request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); if (request.body && (typeof request.body === "string" || Buffer.isBuffer(request.body)) && request.body.length > 0) request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); const stringToSign = [ request.method.toUpperCase(), this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), this.getHeaderValueToSign(request, HeaderConstants.DATE), this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), this.getHeaderValueToSign(request, HeaderConstants.RANGE) ].join("\n") + "\n" + this.getCanonicalizedHeadersString(request) + this.getCanonicalizedResourceString(request); const signature = this.factory.computeHMACSHA256(stringToSign); request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); return request; } /** * Retrieve header value according to shared key sign rules. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key * * @param request - * @param headerName - */ getHeaderValueToSign(request, headerName) { const value = request.headers.get(headerName); if (!value) return ""; if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") return ""; return value; } /** * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. * 2. Convert each HTTP header name to lowercase. * 3. Sort the headers lexicographically by header name, in ascending order. * Each header may appear only once in the string. * 4. Replace any linear whitespace in the header value with a single space. * 5. Trim any whitespace around the colon in the header. * 6. Finally, append a new-line character to each canonicalized header in the resulting list. * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. * * @param request - */ getCanonicalizedHeadersString(request) { let headersArray = request.headers.headersArray().filter((value) => { return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); }); headersArray.sort((a, b) => { return compareHeader(a.name.toLowerCase(), b.name.toLowerCase()); }); headersArray = headersArray.filter((value, index, array) => { if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) return false; return true; }); let canonicalizedHeadersStringToSign = ""; headersArray.forEach((header) => { canonicalizedHeadersStringToSign += `${header.name.toLowerCase().trimRight()}:${header.value.trimLeft()}\n`; }); return canonicalizedHeadersStringToSign; } /** * Retrieves the webResource canonicalized resource string.
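*
* For illustration: a request to
* https://myaccount.blob.core.windows.net/mycontainer?restype=container&comp=metadata
* yields "/" + account name + path, followed by one "\n<key>:<value>" pair per
* query parameter, with keys lowercased and sorted:
*
* @example
* // /myaccount/mycontainer\ncomp:metadata\nrestype:container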
* * @param request - */ getCanonicalizedResourceString(request) { const path$13 = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; canonicalizedResourceString += `/${this.factory.accountName}${path$13}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { const queryKeys = []; for (const key in queries) if (Object.prototype.hasOwnProperty.call(queries, key)) { const lowercaseKey = key.toLowerCase(); lowercaseQueries[lowercaseKey] = queries[key]; queryKeys.push(lowercaseKey); } queryKeys.sort(); for (const key of queryKeys) canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; } return canonicalizedResourceString; } }; /** * Credential is an abstract class for signing Azure Storage HTTP requests. This * class hosts a credentialPolicyCreator factory which generates CredentialPolicy objects. */ var Credential = class { /** * Creates a RequestPolicy object. * * @param _nextPolicy - * @param _options - */ create(_nextPolicy, _options) { throw new Error("Method should be implemented in child classes."); } }; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * StorageSharedKeyCredential for account key authorization of Azure Storage service. */ var StorageSharedKeyCredential = class extends Credential { /** * Creates an instance of StorageSharedKeyCredential. * @param accountName - * @param accountKey - */ constructor(accountName, accountKey) { super(); this.accountName = accountName; this.accountKey = Buffer.from(accountKey, "base64"); } /** * Creates a StorageSharedKeyCredentialPolicy object. * * @param nextPolicy - * @param options - */ create(nextPolicy, options) { return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); } /** * Generates a hash signature for an HTTP request or for a SAS. * * @param stringToSign - */ computeHMACSHA256(stringToSign) { return crypto$1.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); } }; /** * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources * or for use with Shared Access Signatures (SAS). */ var AnonymousCredentialPolicy = class extends CredentialPolicy { /** * Creates an instance of AnonymousCredentialPolicy. * @param nextPolicy - * @param options - */ constructor(nextPolicy, options) { super(nextPolicy, options); } }; /** * AnonymousCredential provides a credentialPolicyCreator member used to create * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with * HTTP(S) requests that read public resources or for use with Shared Access * Signatures (SAS). */ var AnonymousCredential = class extends Credential { /** * Creates an {@link AnonymousCredentialPolicy} object. * * @param nextPolicy - * @param options - */ create(nextPolicy, options) { return new AnonymousCredentialPolicy(nextPolicy, options); } }; let _defaultHttpClient; function getCachedDefaultHttpClient() { if (!_defaultHttpClient) _defaultHttpClient = coreRestPipeline.createDefaultHttpClient(); return _defaultHttpClient; } /** * The programmatic identifier of the StorageBrowserPolicy. */ const storageBrowserPolicyName = "storageBrowserPolicy"; /** * storageBrowserPolicy is a policy used to prevent browsers from caching requests * and to remove cookies and explicit content-length headers.
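*
* For illustration: in a browser, a GET or HEAD request URL gains a
* timestamp-valued query parameter (its name comes from
* URLConstants.Parameters.FORCE_BROWSER_NO_CACHE) so that every request is
* unique and bypasses the HTTP cache, and the cookie and content-length
* headers are dropped before the request is forwarded.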
*/ function storageBrowserPolicy() { return { name: storageBrowserPolicyName, async sendRequest(request, next) { if (coreUtil.isNode) return next(request); if (request.method === "GET" || request.method === "HEAD") request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); request.headers.delete(HeaderConstants.COOKIE); request.headers.delete(HeaderConstants.CONTENT_LENGTH); return next(request); } }; } /** * Name of the {@link storageRetryPolicy} */ const storageRetryPolicyName = "storageRetryPolicy"; /** * RetryPolicy types. */ var StorageRetryPolicyType; (function(StorageRetryPolicyType$1) { /** * Exponential retry. Retry time delay grows exponentially. */ StorageRetryPolicyType$1[StorageRetryPolicyType$1["EXPONENTIAL"] = 0] = "EXPONENTIAL"; /** * Linear retry. Retry time delay grows linearly. */ StorageRetryPolicyType$1[StorageRetryPolicyType$1["FIXED"] = 1] = "FIXED"; })(StorageRetryPolicyType || (StorageRetryPolicyType = {})); const DEFAULT_RETRY_OPTIONS = { maxRetryDelayInMs: 120 * 1e3, maxTries: 4, retryDelayInMs: 4 * 1e3, retryPolicyType: StorageRetryPolicyType.EXPONENTIAL, secondaryHost: "", tryTimeoutInMs: void 0 }; const retriableErrors = [ "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET", "ENOENT", "ENOTFOUND", "TIMEOUT", "EPIPE", "REQUEST_SEND_ERROR" ]; const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); /** * Retry policy with exponential retry and linear retry implemented. */ function storageRetryPolicy(options = {}) { var _a$2, _b$1, _c$1, _d$1, _e, _f; const retryPolicyType = (_a$2 = options.retryPolicyType) !== null && _a$2 !== void 0 ? _a$2 : DEFAULT_RETRY_OPTIONS.retryPolicyType; const maxTries = (_b$1 = options.maxTries) !== null && _b$1 !== void 0 ? _b$1 : DEFAULT_RETRY_OPTIONS.maxTries; const retryDelayInMs = (_c$1 = options.retryDelayInMs) !== null && _c$1 !== void 0 ? _c$1 : DEFAULT_RETRY_OPTIONS.retryDelayInMs; const maxRetryDelayInMs = (_d$1 = options.maxRetryDelayInMs) !== null && _d$1 !== void 0 ? _d$1 : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs; const secondaryHost = (_e = options.secondaryHost) !== null && _e !== void 0 ? _e : DEFAULT_RETRY_OPTIONS.secondaryHost; const tryTimeoutInMs = (_f = options.tryTimeoutInMs) !== null && _f !== void 0 ? _f : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs; function shouldRetry({ isPrimaryRetry, attempt, response, error }) { var _a$3, _b$2; if (attempt >= maxTries) { logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`); return false; } if (error) { for (const retriableError of retriableErrors) if (error.name.toUpperCase().includes(retriableError) || error.message.toUpperCase().includes(retriableError) || error.code && error.code.toString().toUpperCase() === retriableError) { logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); return true; } if ((error === null || error === void 0 ? void 0 : error.code) === "PARSE_ERROR" && (error === null || error === void 0 ? void 0 : error.message.startsWith(`Error "Error: Unclosed root tag`))) { logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); return true; } } if (response || error) { const statusCode = (_b$2 = (_a$3 = response === null || response === void 0 ? void 0 : response.status) !== null && _a$3 !== void 0 ? _a$3 : error === null || error === void 0 ? void 0 : error.statusCode) !== null && _b$2 !== void 0 ? 
_b$2 : 0; if (!isPrimaryRetry && statusCode === 404) { logger.info(`RetryPolicy: Secondary access with 404, will retry.`); return true; } if (statusCode === 503 || statusCode === 500) { logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); return true; } } return false; } function calculateDelay(isPrimaryRetry, attempt) { let delayTimeInMs = 0; if (isPrimaryRetry) switch (retryPolicyType) { case StorageRetryPolicyType.EXPONENTIAL: delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs); break; case StorageRetryPolicyType.FIXED: delayTimeInMs = retryDelayInMs; break; } else delayTimeInMs = Math.random() * 1e3; logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); return delayTimeInMs; } return { name: storageRetryPolicyName, async sendRequest(request, next) { if (tryTimeoutInMs) request.url = setURLParameter(request.url, URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1e3))); const primaryUrl = request.url; const secondaryUrl = secondaryHost ? setURLHost(request.url, secondaryHost) : void 0; let secondaryHas404 = false; let attempt = 1; let retryAgain = true; let response; let error; while (retryAgain) { const isPrimaryRetry = secondaryHas404 || !secondaryUrl || ![ "GET", "HEAD", "OPTIONS" ].includes(request.method) || attempt % 2 === 1; request.url = isPrimaryRetry ? primaryUrl : secondaryUrl; response = void 0; error = void 0; try { logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); response = await next(request); secondaryHas404 = secondaryHas404 || !isPrimaryRetry && response.status === 404; } catch (e) { if (coreRestPipeline.isRestError(e)) { logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`); error = e; } else { logger.error(`RetryPolicy: Caught error, message: ${coreUtil.getErrorMessage(e)}`); throw e; } } retryAgain = shouldRetry({ isPrimaryRetry, attempt, response, error }); if (retryAgain) await delay$1(calculateDelay(isPrimaryRetry, attempt), request.abortSignal, RETRY_ABORT_ERROR); attempt++; } if (response) return response; throw error !== null && error !== void 0 ? error : new coreRestPipeline.RestError("RetryPolicy failed without known error."); } }; } /** * The programmatic identifier of the storageSharedKeyCredentialPolicy. */ const storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; /** * storageSharedKeyCredentialPolicy handles signing requests using storage account keys. 
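*
* A minimal usage sketch (placeholder account values), mirroring how this
* policy is wired into the core pipeline later in this file:
*
* @example
* const policy = storageSharedKeyCredentialPolicy({
*   accountName: "myaccount",
*   accountKey: Buffer.from("<base64-account-key>", "base64")
* });
* corePipeline.addPolicy(policy, { phase: "Sign" });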
*/ function storageSharedKeyCredentialPolicy(options) { function signRequest(request) { request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); if (request.body && (typeof request.body === "string" || Buffer.isBuffer(request.body)) && request.body.length > 0) request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); const stringToSign = [ request.method.toUpperCase(), getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), getHeaderValueToSign(request, HeaderConstants.DATE), getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), getHeaderValueToSign(request, HeaderConstants.IF_MATCH), getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), getHeaderValueToSign(request, HeaderConstants.RANGE) ].join("\n") + "\n" + getCanonicalizedHeadersString(request) + getCanonicalizedResourceString(request); const signature = crypto$1.createHmac("sha256", options.accountKey).update(stringToSign, "utf8").digest("base64"); request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); } /** * Retrieve header value according to shared key sign rules. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key */ function getHeaderValueToSign(request, headerName) { const value = request.headers.get(headerName); if (!value) return ""; if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") return ""; return value; } /** * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. * 2. Convert each HTTP header name to lowercase. * 3. Sort the headers lexicographically by header name, in ascending order. * Each header may appear only once in the string. * 4. Replace any linear whitespace in the header value with a single space. * 5. Trim any whitespace around the colon in the header. * 6. Finally, append a new-line character to each canonicalized header in the resulting list. * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. 
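*
* For example (illustrative header values), the headers
* "X-Ms-Version: 2025-01-01" and "x-ms-date: Tue, 07 Jan 2030 00:00:00 GMT"
* canonicalize to:
*
* @example
* // x-ms-date:Tue, 07 Jan 2030 00:00:00 GMT\nx-ms-version:2025-01-01\n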
* */ function getCanonicalizedHeadersString(request) { let headersArray = []; for (const [name, value] of request.headers) if (name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE)) headersArray.push({ name, value }); headersArray.sort((a, b) => { return compareHeader(a.name.toLowerCase(), b.name.toLowerCase()); }); headersArray = headersArray.filter((value, index, array) => { if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) return false; return true; }); let canonicalizedHeadersStringToSign = ""; headersArray.forEach((header) => { canonicalizedHeadersStringToSign += `${header.name.toLowerCase().trimRight()}:${header.value.trimLeft()}\n`; }); return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request) { const path$13 = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; canonicalizedResourceString += `/${options.accountName}${path$13}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { const queryKeys = []; for (const key in queries) if (Object.prototype.hasOwnProperty.call(queries, key)) { const lowercaseKey = key.toLowerCase(); lowercaseQueries[lowercaseKey] = queries[key]; queryKeys.push(lowercaseKey); } queryKeys.sort(); for (const key of queryKeys) canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; } return canonicalizedResourceString; } return { name: storageSharedKeyCredentialPolicyName, async sendRequest(request, next) { signRequest(request); return next(request); } }; } /** * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: * * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. * StorageBrowserPolicy is a policy used to add a timestamp query to the GET/HEAD request URL, * thus avoiding the browser cache. * * 2. Remove the cookie header for security * * 3. Remove the content-length header to avoid browser warnings */ var StorageBrowserPolicy = class extends BaseRequestPolicy { /** * Creates an instance of StorageBrowserPolicy. * @param nextPolicy - * @param options - */ constructor(nextPolicy, options) { super(nextPolicy, options); } /** * Sends out request. * * @param request - */ async sendRequest(request) { if (coreUtil.isNode) return this._nextPolicy.sendRequest(request); if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); request.headers.remove(HeaderConstants.COOKIE); request.headers.remove(HeaderConstants.CONTENT_LENGTH); return this._nextPolicy.sendRequest(request); } }; /** * StorageBrowserPolicyFactory is a factory class that helps generate StorageBrowserPolicy objects. */ var StorageBrowserPolicyFactory = class { /** * Creates a StorageBrowserPolicy object. * * @param nextPolicy - * @param options - */ create(nextPolicy, options) { return new StorageBrowserPolicy(nextPolicy, options); } }; /** * The programmatic identifier of the storageCorrectContentLengthPolicy. */ const storageCorrectContentLengthPolicyName = "StorageCorrectContentLengthPolicy"; /** * storageCorrectContentLengthPolicy to correctly set Content-Length header with request body length.
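*
* Byte length, not character count, is what the service expects: for a UTF-8
* string body such as "héllo", Buffer.byteLength("héllo") is 6 while
* "héllo".length is 5, so relying on .length would under-report Content-Length.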
*/ function storageCorrectContentLengthPolicy() { function correctContentLength(request) { if (request.body && (typeof request.body === "string" || Buffer.isBuffer(request.body)) && request.body.length > 0) request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } return { name: storageCorrectContentLengthPolicyName, async sendRequest(request, next) { correctContentLength(request); return next(request); } }; } /** * A helper to decide if a given argument satisfies the Pipeline contract * @param pipeline - An argument that may be a Pipeline * @returns true when the argument satisfies the Pipeline contract */ function isPipelineLike(pipeline) { if (!pipeline || typeof pipeline !== "object") return false; const castPipeline = pipeline; return Array.isArray(castPipeline.factories) && typeof castPipeline.options === "object" && typeof castPipeline.toServiceClientOptions === "function"; } /** * A Pipeline class containing HTTP request policies. * You can create a default Pipeline by calling {@link newPipeline}. * Or you can create a Pipeline with your own policies by the constructor of Pipeline. * * Refer to {@link newPipeline} and provided policies before implementing your * customized Pipeline. */ var Pipeline = class { /** * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. * * @param factories - * @param options - */ constructor(factories, options = {}) { this.factories = factories; this.options = options; } /** * Transfer Pipeline object to ServiceClientOptions object which is required by * ServiceClient constructor. * * @returns The ServiceClientOptions object from this Pipeline. */ toServiceClientOptions() { return { httpClient: this.options.httpClient, requestPolicyFactories: this.factories }; } }; /** * Creates a new Pipeline object with Credential provided. * * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. * @param pipelineOptions - Optional. Options. * @returns A new Pipeline object. */ function newPipeline(credential, pipelineOptions = {}) { if (!credential) credential = new AnonymousCredential(); const pipeline = new Pipeline([], pipelineOptions); pipeline._credential = credential; return pipeline; } function processDownlevelPipeline(pipeline) { const knownFactoryFunctions = [ isAnonymousCredential, isStorageSharedKeyCredential, isCoreHttpBearerTokenFactory, isStorageBrowserPolicyFactory, isStorageRetryPolicyFactory, isStorageTelemetryPolicyFactory, isCoreHttpPolicyFactory ]; if (pipeline.factories.length) { const novelFactories = pipeline.factories.filter((factory) => { return !knownFactoryFunctions.some((knownFactory) => knownFactory(factory)); }); if (novelFactories.length) { const hasInjector = novelFactories.some((factory) => isInjectorPolicyFactory(factory)); return { wrappedPolicies: coreHttpCompat.createRequestPolicyFactoryPolicy(novelFactories), afterRetry: hasInjector }; } } return void 0; } function getCoreClientOptions(pipeline) { var _a$2; const _b$1 = pipeline.options, { httpClient: v1Client } = _b$1, restOptions = tslib.__rest(_b$1, ["httpClient"]); let httpClient = pipeline._coreHttpClient; if (!httpClient) { httpClient = v1Client ? 
coreHttpCompat.convertHttpClient(v1Client) : getCachedDefaultHttpClient(); pipeline._coreHttpClient = httpClient; } let corePipeline = pipeline._corePipeline; if (!corePipeline) { const packageDetails = `azsdk-js-azure-storage-blob/${SDK_VERSION}`; const userAgentPrefix = restOptions.userAgentOptions && restOptions.userAgentOptions.userAgentPrefix ? `${restOptions.userAgentOptions.userAgentPrefix} ${packageDetails}` : `${packageDetails}`; corePipeline = coreClient.createClientPipeline(Object.assign(Object.assign({}, restOptions), { loggingOptions: { additionalAllowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, additionalAllowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, logger: logger.info }, userAgentOptions: { userAgentPrefix }, serializationOptions: { stringifyXML: coreXml.stringifyXML, serializerOptions: { xml: { xmlCharKey: "#" } } }, deserializationOptions: { parseXML: coreXml.parseXML, serializerOptions: { xml: { xmlCharKey: "#" } } } })); corePipeline.removePolicy({ phase: "Retry" }); corePipeline.removePolicy({ name: coreRestPipeline.decompressResponsePolicyName }); corePipeline.addPolicy(storageCorrectContentLengthPolicy()); corePipeline.addPolicy(storageRetryPolicy(restOptions.retryOptions), { phase: "Retry" }); corePipeline.addPolicy(storageBrowserPolicy()); const downlevelResults = processDownlevelPipeline(pipeline); if (downlevelResults) corePipeline.addPolicy(downlevelResults.wrappedPolicies, downlevelResults.afterRetry ? { afterPhase: "Retry" } : void 0); const credential = getCredentialFromPipeline(pipeline); if (coreAuth.isTokenCredential(credential)) corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ credential, scopes: (_a$2 = restOptions.audience) !== null && _a$2 !== void 0 ? _a$2 : StorageOAuthScopes, challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge } }), { phase: "Sign" }); else if (credential instanceof StorageSharedKeyCredential) corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ accountName: credential.accountName, accountKey: credential.accountKey }), { phase: "Sign" }); pipeline._corePipeline = corePipeline; } return Object.assign(Object.assign({}, restOptions), { allowInsecureConnection: true, httpClient, pipeline: corePipeline }); } function getCredentialFromPipeline(pipeline) { if (pipeline._credential) return pipeline._credential; let credential = new AnonymousCredential(); for (const factory of pipeline.factories) if (coreAuth.isTokenCredential(factory.credential)) credential = factory.credential; else if (isStorageSharedKeyCredential(factory)) return factory; return credential; } function isStorageSharedKeyCredential(factory) { if (factory instanceof StorageSharedKeyCredential) return true; return factory.constructor.name === "StorageSharedKeyCredential"; } function isAnonymousCredential(factory) { if (factory instanceof AnonymousCredential) return true; return factory.constructor.name === "AnonymousCredential"; } function isCoreHttpBearerTokenFactory(factory) { return coreAuth.isTokenCredential(factory.credential); } function isStorageBrowserPolicyFactory(factory) { if (factory instanceof StorageBrowserPolicyFactory) return true; return factory.constructor.name === "StorageBrowserPolicyFactory"; } function isStorageRetryPolicyFactory(factory) { if (factory instanceof StorageRetryPolicyFactory) return true; return factory.constructor.name === "StorageRetryPolicyFactory"; } function isStorageTelemetryPolicyFactory(factory) { return factory.constructor.name === 
"TelemetryPolicyFactory"; } function isInjectorPolicyFactory(factory) { return factory.constructor.name === "InjectorPolicyFactory"; } function isCoreHttpPolicyFactory(factory) { const knownPolicies = [ "GenerateClientRequestIdPolicy", "TracingPolicy", "LogPolicy", "ProxyPolicy", "DisableResponseDecompressionPolicy", "KeepAlivePolicy", "DeserializationPolicy" ]; const mockHttpClient = { sendRequest: async (request) => { return { request, headers: request.headers.clone(), status: 500 }; } }; const mockRequestPolicyOptions$1 = { log(_logLevel, _message) {}, shouldLog(_logLevel) { return false; } }; const policyInstance = factory.create(mockHttpClient, mockRequestPolicyOptions$1); const policyName = policyInstance.constructor.name; return knownPolicies.some((knownPolicyName) => { return policyName.startsWith(knownPolicyName); }); } const BlobServiceProperties = { serializedName: "BlobServiceProperties", xmlName: "StorageServiceProperties", type: { name: "Composite", className: "BlobServiceProperties", modelProperties: { blobAnalyticsLogging: { serializedName: "Logging", xmlName: "Logging", type: { name: "Composite", className: "Logging" } }, hourMetrics: { serializedName: "HourMetrics", xmlName: "HourMetrics", type: { name: "Composite", className: "Metrics" } }, minuteMetrics: { serializedName: "MinuteMetrics", xmlName: "MinuteMetrics", type: { name: "Composite", className: "Metrics" } }, cors: { serializedName: "Cors", xmlName: "Cors", xmlIsWrapped: true, xmlElementName: "CorsRule", type: { name: "Sequence", element: { type: { name: "Composite", className: "CorsRule" } } } }, defaultServiceVersion: { serializedName: "DefaultServiceVersion", xmlName: "DefaultServiceVersion", type: { name: "String" } }, deleteRetentionPolicy: { serializedName: "DeleteRetentionPolicy", xmlName: "DeleteRetentionPolicy", type: { name: "Composite", className: "RetentionPolicy" } }, staticWebsite: { serializedName: "StaticWebsite", xmlName: "StaticWebsite", type: { name: "Composite", className: "StaticWebsite" } } } } }; const Logging = { serializedName: "Logging", type: { name: "Composite", className: "Logging", modelProperties: { version: { serializedName: "Version", required: true, xmlName: "Version", type: { name: "String" } }, deleteProperty: { serializedName: "Delete", required: true, xmlName: "Delete", type: { name: "Boolean" } }, read: { serializedName: "Read", required: true, xmlName: "Read", type: { name: "Boolean" } }, write: { serializedName: "Write", required: true, xmlName: "Write", type: { name: "Boolean" } }, retentionPolicy: { serializedName: "RetentionPolicy", xmlName: "RetentionPolicy", type: { name: "Composite", className: "RetentionPolicy" } } } } }; const RetentionPolicy = { serializedName: "RetentionPolicy", type: { name: "Composite", className: "RetentionPolicy", modelProperties: { enabled: { serializedName: "Enabled", required: true, xmlName: "Enabled", type: { name: "Boolean" } }, days: { constraints: { InclusiveMinimum: 1 }, serializedName: "Days", xmlName: "Days", type: { name: "Number" } } } } }; const Metrics = { serializedName: "Metrics", type: { name: "Composite", className: "Metrics", modelProperties: { version: { serializedName: "Version", xmlName: "Version", type: { name: "String" } }, enabled: { serializedName: "Enabled", required: true, xmlName: "Enabled", type: { name: "Boolean" } }, includeAPIs: { serializedName: "IncludeAPIs", xmlName: "IncludeAPIs", type: { name: "Boolean" } }, retentionPolicy: { serializedName: "RetentionPolicy", xmlName: "RetentionPolicy", type: { name: 
"Composite", className: "RetentionPolicy" } } } } }; const CorsRule = { serializedName: "CorsRule", type: { name: "Composite", className: "CorsRule", modelProperties: { allowedOrigins: { serializedName: "AllowedOrigins", required: true, xmlName: "AllowedOrigins", type: { name: "String" } }, allowedMethods: { serializedName: "AllowedMethods", required: true, xmlName: "AllowedMethods", type: { name: "String" } }, allowedHeaders: { serializedName: "AllowedHeaders", required: true, xmlName: "AllowedHeaders", type: { name: "String" } }, exposedHeaders: { serializedName: "ExposedHeaders", required: true, xmlName: "ExposedHeaders", type: { name: "String" } }, maxAgeInSeconds: { constraints: { InclusiveMinimum: 0 }, serializedName: "MaxAgeInSeconds", required: true, xmlName: "MaxAgeInSeconds", type: { name: "Number" } } } } }; const StaticWebsite = { serializedName: "StaticWebsite", type: { name: "Composite", className: "StaticWebsite", modelProperties: { enabled: { serializedName: "Enabled", required: true, xmlName: "Enabled", type: { name: "Boolean" } }, indexDocument: { serializedName: "IndexDocument", xmlName: "IndexDocument", type: { name: "String" } }, errorDocument404Path: { serializedName: "ErrorDocument404Path", xmlName: "ErrorDocument404Path", type: { name: "String" } }, defaultIndexDocumentPath: { serializedName: "DefaultIndexDocumentPath", xmlName: "DefaultIndexDocumentPath", type: { name: "String" } } } } }; const StorageError = { serializedName: "StorageError", type: { name: "Composite", className: "StorageError", modelProperties: { message: { serializedName: "Message", xmlName: "Message", type: { name: "String" } }, code: { serializedName: "Code", xmlName: "Code", type: { name: "String" } }, authenticationErrorDetail: { serializedName: "AuthenticationErrorDetail", xmlName: "AuthenticationErrorDetail", type: { name: "String" } } } } }; const BlobServiceStatistics = { serializedName: "BlobServiceStatistics", xmlName: "StorageServiceStats", type: { name: "Composite", className: "BlobServiceStatistics", modelProperties: { geoReplication: { serializedName: "GeoReplication", xmlName: "GeoReplication", type: { name: "Composite", className: "GeoReplication" } } } } }; const GeoReplication = { serializedName: "GeoReplication", type: { name: "Composite", className: "GeoReplication", modelProperties: { status: { serializedName: "Status", required: true, xmlName: "Status", type: { name: "Enum", allowedValues: [ "live", "bootstrap", "unavailable" ] } }, lastSyncOn: { serializedName: "LastSyncTime", required: true, xmlName: "LastSyncTime", type: { name: "DateTimeRfc1123" } } } } }; const ListContainersSegmentResponse = { serializedName: "ListContainersSegmentResponse", xmlName: "EnumerationResults", type: { name: "Composite", className: "ListContainersSegmentResponse", modelProperties: { serviceEndpoint: { serializedName: "ServiceEndpoint", required: true, xmlName: "ServiceEndpoint", xmlIsAttribute: true, type: { name: "String" } }, prefix: { serializedName: "Prefix", xmlName: "Prefix", type: { name: "String" } }, marker: { serializedName: "Marker", xmlName: "Marker", type: { name: "String" } }, maxPageSize: { serializedName: "MaxResults", xmlName: "MaxResults", type: { name: "Number" } }, containerItems: { serializedName: "ContainerItems", required: true, xmlName: "Containers", xmlIsWrapped: true, xmlElementName: "Container", type: { name: "Sequence", element: { type: { name: "Composite", className: "ContainerItem" } } } }, continuationToken: { serializedName: "NextMarker", xmlName: 
"NextMarker", type: { name: "String" } } } } }; const ContainerItem = { serializedName: "ContainerItem", xmlName: "Container", type: { name: "Composite", className: "ContainerItem", modelProperties: { name: { serializedName: "Name", required: true, xmlName: "Name", type: { name: "String" } }, deleted: { serializedName: "Deleted", xmlName: "Deleted", type: { name: "Boolean" } }, version: { serializedName: "Version", xmlName: "Version", type: { name: "String" } }, properties: { serializedName: "Properties", xmlName: "Properties", type: { name: "Composite", className: "ContainerProperties" } }, metadata: { serializedName: "Metadata", xmlName: "Metadata", type: { name: "Dictionary", value: { type: { name: "String" } } } } } } }; const ContainerProperties = { serializedName: "ContainerProperties", type: { name: "Composite", className: "ContainerProperties", modelProperties: { lastModified: { serializedName: "Last-Modified", required: true, xmlName: "Last-Modified", type: { name: "DateTimeRfc1123" } }, etag: { serializedName: "Etag", required: true, xmlName: "Etag", type: { name: "String" } }, leaseStatus: { serializedName: "LeaseStatus", xmlName: "LeaseStatus", type: { name: "Enum", allowedValues: ["locked", "unlocked"] } }, leaseState: { serializedName: "LeaseState", xmlName: "LeaseState", type: { name: "Enum", allowedValues: [ "available", "leased", "expired", "breaking", "broken" ] } }, leaseDuration: { serializedName: "LeaseDuration", xmlName: "LeaseDuration", type: { name: "Enum", allowedValues: ["infinite", "fixed"] } }, publicAccess: { serializedName: "PublicAccess", xmlName: "PublicAccess", type: { name: "Enum", allowedValues: ["container", "blob"] } }, hasImmutabilityPolicy: { serializedName: "HasImmutabilityPolicy", xmlName: "HasImmutabilityPolicy", type: { name: "Boolean" } }, hasLegalHold: { serializedName: "HasLegalHold", xmlName: "HasLegalHold", type: { name: "Boolean" } }, defaultEncryptionScope: { serializedName: "DefaultEncryptionScope", xmlName: "DefaultEncryptionScope", type: { name: "String" } }, preventEncryptionScopeOverride: { serializedName: "DenyEncryptionScopeOverride", xmlName: "DenyEncryptionScopeOverride", type: { name: "Boolean" } }, deletedOn: { serializedName: "DeletedTime", xmlName: "DeletedTime", type: { name: "DateTimeRfc1123" } }, remainingRetentionDays: { serializedName: "RemainingRetentionDays", xmlName: "RemainingRetentionDays", type: { name: "Number" } }, isImmutableStorageWithVersioningEnabled: { serializedName: "ImmutableStorageWithVersioningEnabled", xmlName: "ImmutableStorageWithVersioningEnabled", type: { name: "Boolean" } } } } }; const KeyInfo = { serializedName: "KeyInfo", type: { name: "Composite", className: "KeyInfo", modelProperties: { startsOn: { serializedName: "Start", required: true, xmlName: "Start", type: { name: "String" } }, expiresOn: { serializedName: "Expiry", required: true, xmlName: "Expiry", type: { name: "String" } } } } }; const UserDelegationKey = { serializedName: "UserDelegationKey", type: { name: "Composite", className: "UserDelegationKey", modelProperties: { signedObjectId: { serializedName: "SignedOid", required: true, xmlName: "SignedOid", type: { name: "String" } }, signedTenantId: { serializedName: "SignedTid", required: true, xmlName: "SignedTid", type: { name: "String" } }, signedStartsOn: { serializedName: "SignedStart", required: true, xmlName: "SignedStart", type: { name: "String" } }, signedExpiresOn: { serializedName: "SignedExpiry", required: true, xmlName: "SignedExpiry", type: { name: "String" } }, 
signedService: { serializedName: "SignedService", required: true, xmlName: "SignedService", type: { name: "String" } }, signedVersion: { serializedName: "SignedVersion", required: true, xmlName: "SignedVersion", type: { name: "String" } }, value: { serializedName: "Value", required: true, xmlName: "Value", type: { name: "String" } } } } }; const FilterBlobSegment = { serializedName: "FilterBlobSegment", xmlName: "EnumerationResults", type: { name: "Composite", className: "FilterBlobSegment", modelProperties: { serviceEndpoint: { serializedName: "ServiceEndpoint", required: true, xmlName: "ServiceEndpoint", xmlIsAttribute: true, type: { name: "String" } }, where: { serializedName: "Where", required: true, xmlName: "Where", type: { name: "String" } }, blobs: { serializedName: "Blobs", required: true, xmlName: "Blobs", xmlIsWrapped: true, xmlElementName: "Blob", type: { name: "Sequence", element: { type: { name: "Composite", className: "FilterBlobItem" } } } }, continuationToken: { serializedName: "NextMarker", xmlName: "NextMarker", type: { name: "String" } } } } }; const FilterBlobItem = { serializedName: "FilterBlobItem", xmlName: "Blob", type: { name: "Composite", className: "FilterBlobItem", modelProperties: { name: { serializedName: "Name", required: true, xmlName: "Name", type: { name: "String" } }, containerName: { serializedName: "ContainerName", required: true, xmlName: "ContainerName", type: { name: "String" } }, tags: { serializedName: "Tags", xmlName: "Tags", type: { name: "Composite", className: "BlobTags" } } } } }; const BlobTags = { serializedName: "BlobTags", xmlName: "Tags", type: { name: "Composite", className: "BlobTags", modelProperties: { blobTagSet: { serializedName: "BlobTagSet", required: true, xmlName: "TagSet", xmlIsWrapped: true, xmlElementName: "Tag", type: { name: "Sequence", element: { type: { name: "Composite", className: "BlobTag" } } } } } } }; const BlobTag = { serializedName: "BlobTag", xmlName: "Tag", type: { name: "Composite", className: "BlobTag", modelProperties: { key: { serializedName: "Key", required: true, xmlName: "Key", type: { name: "String" } }, value: { serializedName: "Value", required: true, xmlName: "Value", type: { name: "String" } } } } }; const SignedIdentifier = { serializedName: "SignedIdentifier", xmlName: "SignedIdentifier", type: { name: "Composite", className: "SignedIdentifier", modelProperties: { id: { serializedName: "Id", required: true, xmlName: "Id", type: { name: "String" } }, accessPolicy: { serializedName: "AccessPolicy", xmlName: "AccessPolicy", type: { name: "Composite", className: "AccessPolicy" } } } } }; const AccessPolicy = { serializedName: "AccessPolicy", type: { name: "Composite", className: "AccessPolicy", modelProperties: { startsOn: { serializedName: "Start", xmlName: "Start", type: { name: "String" } }, expiresOn: { serializedName: "Expiry", xmlName: "Expiry", type: { name: "String" } }, permissions: { serializedName: "Permission", xmlName: "Permission", type: { name: "String" } } } } }; const ListBlobsFlatSegmentResponse = { serializedName: "ListBlobsFlatSegmentResponse", xmlName: "EnumerationResults", type: { name: "Composite", className: "ListBlobsFlatSegmentResponse", modelProperties: { serviceEndpoint: { serializedName: "ServiceEndpoint", required: true, xmlName: "ServiceEndpoint", xmlIsAttribute: true, type: { name: "String" } }, containerName: { serializedName: "ContainerName", required: true, xmlName: "ContainerName", xmlIsAttribute: true, type: { name: "String" } }, prefix: { serializedName: "Prefix", 
xmlName: "Prefix", type: { name: "String" } }, marker: { serializedName: "Marker", xmlName: "Marker", type: { name: "String" } }, maxPageSize: { serializedName: "MaxResults", xmlName: "MaxResults", type: { name: "Number" } }, segment: { serializedName: "Segment", xmlName: "Blobs", type: { name: "Composite", className: "BlobFlatListSegment" } }, continuationToken: { serializedName: "NextMarker", xmlName: "NextMarker", type: { name: "String" } } } } }; const BlobFlatListSegment = { serializedName: "BlobFlatListSegment", xmlName: "Blobs", type: { name: "Composite", className: "BlobFlatListSegment", modelProperties: { blobItems: { serializedName: "BlobItems", required: true, xmlName: "BlobItems", xmlElementName: "Blob", type: { name: "Sequence", element: { type: { name: "Composite", className: "BlobItemInternal" } } } } } } }; const BlobItemInternal = { serializedName: "BlobItemInternal", xmlName: "Blob", type: { name: "Composite", className: "BlobItemInternal", modelProperties: { name: { serializedName: "Name", xmlName: "Name", type: { name: "Composite", className: "BlobName" } }, deleted: { serializedName: "Deleted", required: true, xmlName: "Deleted", type: { name: "Boolean" } }, snapshot: { serializedName: "Snapshot", required: true, xmlName: "Snapshot", type: { name: "String" } }, versionId: { serializedName: "VersionId", xmlName: "VersionId", type: { name: "String" } }, isCurrentVersion: { serializedName: "IsCurrentVersion", xmlName: "IsCurrentVersion", type: { name: "Boolean" } }, properties: { serializedName: "Properties", xmlName: "Properties", type: { name: "Composite", className: "BlobPropertiesInternal" } }, metadata: { serializedName: "Metadata", xmlName: "Metadata", type: { name: "Dictionary", value: { type: { name: "String" } } } }, blobTags: { serializedName: "BlobTags", xmlName: "Tags", type: { name: "Composite", className: "BlobTags" } }, objectReplicationMetadata: { serializedName: "ObjectReplicationMetadata", xmlName: "OrMetadata", type: { name: "Dictionary", value: { type: { name: "String" } } } }, hasVersionsOnly: { serializedName: "HasVersionsOnly", xmlName: "HasVersionsOnly", type: { name: "Boolean" } } } } }; const BlobName = { serializedName: "BlobName", type: { name: "Composite", className: "BlobName", modelProperties: { encoded: { serializedName: "Encoded", xmlName: "Encoded", xmlIsAttribute: true, type: { name: "Boolean" } }, content: { serializedName: "content", xmlName: "content", xmlIsMsText: true, type: { name: "String" } } } } }; const BlobPropertiesInternal = { serializedName: "BlobPropertiesInternal", xmlName: "Properties", type: { name: "Composite", className: "BlobPropertiesInternal", modelProperties: { createdOn: { serializedName: "Creation-Time", xmlName: "Creation-Time", type: { name: "DateTimeRfc1123" } }, lastModified: { serializedName: "Last-Modified", required: true, xmlName: "Last-Modified", type: { name: "DateTimeRfc1123" } }, etag: { serializedName: "Etag", required: true, xmlName: "Etag", type: { name: "String" } }, contentLength: { serializedName: "Content-Length", xmlName: "Content-Length", type: { name: "Number" } }, contentType: { serializedName: "Content-Type", xmlName: "Content-Type", type: { name: "String" } }, contentEncoding: { serializedName: "Content-Encoding", xmlName: "Content-Encoding", type: { name: "String" } }, contentLanguage: { serializedName: "Content-Language", xmlName: "Content-Language", type: { name: "String" } }, contentMD5: { serializedName: "Content-MD5", xmlName: "Content-MD5", type: { name: "ByteArray" } }, 
contentDisposition: { serializedName: "Content-Disposition", xmlName: "Content-Disposition", type: { name: "String" } }, cacheControl: { serializedName: "Cache-Control", xmlName: "Cache-Control", type: { name: "String" } }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number" } }, blobType: { serializedName: "BlobType", xmlName: "BlobType", type: { name: "Enum", allowedValues: [ "BlockBlob", "PageBlob", "AppendBlob" ] } }, leaseStatus: { serializedName: "LeaseStatus", xmlName: "LeaseStatus", type: { name: "Enum", allowedValues: ["locked", "unlocked"] } }, leaseState: { serializedName: "LeaseState", xmlName: "LeaseState", type: { name: "Enum", allowedValues: [ "available", "leased", "expired", "breaking", "broken" ] } }, leaseDuration: { serializedName: "LeaseDuration", xmlName: "LeaseDuration", type: { name: "Enum", allowedValues: ["infinite", "fixed"] } }, copyId: { serializedName: "CopyId", xmlName: "CopyId", type: { name: "String" } }, copyStatus: { serializedName: "CopyStatus", xmlName: "CopyStatus", type: { name: "Enum", allowedValues: [ "pending", "success", "aborted", "failed" ] } }, copySource: { serializedName: "CopySource", xmlName: "CopySource", type: { name: "String" } }, copyProgress: { serializedName: "CopyProgress", xmlName: "CopyProgress", type: { name: "String" } }, copyCompletedOn: { serializedName: "CopyCompletionTime", xmlName: "CopyCompletionTime", type: { name: "DateTimeRfc1123" } }, copyStatusDescription: { serializedName: "CopyStatusDescription", xmlName: "CopyStatusDescription", type: { name: "String" } }, serverEncrypted: { serializedName: "ServerEncrypted", xmlName: "ServerEncrypted", type: { name: "Boolean" } }, incrementalCopy: { serializedName: "IncrementalCopy", xmlName: "IncrementalCopy", type: { name: "Boolean" } }, destinationSnapshot: { serializedName: "DestinationSnapshot", xmlName: "DestinationSnapshot", type: { name: "String" } }, deletedOn: { serializedName: "DeletedTime", xmlName: "DeletedTime", type: { name: "DateTimeRfc1123" } }, remainingRetentionDays: { serializedName: "RemainingRetentionDays", xmlName: "RemainingRetentionDays", type: { name: "Number" } }, accessTier: { serializedName: "AccessTier", xmlName: "AccessTier", type: { name: "Enum", allowedValues: [ "P4", "P6", "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", "Cold" ] } }, accessTierInferred: { serializedName: "AccessTierInferred", xmlName: "AccessTierInferred", type: { name: "Boolean" } }, archiveStatus: { serializedName: "ArchiveStatus", xmlName: "ArchiveStatus", type: { name: "Enum", allowedValues: [ "rehydrate-pending-to-hot", "rehydrate-pending-to-cool", "rehydrate-pending-to-cold" ] } }, customerProvidedKeySha256: { serializedName: "CustomerProvidedKeySha256", xmlName: "CustomerProvidedKeySha256", type: { name: "String" } }, encryptionScope: { serializedName: "EncryptionScope", xmlName: "EncryptionScope", type: { name: "String" } }, accessTierChangedOn: { serializedName: "AccessTierChangeTime", xmlName: "AccessTierChangeTime", type: { name: "DateTimeRfc1123" } }, tagCount: { serializedName: "TagCount", xmlName: "TagCount", type: { name: "Number" } }, expiresOn: { serializedName: "Expiry-Time", xmlName: "Expiry-Time", type: { name: "DateTimeRfc1123" } }, isSealed: { serializedName: "Sealed", xmlName: "Sealed", type: { name: "Boolean" } }, rehydratePriority: { serializedName: "RehydratePriority", xmlName: "RehydratePriority", type: { name: "Enum", allowedValues: 
["High", "Standard"] } }, lastAccessedOn: { serializedName: "LastAccessTime", xmlName: "LastAccessTime", type: { name: "DateTimeRfc1123" } }, immutabilityPolicyExpiresOn: { serializedName: "ImmutabilityPolicyUntilDate", xmlName: "ImmutabilityPolicyUntilDate", type: { name: "DateTimeRfc1123" } }, immutabilityPolicyMode: { serializedName: "ImmutabilityPolicyMode", xmlName: "ImmutabilityPolicyMode", type: { name: "Enum", allowedValues: [ "Mutable", "Unlocked", "Locked" ] } }, legalHold: { serializedName: "LegalHold", xmlName: "LegalHold", type: { name: "Boolean" } } } } }; const ListBlobsHierarchySegmentResponse = { serializedName: "ListBlobsHierarchySegmentResponse", xmlName: "EnumerationResults", type: { name: "Composite", className: "ListBlobsHierarchySegmentResponse", modelProperties: { serviceEndpoint: { serializedName: "ServiceEndpoint", required: true, xmlName: "ServiceEndpoint", xmlIsAttribute: true, type: { name: "String" } }, containerName: { serializedName: "ContainerName", required: true, xmlName: "ContainerName", xmlIsAttribute: true, type: { name: "String" } }, prefix: { serializedName: "Prefix", xmlName: "Prefix", type: { name: "String" } }, marker: { serializedName: "Marker", xmlName: "Marker", type: { name: "String" } }, maxPageSize: { serializedName: "MaxResults", xmlName: "MaxResults", type: { name: "Number" } }, delimiter: { serializedName: "Delimiter", xmlName: "Delimiter", type: { name: "String" } }, segment: { serializedName: "Segment", xmlName: "Blobs", type: { name: "Composite", className: "BlobHierarchyListSegment" } }, continuationToken: { serializedName: "NextMarker", xmlName: "NextMarker", type: { name: "String" } } } } }; const BlobHierarchyListSegment = { serializedName: "BlobHierarchyListSegment", xmlName: "Blobs", type: { name: "Composite", className: "BlobHierarchyListSegment", modelProperties: { blobPrefixes: { serializedName: "BlobPrefixes", xmlName: "BlobPrefixes", xmlElementName: "BlobPrefix", type: { name: "Sequence", element: { type: { name: "Composite", className: "BlobPrefix" } } } }, blobItems: { serializedName: "BlobItems", required: true, xmlName: "BlobItems", xmlElementName: "Blob", type: { name: "Sequence", element: { type: { name: "Composite", className: "BlobItemInternal" } } } } } } }; const BlobPrefix = { serializedName: "BlobPrefix", type: { name: "Composite", className: "BlobPrefix", modelProperties: { name: { serializedName: "Name", xmlName: "Name", type: { name: "Composite", className: "BlobName" } } } } }; const BlockLookupList = { serializedName: "BlockLookupList", xmlName: "BlockList", type: { name: "Composite", className: "BlockLookupList", modelProperties: { committed: { serializedName: "Committed", xmlName: "Committed", xmlElementName: "Committed", type: { name: "Sequence", element: { type: { name: "String" } } } }, uncommitted: { serializedName: "Uncommitted", xmlName: "Uncommitted", xmlElementName: "Uncommitted", type: { name: "Sequence", element: { type: { name: "String" } } } }, latest: { serializedName: "Latest", xmlName: "Latest", xmlElementName: "Latest", type: { name: "Sequence", element: { type: { name: "String" } } } } } } }; const BlockList = { serializedName: "BlockList", type: { name: "Composite", className: "BlockList", modelProperties: { committedBlocks: { serializedName: "CommittedBlocks", xmlName: "CommittedBlocks", xmlIsWrapped: true, xmlElementName: "Block", type: { name: "Sequence", element: { type: { name: "Composite", className: "Block" } } } }, uncommittedBlocks: { serializedName: "UncommittedBlocks", xmlName: 
"UncommittedBlocks", xmlIsWrapped: true, xmlElementName: "Block", type: { name: "Sequence", element: { type: { name: "Composite", className: "Block" } } } } } } }; const Block = { serializedName: "Block", type: { name: "Composite", className: "Block", modelProperties: { name: { serializedName: "Name", required: true, xmlName: "Name", type: { name: "String" } }, size: { serializedName: "Size", required: true, xmlName: "Size", type: { name: "Number" } } } } }; const PageList = { serializedName: "PageList", type: { name: "Composite", className: "PageList", modelProperties: { pageRange: { serializedName: "PageRange", xmlName: "PageRange", xmlElementName: "PageRange", type: { name: "Sequence", element: { type: { name: "Composite", className: "PageRange" } } } }, clearRange: { serializedName: "ClearRange", xmlName: "ClearRange", xmlElementName: "ClearRange", type: { name: "Sequence", element: { type: { name: "Composite", className: "ClearRange" } } } }, continuationToken: { serializedName: "NextMarker", xmlName: "NextMarker", type: { name: "String" } } } } }; const PageRange = { serializedName: "PageRange", xmlName: "PageRange", type: { name: "Composite", className: "PageRange", modelProperties: { start: { serializedName: "Start", required: true, xmlName: "Start", type: { name: "Number" } }, end: { serializedName: "End", required: true, xmlName: "End", type: { name: "Number" } } } } }; const ClearRange = { serializedName: "ClearRange", xmlName: "ClearRange", type: { name: "Composite", className: "ClearRange", modelProperties: { start: { serializedName: "Start", required: true, xmlName: "Start", type: { name: "Number" } }, end: { serializedName: "End", required: true, xmlName: "End", type: { name: "Number" } } } } }; const QueryRequest = { serializedName: "QueryRequest", xmlName: "QueryRequest", type: { name: "Composite", className: "QueryRequest", modelProperties: { queryType: { serializedName: "QueryType", required: true, xmlName: "QueryType", type: { name: "String" } }, expression: { serializedName: "Expression", required: true, xmlName: "Expression", type: { name: "String" } }, inputSerialization: { serializedName: "InputSerialization", xmlName: "InputSerialization", type: { name: "Composite", className: "QuerySerialization" } }, outputSerialization: { serializedName: "OutputSerialization", xmlName: "OutputSerialization", type: { name: "Composite", className: "QuerySerialization" } } } } }; const QuerySerialization = { serializedName: "QuerySerialization", type: { name: "Composite", className: "QuerySerialization", modelProperties: { format: { serializedName: "Format", xmlName: "Format", type: { name: "Composite", className: "QueryFormat" } } } } }; const QueryFormat = { serializedName: "QueryFormat", type: { name: "Composite", className: "QueryFormat", modelProperties: { type: { serializedName: "Type", required: true, xmlName: "Type", type: { name: "Enum", allowedValues: [ "delimited", "json", "arrow", "parquet" ] } }, delimitedTextConfiguration: { serializedName: "DelimitedTextConfiguration", xmlName: "DelimitedTextConfiguration", type: { name: "Composite", className: "DelimitedTextConfiguration" } }, jsonTextConfiguration: { serializedName: "JsonTextConfiguration", xmlName: "JsonTextConfiguration", type: { name: "Composite", className: "JsonTextConfiguration" } }, arrowConfiguration: { serializedName: "ArrowConfiguration", xmlName: "ArrowConfiguration", type: { name: "Composite", className: "ArrowConfiguration" } }, parquetTextConfiguration: { serializedName: "ParquetTextConfiguration", 
xmlName: "ParquetTextConfiguration", type: { name: "Dictionary", value: { type: { name: "any" } } } } } } }; const DelimitedTextConfiguration = { serializedName: "DelimitedTextConfiguration", xmlName: "DelimitedTextConfiguration", type: { name: "Composite", className: "DelimitedTextConfiguration", modelProperties: { columnSeparator: { serializedName: "ColumnSeparator", xmlName: "ColumnSeparator", type: { name: "String" } }, fieldQuote: { serializedName: "FieldQuote", xmlName: "FieldQuote", type: { name: "String" } }, recordSeparator: { serializedName: "RecordSeparator", xmlName: "RecordSeparator", type: { name: "String" } }, escapeChar: { serializedName: "EscapeChar", xmlName: "EscapeChar", type: { name: "String" } }, headersPresent: { serializedName: "HeadersPresent", xmlName: "HasHeaders", type: { name: "Boolean" } } } } }; const JsonTextConfiguration = { serializedName: "JsonTextConfiguration", xmlName: "JsonTextConfiguration", type: { name: "Composite", className: "JsonTextConfiguration", modelProperties: { recordSeparator: { serializedName: "RecordSeparator", xmlName: "RecordSeparator", type: { name: "String" } } } } }; const ArrowConfiguration = { serializedName: "ArrowConfiguration", xmlName: "ArrowConfiguration", type: { name: "Composite", className: "ArrowConfiguration", modelProperties: { schema: { serializedName: "Schema", required: true, xmlName: "Schema", xmlIsWrapped: true, xmlElementName: "Field", type: { name: "Sequence", element: { type: { name: "Composite", className: "ArrowField" } } } } } } }; const ArrowField = { serializedName: "ArrowField", xmlName: "Field", type: { name: "Composite", className: "ArrowField", modelProperties: { type: { serializedName: "Type", required: true, xmlName: "Type", type: { name: "String" } }, name: { serializedName: "Name", xmlName: "Name", type: { name: "String" } }, precision: { serializedName: "Precision", xmlName: "Precision", type: { name: "Number" } }, scale: { serializedName: "Scale", xmlName: "Scale", type: { name: "Number" } } } } }; const ServiceSetPropertiesHeaders = { serializedName: "Service_setPropertiesHeaders", type: { name: "Composite", className: "ServiceSetPropertiesHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ServiceSetPropertiesExceptionHeaders = { serializedName: "Service_setPropertiesExceptionHeaders", type: { name: "Composite", className: "ServiceSetPropertiesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ServiceGetPropertiesHeaders = { serializedName: "Service_getPropertiesHeaders", type: { name: "Composite", className: "ServiceGetPropertiesHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; 
const ServiceGetPropertiesExceptionHeaders = { serializedName: "Service_getPropertiesExceptionHeaders", type: { name: "Composite", className: "ServiceGetPropertiesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ServiceGetStatisticsHeaders = { serializedName: "Service_getStatisticsHeaders", type: { name: "Composite", className: "ServiceGetStatisticsHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ServiceGetStatisticsExceptionHeaders = { serializedName: "Service_getStatisticsExceptionHeaders", type: { name: "Composite", className: "ServiceGetStatisticsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ServiceListContainersSegmentHeaders = { serializedName: "Service_listContainersSegmentHeaders", type: { name: "Composite", className: "ServiceListContainersSegmentHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ServiceListContainersSegmentExceptionHeaders = { serializedName: "Service_listContainersSegmentExceptionHeaders", type: { name: "Composite", className: "ServiceListContainersSegmentExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ServiceGetUserDelegationKeyHeaders = { serializedName: "Service_getUserDelegationKeyHeaders", type: { name: "Composite", className: "ServiceGetUserDelegationKeyHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ServiceGetUserDelegationKeyExceptionHeaders = { serializedName: "Service_getUserDelegationKeyExceptionHeaders", type: { name: "Composite", className: "ServiceGetUserDelegationKeyExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ServiceGetAccountInfoHeaders = { serializedName: "Service_getAccountInfoHeaders", type: { name: "Composite", className: "ServiceGetAccountInfoHeaders", modelProperties: { clientRequestId: { serializedName: 
"x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, skuName: { serializedName: "x-ms-sku-name", xmlName: "x-ms-sku-name", type: { name: "Enum", allowedValues: [ "Standard_LRS", "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS" ] } }, accountKind: { serializedName: "x-ms-account-kind", xmlName: "x-ms-account-kind", type: { name: "Enum", allowedValues: [ "Storage", "BlobStorage", "StorageV2", "FileStorage", "BlockBlobStorage" ] } }, isHierarchicalNamespaceEnabled: { serializedName: "x-ms-is-hns-enabled", xmlName: "x-ms-is-hns-enabled", type: { name: "Boolean" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ServiceGetAccountInfoExceptionHeaders = { serializedName: "Service_getAccountInfoExceptionHeaders", type: { name: "Composite", className: "ServiceGetAccountInfoExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ServiceSubmitBatchHeaders = { serializedName: "Service_submitBatchHeaders", type: { name: "Composite", className: "ServiceSubmitBatchHeaders", modelProperties: { contentType: { serializedName: "content-type", xmlName: "content-type", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ServiceSubmitBatchExceptionHeaders = { serializedName: "Service_submitBatchExceptionHeaders", type: { name: "Composite", className: "ServiceSubmitBatchExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ServiceFilterBlobsHeaders = { serializedName: "Service_filterBlobsHeaders", type: { name: "Composite", className: "ServiceFilterBlobsHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ServiceFilterBlobsExceptionHeaders = { serializedName: "Service_filterBlobsExceptionHeaders", type: { name: "Composite", className: "ServiceFilterBlobsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerCreateHeaders = { serializedName: "Container_createHeaders", type: { name: "Composite", className: "ContainerCreateHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: 
"String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerCreateExceptionHeaders = { serializedName: "Container_createExceptionHeaders", type: { name: "Composite", className: "ContainerCreateExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerGetPropertiesHeaders = { serializedName: "Container_getPropertiesHeaders", type: { name: "Composite", className: "ContainerGetPropertiesHeaders", modelProperties: { metadata: { serializedName: "x-ms-meta", headerCollectionPrefix: "x-ms-meta-", xmlName: "x-ms-meta", type: { name: "Dictionary", value: { type: { name: "String" } } } }, etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, leaseDuration: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Enum", allowedValues: ["infinite", "fixed"] } }, leaseState: { serializedName: "x-ms-lease-state", xmlName: "x-ms-lease-state", type: { name: "Enum", allowedValues: [ "available", "leased", "expired", "breaking", "broken" ] } }, leaseStatus: { serializedName: "x-ms-lease-status", xmlName: "x-ms-lease-status", type: { name: "Enum", allowedValues: ["locked", "unlocked"] } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, blobPublicAccess: { serializedName: "x-ms-blob-public-access", xmlName: "x-ms-blob-public-access", type: { name: "Enum", allowedValues: ["container", "blob"] } }, hasImmutabilityPolicy: { serializedName: "x-ms-has-immutability-policy", xmlName: "x-ms-has-immutability-policy", type: { name: "Boolean" } }, hasLegalHold: { serializedName: "x-ms-has-legal-hold", xmlName: "x-ms-has-legal-hold", type: { name: "Boolean" } }, defaultEncryptionScope: { serializedName: "x-ms-default-encryption-scope", xmlName: "x-ms-default-encryption-scope", type: { name: "String" } }, denyEncryptionScopeOverride: { serializedName: "x-ms-deny-encryption-scope-override", xmlName: "x-ms-deny-encryption-scope-override", type: { name: "Boolean" } }, isImmutableStorageWithVersioningEnabled: { serializedName: "x-ms-immutable-storage-with-versioning-enabled", xmlName: "x-ms-immutable-storage-with-versioning-enabled", type: { name: "Boolean" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerGetPropertiesExceptionHeaders = { serializedName: "Container_getPropertiesExceptionHeaders", type: { name: "Composite", className: 
"ContainerGetPropertiesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerDeleteHeaders = { serializedName: "Container_deleteHeaders", type: { name: "Composite", className: "ContainerDeleteHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerDeleteExceptionHeaders = { serializedName: "Container_deleteExceptionHeaders", type: { name: "Composite", className: "ContainerDeleteExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerSetMetadataHeaders = { serializedName: "Container_setMetadataHeaders", type: { name: "Composite", className: "ContainerSetMetadataHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerSetMetadataExceptionHeaders = { serializedName: "Container_setMetadataExceptionHeaders", type: { name: "Composite", className: "ContainerSetMetadataExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerGetAccessPolicyHeaders = { serializedName: "Container_getAccessPolicyHeaders", type: { name: "Composite", className: "ContainerGetAccessPolicyHeaders", modelProperties: { blobPublicAccess: { serializedName: "x-ms-blob-public-access", xmlName: "x-ms-blob-public-access", type: { name: "Enum", allowedValues: ["container", "blob"] } }, etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerGetAccessPolicyExceptionHeaders = { serializedName: "Container_getAccessPolicyExceptionHeaders", type: { name: "Composite", className: 
"ContainerGetAccessPolicyExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerSetAccessPolicyHeaders = { serializedName: "Container_setAccessPolicyHeaders", type: { name: "Composite", className: "ContainerSetAccessPolicyHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerSetAccessPolicyExceptionHeaders = { serializedName: "Container_setAccessPolicyExceptionHeaders", type: { name: "Composite", className: "ContainerSetAccessPolicyExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerRestoreHeaders = { serializedName: "Container_restoreHeaders", type: { name: "Composite", className: "ContainerRestoreHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerRestoreExceptionHeaders = { serializedName: "Container_restoreExceptionHeaders", type: { name: "Composite", className: "ContainerRestoreExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerRenameHeaders = { serializedName: "Container_renameHeaders", type: { name: "Composite", className: "ContainerRenameHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerRenameExceptionHeaders = { serializedName: "Container_renameExceptionHeaders", type: { name: "Composite", className: "ContainerRenameExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerSubmitBatchHeaders = { serializedName: "Container_submitBatchHeaders", type: { name: "Composite", className: "ContainerSubmitBatchHeaders", modelProperties: { contentType: { serializedName: 
"content-type", xmlName: "content-type", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } } } } }; const ContainerSubmitBatchExceptionHeaders = { serializedName: "Container_submitBatchExceptionHeaders", type: { name: "Composite", className: "ContainerSubmitBatchExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerFilterBlobsHeaders = { serializedName: "Container_filterBlobsHeaders", type: { name: "Composite", className: "ContainerFilterBlobsHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } } } } }; const ContainerFilterBlobsExceptionHeaders = { serializedName: "Container_filterBlobsExceptionHeaders", type: { name: "Composite", className: "ContainerFilterBlobsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerAcquireLeaseHeaders = { serializedName: "Container_acquireLeaseHeaders", type: { name: "Composite", className: "ContainerAcquireLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { name: "String" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } } } } }; const ContainerAcquireLeaseExceptionHeaders = { serializedName: "Container_acquireLeaseExceptionHeaders", type: { name: "Composite", className: "ContainerAcquireLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerReleaseLeaseHeaders = { serializedName: "Container_releaseLeaseHeaders", type: { name: "Composite", className: "ContainerReleaseLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } } } } }; const ContainerReleaseLeaseExceptionHeaders = { serializedName: 
"Container_releaseLeaseExceptionHeaders", type: { name: "Composite", className: "ContainerReleaseLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerRenewLeaseHeaders = { serializedName: "Container_renewLeaseHeaders", type: { name: "Composite", className: "ContainerRenewLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { name: "String" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } } } } }; const ContainerRenewLeaseExceptionHeaders = { serializedName: "Container_renewLeaseExceptionHeaders", type: { name: "Composite", className: "ContainerRenewLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerBreakLeaseHeaders = { serializedName: "Container_breakLeaseHeaders", type: { name: "Composite", className: "ContainerBreakLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, leaseTime: { serializedName: "x-ms-lease-time", xmlName: "x-ms-lease-time", type: { name: "Number" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } } } } }; const ContainerBreakLeaseExceptionHeaders = { serializedName: "Container_breakLeaseExceptionHeaders", type: { name: "Composite", className: "ContainerBreakLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerChangeLeaseHeaders = { serializedName: "Container_changeLeaseHeaders", type: { name: "Composite", className: "ContainerChangeLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { name: "String" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } } } } }; const ContainerChangeLeaseExceptionHeaders = { 
serializedName: "Container_changeLeaseExceptionHeaders", type: { name: "Composite", className: "ContainerChangeLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerListBlobFlatSegmentHeaders = { serializedName: "Container_listBlobFlatSegmentHeaders", type: { name: "Composite", className: "ContainerListBlobFlatSegmentHeaders", modelProperties: { contentType: { serializedName: "content-type", xmlName: "content-type", type: { name: "String" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerListBlobFlatSegmentExceptionHeaders = { serializedName: "Container_listBlobFlatSegmentExceptionHeaders", type: { name: "Composite", className: "ContainerListBlobFlatSegmentExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerListBlobHierarchySegmentHeaders = { serializedName: "Container_listBlobHierarchySegmentHeaders", type: { name: "Composite", className: "ContainerListBlobHierarchySegmentHeaders", modelProperties: { contentType: { serializedName: "content-type", xmlName: "content-type", type: { name: "String" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerListBlobHierarchySegmentExceptionHeaders = { serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", type: { name: "Composite", className: "ContainerListBlobHierarchySegmentExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const ContainerGetAccountInfoHeaders = { serializedName: "Container_getAccountInfoHeaders", type: { name: "Composite", className: "ContainerGetAccountInfoHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, skuName: { serializedName: "x-ms-sku-name", xmlName: "x-ms-sku-name", type: { name: "Enum", allowedValues: [ "Standard_LRS", "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS" ] } }, accountKind: { serializedName: "x-ms-account-kind", xmlName: "x-ms-account-kind", type: { name: "Enum", allowedValues: [ "Storage", "BlobStorage", "StorageV2", 
"FileStorage", "BlockBlobStorage" ] } }, isHierarchicalNamespaceEnabled: { serializedName: "x-ms-is-hns-enabled", xmlName: "x-ms-is-hns-enabled", type: { name: "Boolean" } } } } }; const ContainerGetAccountInfoExceptionHeaders = { serializedName: "Container_getAccountInfoExceptionHeaders", type: { name: "Composite", className: "ContainerGetAccountInfoExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobDownloadHeaders = { serializedName: "Blob_downloadHeaders", type: { name: "Composite", className: "BlobDownloadHeaders", modelProperties: { lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, createdOn: { serializedName: "x-ms-creation-time", xmlName: "x-ms-creation-time", type: { name: "DateTimeRfc1123" } }, metadata: { serializedName: "x-ms-meta", headerCollectionPrefix: "x-ms-meta-", xmlName: "x-ms-meta", type: { name: "Dictionary", value: { type: { name: "String" } } } }, objectReplicationPolicyId: { serializedName: "x-ms-or-policy-id", xmlName: "x-ms-or-policy-id", type: { name: "String" } }, objectReplicationRules: { serializedName: "x-ms-or", headerCollectionPrefix: "x-ms-or-", xmlName: "x-ms-or", type: { name: "Dictionary", value: { type: { name: "String" } } } }, contentLength: { serializedName: "content-length", xmlName: "content-length", type: { name: "Number" } }, contentType: { serializedName: "content-type", xmlName: "content-type", type: { name: "String" } }, contentRange: { serializedName: "content-range", xmlName: "content-range", type: { name: "String" } }, etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray" } }, contentEncoding: { serializedName: "content-encoding", xmlName: "content-encoding", type: { name: "String" } }, cacheControl: { serializedName: "cache-control", xmlName: "cache-control", type: { name: "String" } }, contentDisposition: { serializedName: "content-disposition", xmlName: "content-disposition", type: { name: "String" } }, contentLanguage: { serializedName: "content-language", xmlName: "content-language", type: { name: "String" } }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number" } }, blobType: { serializedName: "x-ms-blob-type", xmlName: "x-ms-blob-type", type: { name: "Enum", allowedValues: [ "BlockBlob", "PageBlob", "AppendBlob" ] } }, copyCompletedOn: { serializedName: "x-ms-copy-completion-time", xmlName: "x-ms-copy-completion-time", type: { name: "DateTimeRfc1123" } }, copyStatusDescription: { serializedName: "x-ms-copy-status-description", xmlName: "x-ms-copy-status-description", type: { name: "String" } }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { name: "String" } }, copyProgress: { serializedName: "x-ms-copy-progress", xmlName: "x-ms-copy-progress", type: { name: "String" } }, copySource: { serializedName: "x-ms-copy-source", xmlName: "x-ms-copy-source", type: { name: "String" } }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", allowedValues: [ "pending", "success", "aborted", "failed" ] } }, leaseDuration: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Enum", allowedValues: ["infinite", "fixed"] } }, leaseState: { serializedName: "x-ms-lease-state", xmlName: 
"x-ms-lease-state", type: { name: "Enum", allowedValues: [ "available", "leased", "expired", "breaking", "broken" ] } }, leaseStatus: { serializedName: "x-ms-lease-status", xmlName: "x-ms-lease-status", type: { name: "Enum", allowedValues: ["locked", "unlocked"] } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String" } }, isCurrentVersion: { serializedName: "x-ms-is-current-version", xmlName: "x-ms-is-current-version", type: { name: "Boolean" } }, acceptRanges: { serializedName: "accept-ranges", xmlName: "accept-ranges", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { name: "Number" } }, isServerEncrypted: { serializedName: "x-ms-server-encrypted", xmlName: "x-ms-server-encrypted", type: { name: "Boolean" } }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String" } }, blobContentMD5: { serializedName: "x-ms-blob-content-md5", xmlName: "x-ms-blob-content-md5", type: { name: "ByteArray" } }, tagCount: { serializedName: "x-ms-tag-count", xmlName: "x-ms-tag-count", type: { name: "Number" } }, isSealed: { serializedName: "x-ms-blob-sealed", xmlName: "x-ms-blob-sealed", type: { name: "Boolean" } }, lastAccessed: { serializedName: "x-ms-last-access-time", xmlName: "x-ms-last-access-time", type: { name: "DateTimeRfc1123" } }, immutabilityPolicyExpiresOn: { serializedName: "x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { name: "DateTimeRfc1123" } }, immutabilityPolicyMode: { serializedName: "x-ms-immutability-policy-mode", xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", allowedValues: [ "Mutable", "Unlocked", "Locked" ] } }, legalHold: { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { name: "Boolean" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } }, contentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray" } } } } }; const BlobDownloadExceptionHeaders = { serializedName: "Blob_downloadExceptionHeaders", type: { name: "Composite", className: "BlobDownloadExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobGetPropertiesHeaders = { serializedName: "Blob_getPropertiesHeaders", type: { name: "Composite", className: "BlobGetPropertiesHeaders", modelProperties: { lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, createdOn: { serializedName: "x-ms-creation-time", xmlName: "x-ms-creation-time", type: { name: "DateTimeRfc1123" } }, metadata: { serializedName: "x-ms-meta", headerCollectionPrefix: "x-ms-meta-", xmlName: "x-ms-meta", type: { name: "Dictionary", value: { type: { name: "String" } } } }, 
objectReplicationPolicyId: { serializedName: "x-ms-or-policy-id", xmlName: "x-ms-or-policy-id", type: { name: "String" } }, objectReplicationRules: { serializedName: "x-ms-or", headerCollectionPrefix: "x-ms-or-", xmlName: "x-ms-or", type: { name: "Dictionary", value: { type: { name: "String" } } } }, blobType: { serializedName: "x-ms-blob-type", xmlName: "x-ms-blob-type", type: { name: "Enum", allowedValues: [ "BlockBlob", "PageBlob", "AppendBlob" ] } }, copyCompletedOn: { serializedName: "x-ms-copy-completion-time", xmlName: "x-ms-copy-completion-time", type: { name: "DateTimeRfc1123" } }, copyStatusDescription: { serializedName: "x-ms-copy-status-description", xmlName: "x-ms-copy-status-description", type: { name: "String" } }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { name: "String" } }, copyProgress: { serializedName: "x-ms-copy-progress", xmlName: "x-ms-copy-progress", type: { name: "String" } }, copySource: { serializedName: "x-ms-copy-source", xmlName: "x-ms-copy-source", type: { name: "String" } }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", allowedValues: [ "pending", "success", "aborted", "failed" ] } }, isIncrementalCopy: { serializedName: "x-ms-incremental-copy", xmlName: "x-ms-incremental-copy", type: { name: "Boolean" } }, destinationSnapshot: { serializedName: "x-ms-copy-destination-snapshot", xmlName: "x-ms-copy-destination-snapshot", type: { name: "String" } }, leaseDuration: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Enum", allowedValues: ["infinite", "fixed"] } }, leaseState: { serializedName: "x-ms-lease-state", xmlName: "x-ms-lease-state", type: { name: "Enum", allowedValues: [ "available", "leased", "expired", "breaking", "broken" ] } }, leaseStatus: { serializedName: "x-ms-lease-status", xmlName: "x-ms-lease-status", type: { name: "Enum", allowedValues: ["locked", "unlocked"] } }, contentLength: { serializedName: "content-length", xmlName: "content-length", type: { name: "Number" } }, contentType: { serializedName: "content-type", xmlName: "content-type", type: { name: "String" } }, etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray" } }, contentEncoding: { serializedName: "content-encoding", xmlName: "content-encoding", type: { name: "String" } }, contentDisposition: { serializedName: "content-disposition", xmlName: "content-disposition", type: { name: "String" } }, contentLanguage: { serializedName: "content-language", xmlName: "content-language", type: { name: "String" } }, cacheControl: { serializedName: "cache-control", xmlName: "cache-control", type: { name: "String" } }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, acceptRanges: { serializedName: "accept-ranges", xmlName: "accept-ranges", type: { name: "String" } }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: 
"x-ms-blob-committed-block-count", type: { name: "Number" } }, isServerEncrypted: { serializedName: "x-ms-server-encrypted", xmlName: "x-ms-server-encrypted", type: { name: "Boolean" } }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String" } }, accessTier: { serializedName: "x-ms-access-tier", xmlName: "x-ms-access-tier", type: { name: "String" } }, accessTierInferred: { serializedName: "x-ms-access-tier-inferred", xmlName: "x-ms-access-tier-inferred", type: { name: "Boolean" } }, archiveStatus: { serializedName: "x-ms-archive-status", xmlName: "x-ms-archive-status", type: { name: "String" } }, accessTierChangedOn: { serializedName: "x-ms-access-tier-change-time", xmlName: "x-ms-access-tier-change-time", type: { name: "DateTimeRfc1123" } }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String" } }, isCurrentVersion: { serializedName: "x-ms-is-current-version", xmlName: "x-ms-is-current-version", type: { name: "Boolean" } }, tagCount: { serializedName: "x-ms-tag-count", xmlName: "x-ms-tag-count", type: { name: "Number" } }, expiresOn: { serializedName: "x-ms-expiry-time", xmlName: "x-ms-expiry-time", type: { name: "DateTimeRfc1123" } }, isSealed: { serializedName: "x-ms-blob-sealed", xmlName: "x-ms-blob-sealed", type: { name: "Boolean" } }, rehydratePriority: { serializedName: "x-ms-rehydrate-priority", xmlName: "x-ms-rehydrate-priority", type: { name: "Enum", allowedValues: ["High", "Standard"] } }, lastAccessed: { serializedName: "x-ms-last-access-time", xmlName: "x-ms-last-access-time", type: { name: "DateTimeRfc1123" } }, immutabilityPolicyExpiresOn: { serializedName: "x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { name: "DateTimeRfc1123" } }, immutabilityPolicyMode: { serializedName: "x-ms-immutability-policy-mode", xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", allowedValues: [ "Mutable", "Unlocked", "Locked" ] } }, legalHold: { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { name: "Boolean" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobGetPropertiesExceptionHeaders = { serializedName: "Blob_getPropertiesExceptionHeaders", type: { name: "Composite", className: "BlobGetPropertiesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobDeleteHeaders = { serializedName: "Blob_deleteHeaders", type: { name: "Composite", className: "BlobDeleteHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobDeleteExceptionHeaders = { serializedName: "Blob_deleteExceptionHeaders", type: { name: "Composite", className: "BlobDeleteExceptionHeaders", modelProperties: { errorCode: { serializedName: 
"x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobUndeleteHeaders = { serializedName: "Blob_undeleteHeaders", type: { name: "Composite", className: "BlobUndeleteHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobUndeleteExceptionHeaders = { serializedName: "Blob_undeleteExceptionHeaders", type: { name: "Composite", className: "BlobUndeleteExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobSetExpiryHeaders = { serializedName: "Blob_setExpiryHeaders", type: { name: "Composite", className: "BlobSetExpiryHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } } } } }; const BlobSetExpiryExceptionHeaders = { serializedName: "Blob_setExpiryExceptionHeaders", type: { name: "Composite", className: "BlobSetExpiryExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobSetHttpHeadersHeaders = { serializedName: "Blob_setHttpHeadersHeaders", type: { name: "Composite", className: "BlobSetHttpHeadersHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobSetHttpHeadersExceptionHeaders = { serializedName: "Blob_setHttpHeadersExceptionHeaders", type: { name: "Composite", className: "BlobSetHttpHeadersExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobSetImmutabilityPolicyHeaders = { serializedName: "Blob_setImmutabilityPolicyHeaders", type: { name: "Composite", className: 
"BlobSetImmutabilityPolicyHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, immutabilityPolicyExpiry: { serializedName: "x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { name: "DateTimeRfc1123" } }, immutabilityPolicyMode: { serializedName: "x-ms-immutability-policy-mode", xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", allowedValues: [ "Mutable", "Unlocked", "Locked" ] } } } } }; const BlobSetImmutabilityPolicyExceptionHeaders = { serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", type: { name: "Composite", className: "BlobSetImmutabilityPolicyExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobDeleteImmutabilityPolicyHeaders = { serializedName: "Blob_deleteImmutabilityPolicyHeaders", type: { name: "Composite", className: "BlobDeleteImmutabilityPolicyHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } } } } }; const BlobDeleteImmutabilityPolicyExceptionHeaders = { serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", type: { name: "Composite", className: "BlobDeleteImmutabilityPolicyExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobSetLegalHoldHeaders = { serializedName: "Blob_setLegalHoldHeaders", type: { name: "Composite", className: "BlobSetLegalHoldHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, legalHold: { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { name: "Boolean" } } } } }; const BlobSetLegalHoldExceptionHeaders = { serializedName: "Blob_setLegalHoldExceptionHeaders", type: { name: "Composite", className: "BlobSetLegalHoldExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobSetMetadataHeaders = { serializedName: "Blob_setMetadataHeaders", type: { name: "Composite", className: "BlobSetMetadataHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: 
"String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean" } }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobSetMetadataExceptionHeaders = { serializedName: "Blob_setMetadataExceptionHeaders", type: { name: "Composite", className: "BlobSetMetadataExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobAcquireLeaseHeaders = { serializedName: "Blob_acquireLeaseHeaders", type: { name: "Composite", className: "BlobAcquireLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { name: "String" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } } } } }; const BlobAcquireLeaseExceptionHeaders = { serializedName: "Blob_acquireLeaseExceptionHeaders", type: { name: "Composite", className: "BlobAcquireLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobReleaseLeaseHeaders = { serializedName: "Blob_releaseLeaseHeaders", type: { name: "Composite", className: "BlobReleaseLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } } } } }; const BlobReleaseLeaseExceptionHeaders = { serializedName: "Blob_releaseLeaseExceptionHeaders", type: { name: "Composite", className: "BlobReleaseLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobRenewLeaseHeaders = { serializedName: "Blob_renewLeaseHeaders", type: { name: "Composite", className: 
"BlobRenewLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { name: "String" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } } } } }; const BlobRenewLeaseExceptionHeaders = { serializedName: "Blob_renewLeaseExceptionHeaders", type: { name: "Composite", className: "BlobRenewLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobChangeLeaseHeaders = { serializedName: "Blob_changeLeaseHeaders", type: { name: "Composite", className: "BlobChangeLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } } } } }; const BlobChangeLeaseExceptionHeaders = { serializedName: "Blob_changeLeaseExceptionHeaders", type: { name: "Composite", className: "BlobChangeLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobBreakLeaseHeaders = { serializedName: "Blob_breakLeaseHeaders", type: { name: "Composite", className: "BlobBreakLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, leaseTime: { serializedName: "x-ms-lease-time", xmlName: "x-ms-lease-time", type: { name: "Number" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } } } } }; const BlobBreakLeaseExceptionHeaders = { serializedName: "Blob_breakLeaseExceptionHeaders", type: { name: "Composite", className: "BlobBreakLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobCreateSnapshotHeaders = { serializedName: "Blob_createSnapshotHeaders", type: { name: "Composite", className: "BlobCreateSnapshotHeaders", modelProperties: { snapshot: { serializedName: 
"x-ms-snapshot", xmlName: "x-ms-snapshot", type: { name: "String" } }, etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobCreateSnapshotExceptionHeaders = { serializedName: "Blob_createSnapshotExceptionHeaders", type: { name: "Composite", className: "BlobCreateSnapshotExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobStartCopyFromURLHeaders = { serializedName: "Blob_startCopyFromURLHeaders", type: { name: "Composite", className: "BlobStartCopyFromURLHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { name: "String" } }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", allowedValues: [ "pending", "success", "aborted", "failed" ] } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobStartCopyFromURLExceptionHeaders = { serializedName: "Blob_startCopyFromURLExceptionHeaders", type: { name: "Composite", className: "BlobStartCopyFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobCopyFromURLHeaders = { serializedName: "Blob_copyFromURLHeaders", type: { name: "Composite", className: "BlobCopyFromURLHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: 
"String" } }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { name: "String" } }, copyStatus: { defaultValue: "success", isConstant: true, serializedName: "x-ms-copy-status", type: { name: "String" } }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray" } }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray" } }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobCopyFromURLExceptionHeaders = { serializedName: "Blob_copyFromURLExceptionHeaders", type: { name: "Composite", className: "BlobCopyFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobAbortCopyFromURLHeaders = { serializedName: "Blob_abortCopyFromURLHeaders", type: { name: "Composite", className: "BlobAbortCopyFromURLHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobAbortCopyFromURLExceptionHeaders = { serializedName: "Blob_abortCopyFromURLExceptionHeaders", type: { name: "Composite", className: "BlobAbortCopyFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobSetTierHeaders = { serializedName: "Blob_setTierHeaders", type: { name: "Composite", className: "BlobSetTierHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobSetTierExceptionHeaders = { serializedName: "Blob_setTierExceptionHeaders", type: { name: "Composite", className: "BlobSetTierExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobGetAccountInfoHeaders = { serializedName: "Blob_getAccountInfoHeaders", type: { name: "Composite", className: "BlobGetAccountInfoHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: 
"date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, skuName: { serializedName: "x-ms-sku-name", xmlName: "x-ms-sku-name", type: { name: "Enum", allowedValues: [ "Standard_LRS", "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS" ] } }, accountKind: { serializedName: "x-ms-account-kind", xmlName: "x-ms-account-kind", type: { name: "Enum", allowedValues: [ "Storage", "BlobStorage", "StorageV2", "FileStorage", "BlockBlobStorage" ] } }, isHierarchicalNamespaceEnabled: { serializedName: "x-ms-is-hns-enabled", xmlName: "x-ms-is-hns-enabled", type: { name: "Boolean" } } } } }; const BlobGetAccountInfoExceptionHeaders = { serializedName: "Blob_getAccountInfoExceptionHeaders", type: { name: "Composite", className: "BlobGetAccountInfoExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobQueryHeaders = { serializedName: "Blob_queryHeaders", type: { name: "Composite", className: "BlobQueryHeaders", modelProperties: { lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, metadata: { serializedName: "x-ms-meta", headerCollectionPrefix: "x-ms-meta-", xmlName: "x-ms-meta", type: { name: "Dictionary", value: { type: { name: "String" } } } }, contentLength: { serializedName: "content-length", xmlName: "content-length", type: { name: "Number" } }, contentType: { serializedName: "content-type", xmlName: "content-type", type: { name: "String" } }, contentRange: { serializedName: "content-range", xmlName: "content-range", type: { name: "String" } }, etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray" } }, contentEncoding: { serializedName: "content-encoding", xmlName: "content-encoding", type: { name: "String" } }, cacheControl: { serializedName: "cache-control", xmlName: "cache-control", type: { name: "String" } }, contentDisposition: { serializedName: "content-disposition", xmlName: "content-disposition", type: { name: "String" } }, contentLanguage: { serializedName: "content-language", xmlName: "content-language", type: { name: "String" } }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number" } }, blobType: { serializedName: "x-ms-blob-type", xmlName: "x-ms-blob-type", type: { name: "Enum", allowedValues: [ "BlockBlob", "PageBlob", "AppendBlob" ] } }, copyCompletionTime: { serializedName: "x-ms-copy-completion-time", xmlName: "x-ms-copy-completion-time", type: { name: "DateTimeRfc1123" } }, copyStatusDescription: { serializedName: "x-ms-copy-status-description", xmlName: "x-ms-copy-status-description", type: { name: "String" } }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { name: "String" } }, copyProgress: { serializedName: "x-ms-copy-progress", xmlName: "x-ms-copy-progress", type: { name: "String" } }, copySource: { serializedName: "x-ms-copy-source", xmlName: "x-ms-copy-source", type: { name: "String" } }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", allowedValues: [ "pending", "success", "aborted", "failed" ] } }, leaseDuration: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Enum", allowedValues: ["infinite", "fixed"] } }, leaseState: { serializedName: "x-ms-lease-state", xmlName: "x-ms-lease-state", type: 
{ name: "Enum", allowedValues: [ "available", "leased", "expired", "breaking", "broken" ] } }, leaseStatus: { serializedName: "x-ms-lease-status", xmlName: "x-ms-lease-status", type: { name: "Enum", allowedValues: ["locked", "unlocked"] } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, acceptRanges: { serializedName: "accept-ranges", xmlName: "accept-ranges", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { name: "Number" } }, isServerEncrypted: { serializedName: "x-ms-server-encrypted", xmlName: "x-ms-server-encrypted", type: { name: "Boolean" } }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String" } }, blobContentMD5: { serializedName: "x-ms-blob-content-md5", xmlName: "x-ms-blob-content-md5", type: { name: "ByteArray" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } }, contentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray" } } } } }; const BlobQueryExceptionHeaders = { serializedName: "Blob_queryExceptionHeaders", type: { name: "Composite", className: "BlobQueryExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobGetTagsHeaders = { serializedName: "Blob_getTagsHeaders", type: { name: "Composite", className: "BlobGetTagsHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobGetTagsExceptionHeaders = { serializedName: "Blob_getTagsExceptionHeaders", type: { name: "Composite", className: "BlobGetTagsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlobSetTagsHeaders = { serializedName: "Blob_setTagsHeaders", type: { name: "Composite", className: "BlobSetTagsHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const 
BlobSetTagsExceptionHeaders = { serializedName: "Blob_setTagsExceptionHeaders", type: { name: "Composite", className: "BlobSetTagsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const PageBlobCreateHeaders = { serializedName: "PageBlob_createHeaders", type: { name: "Composite", className: "PageBlobCreateHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean" } }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const PageBlobCreateExceptionHeaders = { serializedName: "PageBlob_createExceptionHeaders", type: { name: "Composite", className: "PageBlobCreateExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const PageBlobUploadPagesHeaders = { serializedName: "PageBlob_uploadPagesHeaders", type: { name: "Composite", className: "PageBlobUploadPagesHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray" } }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray" } }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean" } }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String" 
} }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const PageBlobUploadPagesExceptionHeaders = { serializedName: "PageBlob_uploadPagesExceptionHeaders", type: { name: "Composite", className: "PageBlobUploadPagesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const PageBlobClearPagesHeaders = { serializedName: "PageBlob_clearPagesHeaders", type: { name: "Composite", className: "PageBlobClearPagesHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray" } }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray" } }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const PageBlobClearPagesExceptionHeaders = { serializedName: "PageBlob_clearPagesExceptionHeaders", type: { name: "Composite", className: "PageBlobClearPagesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const PageBlobUploadPagesFromURLHeaders = { serializedName: "PageBlob_uploadPagesFromURLHeaders", type: { name: "Composite", className: "PageBlobUploadPagesFromURLHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray" } }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray" } }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean" } }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const PageBlobUploadPagesFromURLExceptionHeaders = { serializedName: 
"PageBlob_uploadPagesFromURLExceptionHeaders", type: { name: "Composite", className: "PageBlobUploadPagesFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const PageBlobGetPageRangesHeaders = { serializedName: "PageBlob_getPageRangesHeaders", type: { name: "Composite", className: "PageBlobGetPageRangesHeaders", modelProperties: { lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, blobContentLength: { serializedName: "x-ms-blob-content-length", xmlName: "x-ms-blob-content-length", type: { name: "Number" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const PageBlobGetPageRangesExceptionHeaders = { serializedName: "PageBlob_getPageRangesExceptionHeaders", type: { name: "Composite", className: "PageBlobGetPageRangesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const PageBlobGetPageRangesDiffHeaders = { serializedName: "PageBlob_getPageRangesDiffHeaders", type: { name: "Composite", className: "PageBlobGetPageRangesDiffHeaders", modelProperties: { lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, blobContentLength: { serializedName: "x-ms-blob-content-length", xmlName: "x-ms-blob-content-length", type: { name: "Number" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const PageBlobGetPageRangesDiffExceptionHeaders = { serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", type: { name: "Composite", className: "PageBlobGetPageRangesDiffExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const PageBlobResizeHeaders = { serializedName: "PageBlob_resizeHeaders", type: { name: "Composite", className: "PageBlobResizeHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, 
requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const PageBlobResizeExceptionHeaders = { serializedName: "PageBlob_resizeExceptionHeaders", type: { name: "Composite", className: "PageBlobResizeExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const PageBlobUpdateSequenceNumberHeaders = { serializedName: "PageBlob_updateSequenceNumberHeaders", type: { name: "Composite", className: "PageBlobUpdateSequenceNumberHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const PageBlobUpdateSequenceNumberExceptionHeaders = { serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", type: { name: "Composite", className: "PageBlobUpdateSequenceNumberExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const PageBlobCopyIncrementalHeaders = { serializedName: "PageBlob_copyIncrementalHeaders", type: { name: "Composite", className: "PageBlobCopyIncrementalHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { name: "String" } }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", allowedValues: [ "pending", "success", "aborted", "failed" ] } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const PageBlobCopyIncrementalExceptionHeaders = { serializedName: "PageBlob_copyIncrementalExceptionHeaders", type: { name: "Composite", className: "PageBlobCopyIncrementalExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const 
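// Two flavors of `copyStatus` appear in these mappers. The synchronous
// copy-from-URL operation (`BlobCopyFromURLHeaders` above) declares it
// `isConstant: true` with `defaultValue: "success"`, since that API only
// responds once the copy has finished; the asynchronous start-copy and
// incremental-copy operations instead model it as an Enum whose
// `allowedValues` ("pending", "success", "aborted", "failed") callers poll
// until the transfer settles.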
AppendBlobCreateHeaders = { serializedName: "AppendBlob_createHeaders", type: { name: "Composite", className: "AppendBlobCreateHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean" } }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const AppendBlobCreateExceptionHeaders = { serializedName: "AppendBlob_createExceptionHeaders", type: { name: "Composite", className: "AppendBlobCreateExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const AppendBlobAppendBlockHeaders = { serializedName: "AppendBlob_appendBlockHeaders", type: { name: "Composite", className: "AppendBlobAppendBlockHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray" } }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, blobAppendOffset: { serializedName: "x-ms-blob-append-offset", xmlName: "x-ms-blob-append-offset", type: { name: "String" } }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { name: "Number" } }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean" } }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const 
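// Append-specific response headers in `AppendBlobAppendBlockHeaders` above:
// `x-ms-blob-append-offset` reports the offset at which the block landed
// (returned as a decimal string) and `x-ms-blob-committed-block-count` the
// running block total (an append blob tops out at 50,000 blocks). Clients
// typically echo the offset back through the append-position precondition on
// the next write to detect concurrent appenders.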
AppendBlobAppendBlockExceptionHeaders = { serializedName: "AppendBlob_appendBlockExceptionHeaders", type: { name: "Composite", className: "AppendBlobAppendBlockExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const AppendBlobAppendBlockFromUrlHeaders = { serializedName: "AppendBlob_appendBlockFromUrlHeaders", type: { name: "Composite", className: "AppendBlobAppendBlockFromUrlHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray" } }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, blobAppendOffset: { serializedName: "x-ms-blob-append-offset", xmlName: "x-ms-blob-append-offset", type: { name: "String" } }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { name: "Number" } }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String" } }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const AppendBlobAppendBlockFromUrlExceptionHeaders = { serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", type: { name: "Composite", className: "AppendBlobAppendBlockFromUrlExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const AppendBlobSealHeaders = { serializedName: "AppendBlob_sealHeaders", type: { name: "Composite", className: "AppendBlobSealHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, isSealed: { serializedName: "x-ms-blob-sealed", xmlName: "x-ms-blob-sealed", type: { name: "Boolean" } } } } }; const AppendBlobSealExceptionHeaders = { serializedName: "AppendBlob_sealExceptionHeaders", type: { name: "Composite", className: "AppendBlobSealExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlockBlobUploadHeaders = { serializedName: 
"BlockBlob_uploadHeaders", type: { name: "Composite", className: "BlockBlobUploadHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean" } }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlockBlobUploadExceptionHeaders = { serializedName: "BlockBlob_uploadExceptionHeaders", type: { name: "Composite", className: "BlockBlobUploadExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlockBlobPutBlobFromUrlHeaders = { serializedName: "BlockBlob_putBlobFromUrlHeaders", type: { name: "Composite", className: "BlockBlobPutBlobFromUrlHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean" } }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlockBlobPutBlobFromUrlExceptionHeaders = { serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", type: { name: "Composite", className: "BlockBlobPutBlobFromUrlExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const 
BlockBlobStageBlockHeaders = { serializedName: "BlockBlob_stageBlockHeaders", type: { name: "Composite", className: "BlockBlobStageBlockHeaders", modelProperties: { contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray" } }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean" } }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlockBlobStageBlockExceptionHeaders = { serializedName: "BlockBlob_stageBlockExceptionHeaders", type: { name: "Composite", className: "BlockBlobStageBlockExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlockBlobStageBlockFromURLHeaders = { serializedName: "BlockBlob_stageBlockFromURLHeaders", type: { name: "Composite", className: "BlockBlobStageBlockFromURLHeaders", modelProperties: { contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray" } }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean" } }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlockBlobStageBlockFromURLExceptionHeaders = { serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", type: { name: "Composite", className: "BlockBlobStageBlockFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlockBlobCommitBlockListHeaders = { serializedName: "BlockBlob_commitBlockListHeaders", type: { name: "Composite", className: "BlockBlobCommitBlockListHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, lastModified: { 
serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray" } }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean" } }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlockBlobCommitBlockListExceptionHeaders = { serializedName: "BlockBlob_commitBlockListExceptionHeaders", type: { name: "Composite", className: "BlockBlobCommitBlockListExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlockBlobGetBlockListHeaders = { serializedName: "BlockBlob_getBlockListHeaders", type: { name: "Composite", className: "BlockBlobGetBlockListHeaders", modelProperties: { lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123" } }, etag: { serializedName: "etag", xmlName: "etag", type: { name: "String" } }, contentType: { serializedName: "content-type", xmlName: "content-type", type: { name: "String" } }, blobContentLength: { serializedName: "x-ms-blob-content-length", xmlName: "x-ms-blob-content-length", type: { name: "Number" } }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String" } }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String" } }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123" } }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; const BlockBlobGetBlockListExceptionHeaders = { serializedName: "BlockBlob_getBlockListExceptionHeaders", type: { name: "Composite", className: "BlockBlobGetBlockListExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String" } } } } }; var Mappers = /* @__PURE__ */ Object.freeze({ __proto__: null, AccessPolicy, AppendBlobAppendBlockExceptionHeaders, AppendBlobAppendBlockFromUrlExceptionHeaders, AppendBlobAppendBlockFromUrlHeaders, AppendBlobAppendBlockHeaders, AppendBlobCreateExceptionHeaders, AppendBlobCreateHeaders, AppendBlobSealExceptionHeaders, AppendBlobSealHeaders, ArrowConfiguration, ArrowField, BlobAbortCopyFromURLExceptionHeaders, BlobAbortCopyFromURLHeaders, 
BlobAcquireLeaseExceptionHeaders, BlobAcquireLeaseHeaders, BlobBreakLeaseExceptionHeaders, BlobBreakLeaseHeaders, BlobChangeLeaseExceptionHeaders, BlobChangeLeaseHeaders, BlobCopyFromURLExceptionHeaders, BlobCopyFromURLHeaders, BlobCreateSnapshotExceptionHeaders, BlobCreateSnapshotHeaders, BlobDeleteExceptionHeaders, BlobDeleteHeaders, BlobDeleteImmutabilityPolicyExceptionHeaders, BlobDeleteImmutabilityPolicyHeaders, BlobDownloadExceptionHeaders, BlobDownloadHeaders, BlobFlatListSegment, BlobGetAccountInfoExceptionHeaders, BlobGetAccountInfoHeaders, BlobGetPropertiesExceptionHeaders, BlobGetPropertiesHeaders, BlobGetTagsExceptionHeaders, BlobGetTagsHeaders, BlobHierarchyListSegment, BlobItemInternal, BlobName, BlobPrefix, BlobPropertiesInternal, BlobQueryExceptionHeaders, BlobQueryHeaders, BlobReleaseLeaseExceptionHeaders, BlobReleaseLeaseHeaders, BlobRenewLeaseExceptionHeaders, BlobRenewLeaseHeaders, BlobServiceProperties, BlobServiceStatistics, BlobSetExpiryExceptionHeaders, BlobSetExpiryHeaders, BlobSetHttpHeadersExceptionHeaders, BlobSetHttpHeadersHeaders, BlobSetImmutabilityPolicyExceptionHeaders, BlobSetImmutabilityPolicyHeaders, BlobSetLegalHoldExceptionHeaders, BlobSetLegalHoldHeaders, BlobSetMetadataExceptionHeaders, BlobSetMetadataHeaders, BlobSetTagsExceptionHeaders, BlobSetTagsHeaders, BlobSetTierExceptionHeaders, BlobSetTierHeaders, BlobStartCopyFromURLExceptionHeaders, BlobStartCopyFromURLHeaders, BlobTag, BlobTags, BlobUndeleteExceptionHeaders, BlobUndeleteHeaders, Block, BlockBlobCommitBlockListExceptionHeaders, BlockBlobCommitBlockListHeaders, BlockBlobGetBlockListExceptionHeaders, BlockBlobGetBlockListHeaders, BlockBlobPutBlobFromUrlExceptionHeaders, BlockBlobPutBlobFromUrlHeaders, BlockBlobStageBlockExceptionHeaders, BlockBlobStageBlockFromURLExceptionHeaders, BlockBlobStageBlockFromURLHeaders, BlockBlobStageBlockHeaders, BlockBlobUploadExceptionHeaders, BlockBlobUploadHeaders, BlockList, BlockLookupList, ClearRange, ContainerAcquireLeaseExceptionHeaders, ContainerAcquireLeaseHeaders, ContainerBreakLeaseExceptionHeaders, ContainerBreakLeaseHeaders, ContainerChangeLeaseExceptionHeaders, ContainerChangeLeaseHeaders, ContainerCreateExceptionHeaders, ContainerCreateHeaders, ContainerDeleteExceptionHeaders, ContainerDeleteHeaders, ContainerFilterBlobsExceptionHeaders, ContainerFilterBlobsHeaders, ContainerGetAccessPolicyExceptionHeaders, ContainerGetAccessPolicyHeaders, ContainerGetAccountInfoExceptionHeaders, ContainerGetAccountInfoHeaders, ContainerGetPropertiesExceptionHeaders, ContainerGetPropertiesHeaders, ContainerItem, ContainerListBlobFlatSegmentExceptionHeaders, ContainerListBlobFlatSegmentHeaders, ContainerListBlobHierarchySegmentExceptionHeaders, ContainerListBlobHierarchySegmentHeaders, ContainerProperties, ContainerReleaseLeaseExceptionHeaders, ContainerReleaseLeaseHeaders, ContainerRenameExceptionHeaders, ContainerRenameHeaders, ContainerRenewLeaseExceptionHeaders, ContainerRenewLeaseHeaders, ContainerRestoreExceptionHeaders, ContainerRestoreHeaders, ContainerSetAccessPolicyExceptionHeaders, ContainerSetAccessPolicyHeaders, ContainerSetMetadataExceptionHeaders, ContainerSetMetadataHeaders, ContainerSubmitBatchExceptionHeaders, ContainerSubmitBatchHeaders, CorsRule, DelimitedTextConfiguration, FilterBlobItem, FilterBlobSegment, GeoReplication, JsonTextConfiguration, KeyInfo, ListBlobsFlatSegmentResponse, ListBlobsHierarchySegmentResponse, ListContainersSegmentResponse, Logging, Metrics, PageBlobClearPagesExceptionHeaders, PageBlobClearPagesHeaders, 
PageBlobCopyIncrementalExceptionHeaders, PageBlobCopyIncrementalHeaders, PageBlobCreateExceptionHeaders, PageBlobCreateHeaders, PageBlobGetPageRangesDiffExceptionHeaders, PageBlobGetPageRangesDiffHeaders, PageBlobGetPageRangesExceptionHeaders, PageBlobGetPageRangesHeaders, PageBlobResizeExceptionHeaders, PageBlobResizeHeaders, PageBlobUpdateSequenceNumberExceptionHeaders, PageBlobUpdateSequenceNumberHeaders, PageBlobUploadPagesExceptionHeaders, PageBlobUploadPagesFromURLExceptionHeaders, PageBlobUploadPagesFromURLHeaders, PageBlobUploadPagesHeaders, PageList, PageRange, QueryFormat, QueryRequest, QuerySerialization, RetentionPolicy, ServiceFilterBlobsExceptionHeaders, ServiceFilterBlobsHeaders, ServiceGetAccountInfoExceptionHeaders, ServiceGetAccountInfoHeaders, ServiceGetPropertiesExceptionHeaders, ServiceGetPropertiesHeaders, ServiceGetStatisticsExceptionHeaders, ServiceGetStatisticsHeaders, ServiceGetUserDelegationKeyExceptionHeaders, ServiceGetUserDelegationKeyHeaders, ServiceListContainersSegmentExceptionHeaders, ServiceListContainersSegmentHeaders, ServiceSetPropertiesExceptionHeaders, ServiceSetPropertiesHeaders, ServiceSubmitBatchExceptionHeaders, ServiceSubmitBatchHeaders, SignedIdentifier, StaticWebsite, StorageError, UserDelegationKey }); const contentType = { parameterPath: ["options", "contentType"], mapper: { defaultValue: "application/xml", isConstant: true, serializedName: "Content-Type", type: { name: "String" } } }; const blobServiceProperties = { parameterPath: "blobServiceProperties", mapper: BlobServiceProperties }; const accept = { parameterPath: "accept", mapper: { defaultValue: "application/xml", isConstant: true, serializedName: "Accept", type: { name: "String" } } }; const url = { parameterPath: "url", mapper: { serializedName: "url", required: true, xmlName: "url", type: { name: "String" } }, skipEncoding: true }; const restype = { parameterPath: "restype", mapper: { defaultValue: "service", isConstant: true, serializedName: "restype", type: { name: "String" } } }; const comp = { parameterPath: "comp", mapper: { defaultValue: "properties", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const timeoutInSeconds = { parameterPath: ["options", "timeoutInSeconds"], mapper: { constraints: { InclusiveMinimum: 0 }, serializedName: "timeout", xmlName: "timeout", type: { name: "Number" } } }; const version = { parameterPath: "version", mapper: { defaultValue: "2025-05-05", isConstant: true, serializedName: "x-ms-version", type: { name: "String" } } }; const requestId = { parameterPath: ["options", "requestId"], mapper: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String" } } }; const accept1 = { parameterPath: "accept", mapper: { defaultValue: "application/xml", isConstant: true, serializedName: "Accept", type: { name: "String" } } }; const comp1 = { parameterPath: "comp", mapper: { defaultValue: "stats", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const comp2 = { parameterPath: "comp", mapper: { defaultValue: "list", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const prefix = { parameterPath: ["options", "prefix"], mapper: { serializedName: "prefix", xmlName: "prefix", type: { name: "String" } } }; const marker = { parameterPath: ["options", "marker"], mapper: { serializedName: "marker", xmlName: "marker", type: { name: "String" } } }; const maxPageSize = { parameterPath: ["options", "maxPageSize"], mapper: { constraints: { InclusiveMinimum: 1 }, 
serializedName: "maxresults", xmlName: "maxresults", type: { name: "Number" } } }; const include = { parameterPath: ["options", "include"], mapper: { serializedName: "include", xmlName: "include", xmlElementName: "ListContainersIncludeType", type: { name: "Sequence", element: { type: { name: "Enum", allowedValues: [ "metadata", "deleted", "system" ] } } } }, collectionFormat: "CSV" }; const keyInfo = { parameterPath: "keyInfo", mapper: KeyInfo }; const comp3 = { parameterPath: "comp", mapper: { defaultValue: "userdelegationkey", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const restype1 = { parameterPath: "restype", mapper: { defaultValue: "account", isConstant: true, serializedName: "restype", type: { name: "String" } } }; const body = { parameterPath: "body", mapper: { serializedName: "body", required: true, xmlName: "body", type: { name: "Stream" } } }; const comp4 = { parameterPath: "comp", mapper: { defaultValue: "batch", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const contentLength = { parameterPath: "contentLength", mapper: { serializedName: "Content-Length", required: true, xmlName: "Content-Length", type: { name: "Number" } } }; const multipartContentType = { parameterPath: "multipartContentType", mapper: { serializedName: "Content-Type", required: true, xmlName: "Content-Type", type: { name: "String" } } }; const comp5 = { parameterPath: "comp", mapper: { defaultValue: "blobs", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const where = { parameterPath: ["options", "where"], mapper: { serializedName: "where", xmlName: "where", type: { name: "String" } } }; const restype2 = { parameterPath: "restype", mapper: { defaultValue: "container", isConstant: true, serializedName: "restype", type: { name: "String" } } }; const metadata = { parameterPath: ["options", "metadata"], mapper: { serializedName: "x-ms-meta", xmlName: "x-ms-meta", headerCollectionPrefix: "x-ms-meta-", type: { name: "Dictionary", value: { type: { name: "String" } } } } }; const access = { parameterPath: ["options", "access"], mapper: { serializedName: "x-ms-blob-public-access", xmlName: "x-ms-blob-public-access", type: { name: "Enum", allowedValues: ["container", "blob"] } } }; const defaultEncryptionScope = { parameterPath: [ "options", "containerEncryptionScope", "defaultEncryptionScope" ], mapper: { serializedName: "x-ms-default-encryption-scope", xmlName: "x-ms-default-encryption-scope", type: { name: "String" } } }; const preventEncryptionScopeOverride = { parameterPath: [ "options", "containerEncryptionScope", "preventEncryptionScopeOverride" ], mapper: { serializedName: "x-ms-deny-encryption-scope-override", xmlName: "x-ms-deny-encryption-scope-override", type: { name: "Boolean" } } }; const leaseId = { parameterPath: [ "options", "leaseAccessConditions", "leaseId" ], mapper: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { name: "String" } } }; const ifModifiedSince = { parameterPath: [ "options", "modifiedAccessConditions", "ifModifiedSince" ], mapper: { serializedName: "If-Modified-Since", xmlName: "If-Modified-Since", type: { name: "DateTimeRfc1123" } } }; const ifUnmodifiedSince = { parameterPath: [ "options", "modifiedAccessConditions", "ifUnmodifiedSince" ], mapper: { serializedName: "If-Unmodified-Since", xmlName: "If-Unmodified-Since", type: { name: "DateTimeRfc1123" } } }; const comp6 = { parameterPath: "comp", mapper: { defaultValue: "metadata", isConstant: true, serializedName: "comp", type: { 
name: "String" } } }; const comp7 = { parameterPath: "comp", mapper: { defaultValue: "acl", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const containerAcl = { parameterPath: ["options", "containerAcl"], mapper: { serializedName: "containerAcl", xmlName: "SignedIdentifiers", xmlIsWrapped: true, xmlElementName: "SignedIdentifier", type: { name: "Sequence", element: { type: { name: "Composite", className: "SignedIdentifier" } } } } }; const comp8 = { parameterPath: "comp", mapper: { defaultValue: "undelete", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const deletedContainerName = { parameterPath: ["options", "deletedContainerName"], mapper: { serializedName: "x-ms-deleted-container-name", xmlName: "x-ms-deleted-container-name", type: { name: "String" } } }; const deletedContainerVersion = { parameterPath: ["options", "deletedContainerVersion"], mapper: { serializedName: "x-ms-deleted-container-version", xmlName: "x-ms-deleted-container-version", type: { name: "String" } } }; const comp9 = { parameterPath: "comp", mapper: { defaultValue: "rename", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const sourceContainerName = { parameterPath: "sourceContainerName", mapper: { serializedName: "x-ms-source-container-name", required: true, xmlName: "x-ms-source-container-name", type: { name: "String" } } }; const sourceLeaseId = { parameterPath: ["options", "sourceLeaseId"], mapper: { serializedName: "x-ms-source-lease-id", xmlName: "x-ms-source-lease-id", type: { name: "String" } } }; const comp10 = { parameterPath: "comp", mapper: { defaultValue: "lease", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const action = { parameterPath: "action", mapper: { defaultValue: "acquire", isConstant: true, serializedName: "x-ms-lease-action", type: { name: "String" } } }; const duration = { parameterPath: ["options", "duration"], mapper: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Number" } } }; const proposedLeaseId = { parameterPath: ["options", "proposedLeaseId"], mapper: { serializedName: "x-ms-proposed-lease-id", xmlName: "x-ms-proposed-lease-id", type: { name: "String" } } }; const action1 = { parameterPath: "action", mapper: { defaultValue: "release", isConstant: true, serializedName: "x-ms-lease-action", type: { name: "String" } } }; const leaseId1 = { parameterPath: "leaseId", mapper: { serializedName: "x-ms-lease-id", required: true, xmlName: "x-ms-lease-id", type: { name: "String" } } }; const action2 = { parameterPath: "action", mapper: { defaultValue: "renew", isConstant: true, serializedName: "x-ms-lease-action", type: { name: "String" } } }; const action3 = { parameterPath: "action", mapper: { defaultValue: "break", isConstant: true, serializedName: "x-ms-lease-action", type: { name: "String" } } }; const breakPeriod = { parameterPath: ["options", "breakPeriod"], mapper: { serializedName: "x-ms-lease-break-period", xmlName: "x-ms-lease-break-period", type: { name: "Number" } } }; const action4 = { parameterPath: "action", mapper: { defaultValue: "change", isConstant: true, serializedName: "x-ms-lease-action", type: { name: "String" } } }; const proposedLeaseId1 = { parameterPath: "proposedLeaseId", mapper: { serializedName: "x-ms-proposed-lease-id", required: true, xmlName: "x-ms-proposed-lease-id", type: { name: "String" } } }; const include1 = { parameterPath: ["options", "include"], mapper: { serializedName: "include", xmlName: "include", 
xmlElementName: "ListBlobsIncludeItem", type: { name: "Sequence", element: { type: { name: "Enum", allowedValues: [ "copy", "deleted", "metadata", "snapshots", "uncommittedblobs", "versions", "tags", "immutabilitypolicy", "legalhold", "deletedwithversions" ] } } } }, collectionFormat: "CSV" }; const delimiter = { parameterPath: "delimiter", mapper: { serializedName: "delimiter", required: true, xmlName: "delimiter", type: { name: "String" } } }; const snapshot = { parameterPath: ["options", "snapshot"], mapper: { serializedName: "snapshot", xmlName: "snapshot", type: { name: "String" } } }; const versionId = { parameterPath: ["options", "versionId"], mapper: { serializedName: "versionid", xmlName: "versionid", type: { name: "String" } } }; const range = { parameterPath: ["options", "range"], mapper: { serializedName: "x-ms-range", xmlName: "x-ms-range", type: { name: "String" } } }; const rangeGetContentMD5 = { parameterPath: ["options", "rangeGetContentMD5"], mapper: { serializedName: "x-ms-range-get-content-md5", xmlName: "x-ms-range-get-content-md5", type: { name: "Boolean" } } }; const rangeGetContentCRC64 = { parameterPath: ["options", "rangeGetContentCRC64"], mapper: { serializedName: "x-ms-range-get-content-crc64", xmlName: "x-ms-range-get-content-crc64", type: { name: "Boolean" } } }; const encryptionKey = { parameterPath: [ "options", "cpkInfo", "encryptionKey" ], mapper: { serializedName: "x-ms-encryption-key", xmlName: "x-ms-encryption-key", type: { name: "String" } } }; const encryptionKeySha256 = { parameterPath: [ "options", "cpkInfo", "encryptionKeySha256" ], mapper: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String" } } }; const encryptionAlgorithm = { parameterPath: [ "options", "cpkInfo", "encryptionAlgorithm" ], mapper: { serializedName: "x-ms-encryption-algorithm", xmlName: "x-ms-encryption-algorithm", type: { name: "String" } } }; const ifMatch = { parameterPath: [ "options", "modifiedAccessConditions", "ifMatch" ], mapper: { serializedName: "If-Match", xmlName: "If-Match", type: { name: "String" } } }; const ifNoneMatch = { parameterPath: [ "options", "modifiedAccessConditions", "ifNoneMatch" ], mapper: { serializedName: "If-None-Match", xmlName: "If-None-Match", type: { name: "String" } } }; const ifTags = { parameterPath: [ "options", "modifiedAccessConditions", "ifTags" ], mapper: { serializedName: "x-ms-if-tags", xmlName: "x-ms-if-tags", type: { name: "String" } } }; const deleteSnapshots = { parameterPath: ["options", "deleteSnapshots"], mapper: { serializedName: "x-ms-delete-snapshots", xmlName: "x-ms-delete-snapshots", type: { name: "Enum", allowedValues: ["include", "only"] } } }; const blobDeleteType = { parameterPath: ["options", "blobDeleteType"], mapper: { serializedName: "deletetype", xmlName: "deletetype", type: { name: "String" } } }; const comp11 = { parameterPath: "comp", mapper: { defaultValue: "expiry", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const expiryOptions = { parameterPath: "expiryOptions", mapper: { serializedName: "x-ms-expiry-option", required: true, xmlName: "x-ms-expiry-option", type: { name: "String" } } }; const expiresOn = { parameterPath: ["options", "expiresOn"], mapper: { serializedName: "x-ms-expiry-time", xmlName: "x-ms-expiry-time", type: { name: "String" } } }; const blobCacheControl = { parameterPath: [ "options", "blobHttpHeaders", "blobCacheControl" ], mapper: { serializedName: "x-ms-blob-cache-control", xmlName: 
"x-ms-blob-cache-control", type: { name: "String" } } }; const blobContentType = { parameterPath: [ "options", "blobHttpHeaders", "blobContentType" ], mapper: { serializedName: "x-ms-blob-content-type", xmlName: "x-ms-blob-content-type", type: { name: "String" } } }; const blobContentMD5 = { parameterPath: [ "options", "blobHttpHeaders", "blobContentMD5" ], mapper: { serializedName: "x-ms-blob-content-md5", xmlName: "x-ms-blob-content-md5", type: { name: "ByteArray" } } }; const blobContentEncoding = { parameterPath: [ "options", "blobHttpHeaders", "blobContentEncoding" ], mapper: { serializedName: "x-ms-blob-content-encoding", xmlName: "x-ms-blob-content-encoding", type: { name: "String" } } }; const blobContentLanguage = { parameterPath: [ "options", "blobHttpHeaders", "blobContentLanguage" ], mapper: { serializedName: "x-ms-blob-content-language", xmlName: "x-ms-blob-content-language", type: { name: "String" } } }; const blobContentDisposition = { parameterPath: [ "options", "blobHttpHeaders", "blobContentDisposition" ], mapper: { serializedName: "x-ms-blob-content-disposition", xmlName: "x-ms-blob-content-disposition", type: { name: "String" } } }; const comp12 = { parameterPath: "comp", mapper: { defaultValue: "immutabilityPolicies", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const immutabilityPolicyExpiry = { parameterPath: ["options", "immutabilityPolicyExpiry"], mapper: { serializedName: "x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { name: "DateTimeRfc1123" } } }; const immutabilityPolicyMode = { parameterPath: ["options", "immutabilityPolicyMode"], mapper: { serializedName: "x-ms-immutability-policy-mode", xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", allowedValues: [ "Mutable", "Unlocked", "Locked" ] } } }; const comp13 = { parameterPath: "comp", mapper: { defaultValue: "legalhold", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const legalHold = { parameterPath: "legalHold", mapper: { serializedName: "x-ms-legal-hold", required: true, xmlName: "x-ms-legal-hold", type: { name: "Boolean" } } }; const encryptionScope = { parameterPath: ["options", "encryptionScope"], mapper: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String" } } }; const comp14 = { parameterPath: "comp", mapper: { defaultValue: "snapshot", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const tier = { parameterPath: ["options", "tier"], mapper: { serializedName: "x-ms-access-tier", xmlName: "x-ms-access-tier", type: { name: "Enum", allowedValues: [ "P4", "P6", "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", "Cold" ] } } }; const rehydratePriority = { parameterPath: ["options", "rehydratePriority"], mapper: { serializedName: "x-ms-rehydrate-priority", xmlName: "x-ms-rehydrate-priority", type: { name: "Enum", allowedValues: ["High", "Standard"] } } }; const sourceIfModifiedSince = { parameterPath: [ "options", "sourceModifiedAccessConditions", "sourceIfModifiedSince" ], mapper: { serializedName: "x-ms-source-if-modified-since", xmlName: "x-ms-source-if-modified-since", type: { name: "DateTimeRfc1123" } } }; const sourceIfUnmodifiedSince = { parameterPath: [ "options", "sourceModifiedAccessConditions", "sourceIfUnmodifiedSince" ], mapper: { serializedName: "x-ms-source-if-unmodified-since", xmlName: "x-ms-source-if-unmodified-since", type: { name: "DateTimeRfc1123" } } }; const 
sourceIfMatch = { parameterPath: [ "options", "sourceModifiedAccessConditions", "sourceIfMatch" ], mapper: { serializedName: "x-ms-source-if-match", xmlName: "x-ms-source-if-match", type: { name: "String" } } }; const sourceIfNoneMatch = { parameterPath: [ "options", "sourceModifiedAccessConditions", "sourceIfNoneMatch" ], mapper: { serializedName: "x-ms-source-if-none-match", xmlName: "x-ms-source-if-none-match", type: { name: "String" } } }; const sourceIfTags = { parameterPath: [ "options", "sourceModifiedAccessConditions", "sourceIfTags" ], mapper: { serializedName: "x-ms-source-if-tags", xmlName: "x-ms-source-if-tags", type: { name: "String" } } }; const copySource = { parameterPath: "copySource", mapper: { serializedName: "x-ms-copy-source", required: true, xmlName: "x-ms-copy-source", type: { name: "String" } } }; const blobTagsString = { parameterPath: ["options", "blobTagsString"], mapper: { serializedName: "x-ms-tags", xmlName: "x-ms-tags", type: { name: "String" } } }; const sealBlob = { parameterPath: ["options", "sealBlob"], mapper: { serializedName: "x-ms-seal-blob", xmlName: "x-ms-seal-blob", type: { name: "Boolean" } } }; const legalHold1 = { parameterPath: ["options", "legalHold"], mapper: { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { name: "Boolean" } } }; const xMsRequiresSync = { parameterPath: "xMsRequiresSync", mapper: { defaultValue: "true", isConstant: true, serializedName: "x-ms-requires-sync", type: { name: "String" } } }; const sourceContentMD5 = { parameterPath: ["options", "sourceContentMD5"], mapper: { serializedName: "x-ms-source-content-md5", xmlName: "x-ms-source-content-md5", type: { name: "ByteArray" } } }; const copySourceAuthorization = { parameterPath: ["options", "copySourceAuthorization"], mapper: { serializedName: "x-ms-copy-source-authorization", xmlName: "x-ms-copy-source-authorization", type: { name: "String" } } }; const copySourceTags = { parameterPath: ["options", "copySourceTags"], mapper: { serializedName: "x-ms-copy-source-tag-option", xmlName: "x-ms-copy-source-tag-option", type: { name: "Enum", allowedValues: ["REPLACE", "COPY"] } } }; const comp15 = { parameterPath: "comp", mapper: { defaultValue: "copy", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const copyActionAbortConstant = { parameterPath: "copyActionAbortConstant", mapper: { defaultValue: "abort", isConstant: true, serializedName: "x-ms-copy-action", type: { name: "String" } } }; const copyId = { parameterPath: "copyId", mapper: { serializedName: "copyid", required: true, xmlName: "copyid", type: { name: "String" } } }; const comp16 = { parameterPath: "comp", mapper: { defaultValue: "tier", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const tier1 = { parameterPath: "tier", mapper: { serializedName: "x-ms-access-tier", required: true, xmlName: "x-ms-access-tier", type: { name: "Enum", allowedValues: [ "P4", "P6", "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", "Cold" ] } } }; const queryRequest = { parameterPath: ["options", "queryRequest"], mapper: QueryRequest }; const comp17 = { parameterPath: "comp", mapper: { defaultValue: "query", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const comp18 = { parameterPath: "comp", mapper: { defaultValue: "tags", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const tags = { parameterPath: ["options", "tags"], mapper: BlobTags }; const transactionalContentMD5 = { 
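/* Editor's note (hedged): this mapper and the `transactionalContentCrc64` one that follows carry per-request body checksums (Content-MD5 / x-ms-content-crc64 headers) so the service can verify the bytes it received. A caller-side sketch under Node, purely illustrative:

   import { createHash } from "node:crypto";
   const md5 = createHash("md5").update(bodyBytes).digest(); // a Uint8Array, matching the ByteArray mapper

The service generally expects at most one of the two checksums on a given request. */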
parameterPath: ["options", "transactionalContentMD5"], mapper: { serializedName: "Content-MD5", xmlName: "Content-MD5", type: { name: "ByteArray" } } }; const transactionalContentCrc64 = { parameterPath: ["options", "transactionalContentCrc64"], mapper: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray" } } }; const blobType = { parameterPath: "blobType", mapper: { defaultValue: "PageBlob", isConstant: true, serializedName: "x-ms-blob-type", type: { name: "String" } } }; const blobContentLength = { parameterPath: "blobContentLength", mapper: { serializedName: "x-ms-blob-content-length", required: true, xmlName: "x-ms-blob-content-length", type: { name: "Number" } } }; const blobSequenceNumber = { parameterPath: ["options", "blobSequenceNumber"], mapper: { defaultValue: 0, serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number" } } }; const contentType1 = { parameterPath: ["options", "contentType"], mapper: { defaultValue: "application/octet-stream", isConstant: true, serializedName: "Content-Type", type: { name: "String" } } }; const body1 = { parameterPath: "body", mapper: { serializedName: "body", required: true, xmlName: "body", type: { name: "Stream" } } }; const accept2 = { parameterPath: "accept", mapper: { defaultValue: "application/xml", isConstant: true, serializedName: "Accept", type: { name: "String" } } }; const comp19 = { parameterPath: "comp", mapper: { defaultValue: "page", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const pageWrite = { parameterPath: "pageWrite", mapper: { defaultValue: "update", isConstant: true, serializedName: "x-ms-page-write", type: { name: "String" } } }; const ifSequenceNumberLessThanOrEqualTo = { parameterPath: [ "options", "sequenceNumberAccessConditions", "ifSequenceNumberLessThanOrEqualTo" ], mapper: { serializedName: "x-ms-if-sequence-number-le", xmlName: "x-ms-if-sequence-number-le", type: { name: "Number" } } }; const ifSequenceNumberLessThan = { parameterPath: [ "options", "sequenceNumberAccessConditions", "ifSequenceNumberLessThan" ], mapper: { serializedName: "x-ms-if-sequence-number-lt", xmlName: "x-ms-if-sequence-number-lt", type: { name: "Number" } } }; const ifSequenceNumberEqualTo = { parameterPath: [ "options", "sequenceNumberAccessConditions", "ifSequenceNumberEqualTo" ], mapper: { serializedName: "x-ms-if-sequence-number-eq", xmlName: "x-ms-if-sequence-number-eq", type: { name: "Number" } } }; const pageWrite1 = { parameterPath: "pageWrite", mapper: { defaultValue: "clear", isConstant: true, serializedName: "x-ms-page-write", type: { name: "String" } } }; const sourceUrl = { parameterPath: "sourceUrl", mapper: { serializedName: "x-ms-copy-source", required: true, xmlName: "x-ms-copy-source", type: { name: "String" } } }; const sourceRange = { parameterPath: "sourceRange", mapper: { serializedName: "x-ms-source-range", required: true, xmlName: "x-ms-source-range", type: { name: "String" } } }; const sourceContentCrc64 = { parameterPath: ["options", "sourceContentCrc64"], mapper: { serializedName: "x-ms-source-content-crc64", xmlName: "x-ms-source-content-crc64", type: { name: "ByteArray" } } }; const range1 = { parameterPath: "range", mapper: { serializedName: "x-ms-range", required: true, xmlName: "x-ms-range", type: { name: "String" } } }; const comp20 = { parameterPath: "comp", mapper: { defaultValue: "pagelist", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const prevsnapshot = { 
parameterPath: ["options", "prevsnapshot"], mapper: { serializedName: "prevsnapshot", xmlName: "prevsnapshot", type: { name: "String" } } }; const prevSnapshotUrl = { parameterPath: ["options", "prevSnapshotUrl"], mapper: { serializedName: "x-ms-previous-snapshot-url", xmlName: "x-ms-previous-snapshot-url", type: { name: "String" } } }; const sequenceNumberAction = { parameterPath: "sequenceNumberAction", mapper: { serializedName: "x-ms-sequence-number-action", required: true, xmlName: "x-ms-sequence-number-action", type: { name: "Enum", allowedValues: [ "max", "update", "increment" ] } } }; const comp21 = { parameterPath: "comp", mapper: { defaultValue: "incrementalcopy", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const blobType1 = { parameterPath: "blobType", mapper: { defaultValue: "AppendBlob", isConstant: true, serializedName: "x-ms-blob-type", type: { name: "String" } } }; const comp22 = { parameterPath: "comp", mapper: { defaultValue: "appendblock", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const maxSize = { parameterPath: [ "options", "appendPositionAccessConditions", "maxSize" ], mapper: { serializedName: "x-ms-blob-condition-maxsize", xmlName: "x-ms-blob-condition-maxsize", type: { name: "Number" } } }; const appendPosition = { parameterPath: [ "options", "appendPositionAccessConditions", "appendPosition" ], mapper: { serializedName: "x-ms-blob-condition-appendpos", xmlName: "x-ms-blob-condition-appendpos", type: { name: "Number" } } }; const sourceRange1 = { parameterPath: ["options", "sourceRange"], mapper: { serializedName: "x-ms-source-range", xmlName: "x-ms-source-range", type: { name: "String" } } }; const comp23 = { parameterPath: "comp", mapper: { defaultValue: "seal", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const blobType2 = { parameterPath: "blobType", mapper: { defaultValue: "BlockBlob", isConstant: true, serializedName: "x-ms-blob-type", type: { name: "String" } } }; const copySourceBlobProperties = { parameterPath: ["options", "copySourceBlobProperties"], mapper: { serializedName: "x-ms-copy-source-blob-properties", xmlName: "x-ms-copy-source-blob-properties", type: { name: "Boolean" } } }; const comp24 = { parameterPath: "comp", mapper: { defaultValue: "block", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const blockId = { parameterPath: "blockId", mapper: { serializedName: "blockid", required: true, xmlName: "blockid", type: { name: "String" } } }; const blocks = { parameterPath: "blocks", mapper: BlockLookupList }; const comp25 = { parameterPath: "comp", mapper: { defaultValue: "blocklist", isConstant: true, serializedName: "comp", type: { name: "String" } } }; const listType = { parameterPath: "listType", mapper: { defaultValue: "committed", serializedName: "blocklisttype", required: true, xmlName: "blocklisttype", type: { name: "Enum", allowedValues: [ "committed", "uncommitted", "all" ] } } }; /** Class containing Service operations. */ var ServiceImpl = class { /** * Initialize a new instance of the class Service class. * @param client Reference to the service client */ constructor(client) { this.client = client; } /** * Sets properties for a storage account's Blob service endpoint, including properties for Storage * Analytics and CORS (Cross-Origin Resource Sharing) rules * @param blobServiceProperties The StorageService properties. * @param options The options parameters. 
*/ setProperties(blobServiceProperties$1, options) { return this.client.sendOperationRequest({ blobServiceProperties: blobServiceProperties$1, options }, setPropertiesOperationSpec); } /** * gets the properties of a storage account's Blob service, including properties for Storage Analytics * and CORS (Cross-Origin Resource Sharing) rules. * @param options The options parameters. */ getProperties(options) { return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$2); } /** * Retrieves statistics related to replication for the Blob service. It is only available on the * secondary location endpoint when read-access geo-redundant replication is enabled for the storage * account. * @param options The options parameters. */ getStatistics(options) { return this.client.sendOperationRequest({ options }, getStatisticsOperationSpec); } /** * The List Containers Segment operation returns a list of the containers under the specified account * @param options The options parameters. */ listContainersSegment(options) { return this.client.sendOperationRequest({ options }, listContainersSegmentOperationSpec); } /** * Retrieves a user delegation key for the Blob service. This is only a valid operation when using * bearer token authentication. * @param keyInfo Key information * @param options The options parameters. */ getUserDelegationKey(keyInfo$1, options) { return this.client.sendOperationRequest({ keyInfo: keyInfo$1, options }, getUserDelegationKeyOperationSpec); } /** * Returns the sku name and account kind * @param options The options parameters. */ getAccountInfo(options) { return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$2); } /** * The Batch operation allows multiple API calls to be embedded into a single HTTP request. * @param contentLength The length of the request. * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch * boundary. Example header value: multipart/mixed; boundary=batch_ * @param body Initial data * @param options The options parameters. */ submitBatch(contentLength$1, multipartContentType$1, body$1, options) { return this.client.sendOperationRequest({ contentLength: contentLength$1, multipartContentType: multipartContentType$1, body: body$1, options }, submitBatchOperationSpec$1); } /** * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a * given search expression. Filter blobs searches across all containers within a storage account but * can be scoped within the expression to a single container. * @param options The options parameters. 
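* @example
* // Editor's addition — hedged sketch of a tags filter; the expression syntax
* // follows the service's Find Blobs by Tags documentation.
* const service = new ServiceImpl(client);
* const segment = await service.filterBlobs({ where: `"project" = 'alpha'`, maxPageSize: 100 });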
*/ filterBlobs(options) { return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec$1); } };
const xmlSerializer$5 = coreClient__namespace.createSerializer( Mappers, /* isXml */ true );
const setPropertiesOperationSpec = { path: "/", httpMethod: "PUT", responses: { 202: { headersMapper: ServiceSetPropertiesHeaders }, default: { bodyMapper: StorageError, headersMapper: ServiceSetPropertiesExceptionHeaders } }, requestBody: blobServiceProperties, queryParameters: [ restype, comp, timeoutInSeconds ], urlParameters: [url], headerParameters: [ contentType, accept, version, requestId ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", serializer: xmlSerializer$5 };
const getPropertiesOperationSpec$2 = { path: "/", httpMethod: "GET", responses: { 200: { bodyMapper: BlobServiceProperties, headersMapper: ServiceGetPropertiesHeaders }, default: { bodyMapper: StorageError, headersMapper: ServiceGetPropertiesExceptionHeaders } }, queryParameters: [ restype, comp, timeoutInSeconds ], urlParameters: [url], headerParameters: [ version, requestId, accept1 ], isXML: true, serializer: xmlSerializer$5 };
const getStatisticsOperationSpec = { path: "/", httpMethod: "GET", responses: { 200: { bodyMapper: BlobServiceStatistics, headersMapper: ServiceGetStatisticsHeaders }, default: { bodyMapper: StorageError, headersMapper: ServiceGetStatisticsExceptionHeaders } }, queryParameters: [ restype, timeoutInSeconds, comp1 ], urlParameters: [url], headerParameters: [ version, requestId, accept1 ], isXML: true, serializer: xmlSerializer$5 };
const listContainersSegmentOperationSpec = { path: "/", httpMethod: "GET", responses: { 200: { bodyMapper: ListContainersSegmentResponse, headersMapper: ServiceListContainersSegmentHeaders }, default: { bodyMapper: StorageError, headersMapper: ServiceListContainersSegmentExceptionHeaders } }, queryParameters: [ timeoutInSeconds, comp2, prefix, marker, maxPageSize, include ], urlParameters: [url], headerParameters: [ version, requestId, accept1 ], isXML: true, serializer: xmlSerializer$5 };
const getUserDelegationKeyOperationSpec = { path: "/", httpMethod: "POST", responses: { 200: { bodyMapper: UserDelegationKey, headersMapper: ServiceGetUserDelegationKeyHeaders }, default: { bodyMapper: StorageError, headersMapper: ServiceGetUserDelegationKeyExceptionHeaders } }, requestBody: keyInfo, queryParameters: [ restype, timeoutInSeconds, comp3 ], urlParameters: [url], headerParameters: [ contentType, accept, version, requestId ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", serializer: xmlSerializer$5 };
const getAccountInfoOperationSpec$2 = { path: "/", httpMethod: "GET", responses: { 200: { headersMapper: ServiceGetAccountInfoHeaders }, default: { bodyMapper: StorageError, headersMapper: ServiceGetAccountInfoExceptionHeaders } }, queryParameters: [ comp, timeoutInSeconds, restype1 ], urlParameters: [url], headerParameters: [ version, requestId, accept1 ], isXML: true, serializer: xmlSerializer$5 };
const submitBatchOperationSpec$1 = { path: "/", httpMethod: "POST", responses: { 202: { bodyMapper: { type: { name: "Stream" }, serializedName: "parsedResponse" }, headersMapper: ServiceSubmitBatchHeaders }, default: { bodyMapper: StorageError, headersMapper: ServiceSubmitBatchExceptionHeaders } }, requestBody: body, queryParameters: [timeoutInSeconds, comp4], urlParameters: [url], headerParameters: [ accept, version, requestId, contentLength, multipartContentType ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", serializer: xmlSerializer$5 };
const filterBlobsOperationSpec$1 = { path: "/", httpMethod: "GET", responses: { 200: { bodyMapper: FilterBlobSegment, headersMapper: ServiceFilterBlobsHeaders }, default: { bodyMapper: StorageError, headersMapper: ServiceFilterBlobsExceptionHeaders } }, queryParameters: [ timeoutInSeconds, marker, maxPageSize, comp5, where ], urlParameters: [url], headerParameters: [ version, requestId, accept1 ], isXML: true, serializer: xmlSerializer$5 };
/** Class containing Container operations. */ var ContainerImpl = class {
/** * Initialize a new instance of the Container class. * @param client Reference to the service client */ constructor(client) { this.client = client; }
/** * creates a new container under the specified account. If a container with the same name already * exists, the operation fails * @param options The options parameters. */ create(options) { return this.client.sendOperationRequest({ options }, createOperationSpec$2); }
/** * returns all user-defined metadata and system properties for the specified container. The data * returned does not include the container's list of blobs * @param options The options parameters. */ getProperties(options) { return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$1); }
/** * The operation marks the specified container for deletion. The container and any blobs contained within * it are later deleted during garbage collection * @param options The options parameters. */ delete(options) { return this.client.sendOperationRequest({ options }, deleteOperationSpec$1); }
/** * The operation sets one or more user-defined name-value pairs for the specified container. * @param options The options parameters. */ setMetadata(options) { return this.client.sendOperationRequest({ options }, setMetadataOperationSpec$1); }
/** * gets the permissions for the specified container. The permissions indicate whether container data * may be accessed publicly. * @param options The options parameters. */ getAccessPolicy(options) { return this.client.sendOperationRequest({ options }, getAccessPolicyOperationSpec); }
/** * sets the permissions for the specified container. The permissions indicate whether blobs in a * container may be accessed publicly. * @param options The options parameters. */ setAccessPolicy(options) { return this.client.sendOperationRequest({ options }, setAccessPolicyOperationSpec); }
/** * Restores a previously-deleted container. * @param options The options parameters. */ restore(options) { return this.client.sendOperationRequest({ options }, restoreOperationSpec); }
/** * Renames an existing container. * @param sourceContainerName Required. Specifies the name of the container to rename. * @param options The options parameters. */ rename(sourceContainerName$1, options) { return this.client.sendOperationRequest({ sourceContainerName: sourceContainerName$1, options }, renameOperationSpec); }
/** * The Batch operation allows multiple API calls to be embedded into a single HTTP request. * @param contentLength The length of the request. * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch * boundary. Example header value: multipart/mixed; boundary=batch_ * @param body Initial data * @param options The options parameters.
*/ submitBatch(contentLength$1, multipartContentType$1, body$1, options) { return this.client.sendOperationRequest({ contentLength: contentLength$1, multipartContentType: multipartContentType$1, body: body$1, options }, submitBatchOperationSpec); } /** * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given * search expression. Filter blobs searches within the given container. * @param options The options parameters. */ filterBlobs(options) { return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite * @param options The options parameters. */ acquireLease(options) { return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite * @param leaseId Specifies the current lease ID on the resource. * @param options The options parameters. */ releaseLease(leaseId$1, options) { return this.client.sendOperationRequest({ leaseId: leaseId$1, options }, releaseLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite * @param leaseId Specifies the current lease ID on the resource. * @param options The options parameters. */ renewLease(leaseId$1, options) { return this.client.sendOperationRequest({ leaseId: leaseId$1, options }, renewLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite * @param options The options parameters. */ breakLease(options) { return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite * @param leaseId Specifies the current lease ID on the resource. * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor * (String) for a list of valid GUID string formats. * @param options The options parameters. */ changeLease(leaseId$1, proposedLeaseId$1, options) { return this.client.sendOperationRequest({ leaseId: leaseId$1, proposedLeaseId: proposedLeaseId$1, options }, changeLeaseOperationSpec$1); } /** * [Update] The List Blobs operation returns a list of the blobs under the specified container * @param options The options parameters. */ listBlobFlatSegment(options) { return this.client.sendOperationRequest({ options }, listBlobFlatSegmentOperationSpec); } /** * [Update] The List Blobs operation returns a list of the blobs under the specified container * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix * element in the response body that acts as a placeholder for all blobs whose names begin with the * same substring up to the appearance of the delimiter character. The delimiter may be a single * character or a string. * @param options The options parameters. 
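* @example
* // Editor's addition — hedged sketch: a "/" delimiter folds blob names into
* // BlobPrefix entries, yielding one virtual directory level per call.
* const container = new ContainerImpl(client);
* const segment = await container.listBlobHierarchySegment("/", { prefix: "photos/2024/" });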
*/ listBlobHierarchySegment(delimiter$1, options) { return this.client.sendOperationRequest({ delimiter: delimiter$1, options }, listBlobHierarchySegmentOperationSpec); } /** * Returns the sku name and account kind * @param options The options parameters. */ getAccountInfo(options) { return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$1); } }; const xmlSerializer$4 = coreClient__namespace.createSerializer( Mappers, /* isXml */ true ); const createOperationSpec$2 = { path: "/{containerName}", httpMethod: "PUT", responses: { 201: { headersMapper: ContainerCreateHeaders }, default: { bodyMapper: StorageError, headersMapper: ContainerCreateExceptionHeaders } }, queryParameters: [timeoutInSeconds, restype2], urlParameters: [url], headerParameters: [ version, requestId, accept1, metadata, access, defaultEncryptionScope, preventEncryptionScopeOverride ], isXML: true, serializer: xmlSerializer$4 }; const getPropertiesOperationSpec$1 = { path: "/{containerName}", httpMethod: "GET", responses: { 200: { headersMapper: ContainerGetPropertiesHeaders }, default: { bodyMapper: StorageError, headersMapper: ContainerGetPropertiesExceptionHeaders } }, queryParameters: [timeoutInSeconds, restype2], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId ], isXML: true, serializer: xmlSerializer$4 }; const deleteOperationSpec$1 = { path: "/{containerName}", httpMethod: "DELETE", responses: { 202: { headersMapper: ContainerDeleteHeaders }, default: { bodyMapper: StorageError, headersMapper: ContainerDeleteExceptionHeaders } }, queryParameters: [timeoutInSeconds, restype2], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince ], isXML: true, serializer: xmlSerializer$4 }; const setMetadataOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { headersMapper: ContainerSetMetadataHeaders }, default: { bodyMapper: StorageError, headersMapper: ContainerSetMetadataExceptionHeaders } }, queryParameters: [ timeoutInSeconds, restype2, comp6 ], urlParameters: [url], headerParameters: [ version, requestId, accept1, metadata, leaseId, ifModifiedSince ], isXML: true, serializer: xmlSerializer$4 }; const getAccessPolicyOperationSpec = { path: "/{containerName}", httpMethod: "GET", responses: { 200: { bodyMapper: { type: { name: "Sequence", element: { type: { name: "Composite", className: "SignedIdentifier" } } }, serializedName: "SignedIdentifiers", xmlName: "SignedIdentifiers", xmlIsWrapped: true, xmlElementName: "SignedIdentifier" }, headersMapper: ContainerGetAccessPolicyHeaders }, default: { bodyMapper: StorageError, headersMapper: ContainerGetAccessPolicyExceptionHeaders } }, queryParameters: [ timeoutInSeconds, restype2, comp7 ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId ], isXML: true, serializer: xmlSerializer$4 }; const setAccessPolicyOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { headersMapper: ContainerSetAccessPolicyHeaders }, default: { bodyMapper: StorageError, headersMapper: ContainerSetAccessPolicyExceptionHeaders } }, requestBody: containerAcl, queryParameters: [ timeoutInSeconds, restype2, comp7 ], urlParameters: [url], headerParameters: [ contentType, accept, version, requestId, access, leaseId, ifModifiedSince, ifUnmodifiedSince ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", serializer: xmlSerializer$4 }; const restoreOperationSpec = { path: 
"/{containerName}", httpMethod: "PUT", responses: { 201: { headersMapper: ContainerRestoreHeaders }, default: { bodyMapper: StorageError, headersMapper: ContainerRestoreExceptionHeaders } }, queryParameters: [ timeoutInSeconds, restype2, comp8 ], urlParameters: [url], headerParameters: [ version, requestId, accept1, deletedContainerName, deletedContainerVersion ], isXML: true, serializer: xmlSerializer$4 }; const renameOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { headersMapper: ContainerRenameHeaders }, default: { bodyMapper: StorageError, headersMapper: ContainerRenameExceptionHeaders } }, queryParameters: [ timeoutInSeconds, restype2, comp9 ], urlParameters: [url], headerParameters: [ version, requestId, accept1, sourceContainerName, sourceLeaseId ], isXML: true, serializer: xmlSerializer$4 }; const submitBatchOperationSpec = { path: "/{containerName}", httpMethod: "POST", responses: { 202: { bodyMapper: { type: { name: "Stream" }, serializedName: "parsedResponse" }, headersMapper: ContainerSubmitBatchHeaders }, default: { bodyMapper: StorageError, headersMapper: ContainerSubmitBatchExceptionHeaders } }, requestBody: body, queryParameters: [ timeoutInSeconds, comp4, restype2 ], urlParameters: [url], headerParameters: [ accept, version, requestId, contentLength, multipartContentType ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", serializer: xmlSerializer$4 }; const filterBlobsOperationSpec = { path: "/{containerName}", httpMethod: "GET", responses: { 200: { bodyMapper: FilterBlobSegment, headersMapper: ContainerFilterBlobsHeaders }, default: { bodyMapper: StorageError, headersMapper: ContainerFilterBlobsExceptionHeaders } }, queryParameters: [ timeoutInSeconds, marker, maxPageSize, comp5, where, restype2 ], urlParameters: [url], headerParameters: [ version, requestId, accept1 ], isXML: true, serializer: xmlSerializer$4 }; const acquireLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 201: { headersMapper: ContainerAcquireLeaseHeaders }, default: { bodyMapper: StorageError, headersMapper: ContainerAcquireLeaseExceptionHeaders } }, queryParameters: [ timeoutInSeconds, restype2, comp10 ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, action, duration, proposedLeaseId ], isXML: true, serializer: xmlSerializer$4 }; const releaseLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { headersMapper: ContainerReleaseLeaseHeaders }, default: { bodyMapper: StorageError, headersMapper: ContainerReleaseLeaseExceptionHeaders } }, queryParameters: [ timeoutInSeconds, restype2, comp10 ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, action1, leaseId1 ], isXML: true, serializer: xmlSerializer$4 }; const renewLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { headersMapper: ContainerRenewLeaseHeaders }, default: { bodyMapper: StorageError, headersMapper: ContainerRenewLeaseExceptionHeaders } }, queryParameters: [ timeoutInSeconds, restype2, comp10 ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, leaseId1, action2 ], isXML: true, serializer: xmlSerializer$4 }; const breakLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 202: { headersMapper: ContainerBreakLeaseHeaders }, default: { bodyMapper: StorageError, 
headersMapper: ContainerBreakLeaseExceptionHeaders } }, queryParameters: [ timeoutInSeconds, restype2, comp10 ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, action3, breakPeriod ], isXML: true, serializer: xmlSerializer$4 };
const changeLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { headersMapper: ContainerChangeLeaseHeaders }, default: { bodyMapper: StorageError, headersMapper: ContainerChangeLeaseExceptionHeaders } }, queryParameters: [ timeoutInSeconds, restype2, comp10 ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, leaseId1, action4, proposedLeaseId1 ], isXML: true, serializer: xmlSerializer$4 };
const listBlobFlatSegmentOperationSpec = { path: "/{containerName}", httpMethod: "GET", responses: { 200: { bodyMapper: ListBlobsFlatSegmentResponse, headersMapper: ContainerListBlobFlatSegmentHeaders }, default: { bodyMapper: StorageError, headersMapper: ContainerListBlobFlatSegmentExceptionHeaders } }, queryParameters: [ timeoutInSeconds, comp2, prefix, marker, maxPageSize, restype2, include1 ], urlParameters: [url], headerParameters: [ version, requestId, accept1 ], isXML: true, serializer: xmlSerializer$4 };
const listBlobHierarchySegmentOperationSpec = { path: "/{containerName}", httpMethod: "GET", responses: { 200: { bodyMapper: ListBlobsHierarchySegmentResponse, headersMapper: ContainerListBlobHierarchySegmentHeaders }, default: { bodyMapper: StorageError, headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders } }, queryParameters: [ timeoutInSeconds, comp2, prefix, marker, maxPageSize, restype2, include1, delimiter ], urlParameters: [url], headerParameters: [ version, requestId, accept1 ], isXML: true, serializer: xmlSerializer$4 };
const getAccountInfoOperationSpec$1 = { path: "/{containerName}", httpMethod: "GET", responses: { 200: { headersMapper: ContainerGetAccountInfoHeaders }, default: { bodyMapper: StorageError, headersMapper: ContainerGetAccountInfoExceptionHeaders } }, queryParameters: [ comp, timeoutInSeconds, restype1 ], urlParameters: [url], headerParameters: [ version, requestId, accept1 ], isXML: true, serializer: xmlSerializer$4 };
/** Class containing Blob operations. */ var BlobImpl = class {
/** * Initialize a new instance of the Blob class. * @param client Reference to the service client */ constructor(client) { this.client = client; }
/** * The Download operation reads or downloads a blob from the system, including its metadata and * properties. You can also call Download to read a snapshot. * @param options The options parameters. */ download(options) { return this.client.sendOperationRequest({ options }, downloadOperationSpec); }
/** * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system * properties for the blob. It does not return the content of the blob. * @param options The options parameters. */ getProperties(options) { return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); }
/** * If the storage account's soft delete feature is disabled, then when a blob is deleted it is * permanently removed from the storage account. If the storage account's soft delete feature is * enabled, then when a blob is deleted it is marked for deletion and becomes inaccessible * immediately. However, the blob service retains the blob or snapshot for the number of days specified * by the DeleteRetentionPolicy section of [Storage service properties](Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is * permanently removed from the storage account. Note that you continue to be charged for the * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a * soft-deleted blob or snapshot cause the service to return an HTTP status code of 404 * (ResourceNotFound). * @param options The options parameters. */ delete(options) { return this.client.sendOperationRequest({ options }, deleteOperationSpec); }
/** * Undelete a blob that was previously soft deleted * @param options The options parameters. */ undelete(options) { return this.client.sendOperationRequest({ options }, undeleteOperationSpec); }
/** * Sets the time a blob will expire and be deleted. * @param expiryOptions Required. Indicates mode of the expiry time * @param options The options parameters. */ setExpiry(expiryOptions$1, options) { return this.client.sendOperationRequest({ expiryOptions: expiryOptions$1, options }, setExpiryOperationSpec); }
/** * The Set HTTP Headers operation sets system properties on the blob * @param options The options parameters. */ setHttpHeaders(options) { return this.client.sendOperationRequest({ options }, setHttpHeadersOperationSpec); }
/** * The Set Immutability Policy operation sets the immutability policy on the blob * @param options The options parameters. */ setImmutabilityPolicy(options) { return this.client.sendOperationRequest({ options }, setImmutabilityPolicyOperationSpec); }
/** * The Delete Immutability Policy operation deletes the immutability policy on the blob * @param options The options parameters. */ deleteImmutabilityPolicy(options) { return this.client.sendOperationRequest({ options }, deleteImmutabilityPolicyOperationSpec); }
/** * The Set Legal Hold operation sets a legal hold on the blob. * @param legalHold Specifies whether a legal hold should be set on the blob. * @param options The options parameters. */ setLegalHold(legalHold$1, options) { return this.client.sendOperationRequest({ legalHold: legalHold$1, options }, setLegalHoldOperationSpec); }
/** * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more * name-value pairs * @param options The options parameters. */ setMetadata(options) { return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); }
/** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations * @param options The options parameters. */ acquireLease(options) { return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); }
/** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations * @param leaseId Specifies the current lease ID on the resource. * @param options The options parameters.
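* @example
* // Editor's addition — a hedged acquire/release round-trip. The proposed
* // lease ID is an illustrative GUID; duration -1 would request an infinite
* // lease (allowed values are -1 or 15 to 60 seconds).
* const blob = new BlobImpl(client);
* const acquired = await blob.acquireLease({ duration: 15, proposedLeaseId: "c3b9d57b-0000-4a6e-8f3a-111111111111" });
* await blob.releaseLease(acquired.leaseId);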
*/ releaseLease(leaseId$1, options) { return this.client.sendOperationRequest({ leaseId: leaseId$1, options }, releaseLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations * @param leaseId Specifies the current lease ID on the resource. * @param options The options parameters. */ renewLease(leaseId$1, options) { return this.client.sendOperationRequest({ leaseId: leaseId$1, options }, renewLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations * @param leaseId Specifies the current lease ID on the resource. * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor * (String) for a list of valid GUID string formats. * @param options The options parameters. */ changeLease(leaseId$1, proposedLeaseId$1, options) { return this.client.sendOperationRequest({ leaseId: leaseId$1, proposedLeaseId: proposedLeaseId$1, options }, changeLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations * @param options The options parameters. */ breakLease(options) { return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); } /** * The Create Snapshot operation creates a read-only snapshot of a blob * @param options The options parameters. */ createSnapshot(options) { return this.client.sendOperationRequest({ options }, createSnapshotOperationSpec); } /** * The Start Copy From URL operation copies a blob or an internet resource to a new blob. * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would * appear in a request URI. The source blob must either be public or must be authenticated via a shared * access signature. * @param options The options parameters. */ startCopyFromURL(copySource$1, options) { return this.client.sendOperationRequest({ copySource: copySource$1, options }, startCopyFromURLOperationSpec); } /** * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return * a response until the copy is complete. * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would * appear in a request URI. The source blob must either be public or must be authenticated via a shared * access signature. * @param options The options parameters. */ copyFromURL(copySource$1, options) { return this.client.sendOperationRequest({ copySource: copySource$1, options }, copyFromURLOperationSpec); } /** * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination * blob with zero length and full metadata. * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob * operation. * @param options The options parameters. */ abortCopyFromURL(copyId$1, options) { return this.client.sendOperationRequest({ copyId: copyId$1, options }, abortCopyFromURLOperationSpec); } /** * The Set Tier operation sets the tier on a blob. 
The operation is allowed on a page blob in a premium * storage account and on a block blob in a blob storage account (locally redundant storage only). A * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's * ETag. * @param tier Indicates the tier to be set on the blob. * @param options The options parameters. */ setTier(tier$1, options) { return this.client.sendOperationRequest({ tier: tier$1, options }, setTierOperationSpec); } /** * Returns the sku name and account kind * @param options The options parameters. */ getAccountInfo(options) { return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); } /** * The Query operation enables users to select/project on blob data by providing simple query * expressions. * @param options The options parameters. */ query(options) { return this.client.sendOperationRequest({ options }, queryOperationSpec); } /** * The Get Tags operation enables users to get the tags associated with a blob. * @param options The options parameters. */ getTags(options) { return this.client.sendOperationRequest({ options }, getTagsOperationSpec); } /** * The Set Tags operation enables users to set tags on a blob. * @param options The options parameters. */ setTags(options) { return this.client.sendOperationRequest({ options }, setTagsOperationSpec); } }; const xmlSerializer$3 = coreClient__namespace.createSerializer( Mappers, /* isXml */ true ); const downloadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { bodyMapper: { type: { name: "Stream" }, serializedName: "parsedResponse" }, headersMapper: BlobDownloadHeaders }, 206: { bodyMapper: { type: { name: "Stream" }, serializedName: "parsedResponse" }, headersMapper: BlobDownloadHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobDownloadExceptionHeaders } }, queryParameters: [ timeoutInSeconds, snapshot, versionId ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, range, rangeGetContentMD5, rangeGetContentCRC64, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags ], isXML: true, serializer: xmlSerializer$3 }; const getPropertiesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "HEAD", responses: { 200: { headersMapper: BlobGetPropertiesHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobGetPropertiesExceptionHeaders } }, queryParameters: [ timeoutInSeconds, snapshot, versionId ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags ], isXML: true, serializer: xmlSerializer$3 }; const deleteOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "DELETE", responses: { 202: { headersMapper: BlobDeleteHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobDeleteExceptionHeaders } }, queryParameters: [ timeoutInSeconds, snapshot, versionId, blobDeleteType ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, ifMatch, ifNoneMatch, ifTags, deleteSnapshots ], isXML: true, serializer: xmlSerializer$3 }; const undeleteOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobUndeleteHeaders }, default: { 
bodyMapper: StorageError, headersMapper: BlobUndeleteExceptionHeaders } }, queryParameters: [timeoutInSeconds, comp8], urlParameters: [url], headerParameters: [ version, requestId, accept1 ], isXML: true, serializer: xmlSerializer$3 }; const setExpiryOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobSetExpiryHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobSetExpiryExceptionHeaders } }, queryParameters: [timeoutInSeconds, comp11], urlParameters: [url], headerParameters: [ version, requestId, accept1, expiryOptions, expiresOn ], isXML: true, serializer: xmlSerializer$3 }; const setHttpHeadersOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobSetHttpHeadersHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobSetHttpHeadersExceptionHeaders } }, queryParameters: [comp, timeoutInSeconds], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, ifMatch, ifNoneMatch, ifTags, blobCacheControl, blobContentType, blobContentMD5, blobContentEncoding, blobContentLanguage, blobContentDisposition ], isXML: true, serializer: xmlSerializer$3 }; const setImmutabilityPolicyOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobSetImmutabilityPolicyHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobSetImmutabilityPolicyExceptionHeaders } }, queryParameters: [ timeoutInSeconds, snapshot, versionId, comp12 ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifUnmodifiedSince, immutabilityPolicyExpiry, immutabilityPolicyMode ], isXML: true, serializer: xmlSerializer$3 }; const deleteImmutabilityPolicyOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "DELETE", responses: { 200: { headersMapper: BlobDeleteImmutabilityPolicyHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders } }, queryParameters: [ timeoutInSeconds, snapshot, versionId, comp12 ], urlParameters: [url], headerParameters: [ version, requestId, accept1 ], isXML: true, serializer: xmlSerializer$3 }; const setLegalHoldOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobSetLegalHoldHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobSetLegalHoldExceptionHeaders } }, queryParameters: [ timeoutInSeconds, snapshot, versionId, comp13 ], urlParameters: [url], headerParameters: [ version, requestId, accept1, legalHold ], isXML: true, serializer: xmlSerializer$3 }; const setMetadataOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobSetMetadataHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobSetMetadataExceptionHeaders } }, queryParameters: [timeoutInSeconds, comp6], urlParameters: [url], headerParameters: [ version, requestId, accept1, metadata, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, encryptionScope ], isXML: true, serializer: xmlSerializer$3 }; const acquireLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: BlobAcquireLeaseHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobAcquireLeaseExceptionHeaders } }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], 
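/* Editor's note (hedged): all five blob lease specs share comp10 (comp=lease) and the same PUT path; the constant `action` parameters (x-ms-lease-action: acquire / release / renew / break / change) are what select the actual sub-operation on the service side. */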
headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, action, duration, proposedLeaseId, ifMatch, ifNoneMatch, ifTags ], isXML: true, serializer: xmlSerializer$3 }; const releaseLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobReleaseLeaseHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobReleaseLeaseExceptionHeaders } }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, action1, leaseId1, ifMatch, ifNoneMatch, ifTags ], isXML: true, serializer: xmlSerializer$3 }; const renewLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobRenewLeaseHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobRenewLeaseExceptionHeaders } }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, leaseId1, action2, ifMatch, ifNoneMatch, ifTags ], isXML: true, serializer: xmlSerializer$3 }; const changeLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobChangeLeaseHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobChangeLeaseExceptionHeaders } }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, leaseId1, action4, proposedLeaseId1, ifMatch, ifNoneMatch, ifTags ], isXML: true, serializer: xmlSerializer$3 }; const breakLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { headersMapper: BlobBreakLeaseHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobBreakLeaseExceptionHeaders } }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, action3, breakPeriod, ifMatch, ifNoneMatch, ifTags ], isXML: true, serializer: xmlSerializer$3 }; const createSnapshotOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: BlobCreateSnapshotHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobCreateSnapshotExceptionHeaders } }, queryParameters: [timeoutInSeconds, comp14], urlParameters: [url], headerParameters: [ version, requestId, accept1, metadata, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, encryptionScope ], isXML: true, serializer: xmlSerializer$3 }; const startCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { headersMapper: BlobStartCopyFromURLHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobStartCopyFromURLExceptionHeaders } }, queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ version, requestId, accept1, metadata, leaseId, ifModifiedSince, ifUnmodifiedSince, ifMatch, ifNoneMatch, ifTags, immutabilityPolicyExpiry, immutabilityPolicyMode, tier, rehydratePriority, sourceIfModifiedSince, sourceIfUnmodifiedSince, sourceIfMatch, sourceIfNoneMatch, sourceIfTags, copySource, blobTagsString, sealBlob, legalHold1 ], isXML: true, serializer: xmlSerializer$3 }; const copyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { headersMapper: 
BlobCopyFromURLHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobCopyFromURLExceptionHeaders } }, queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ version, requestId, accept1, metadata, leaseId, ifModifiedSince, ifUnmodifiedSince, ifMatch, ifNoneMatch, ifTags, immutabilityPolicyExpiry, immutabilityPolicyMode, encryptionScope, tier, sourceIfModifiedSince, sourceIfUnmodifiedSince, sourceIfMatch, sourceIfNoneMatch, copySource, blobTagsString, legalHold1, xMsRequiresSync, sourceContentMD5, copySourceAuthorization, copySourceTags ], isXML: true, serializer: xmlSerializer$3 }; const abortCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 204: { headersMapper: BlobAbortCopyFromURLHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobAbortCopyFromURLExceptionHeaders } }, queryParameters: [ timeoutInSeconds, comp15, copyId ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, copyActionAbortConstant ], isXML: true, serializer: xmlSerializer$3 }; const setTierOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobSetTierHeaders }, 202: { headersMapper: BlobSetTierHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobSetTierExceptionHeaders } }, queryParameters: [ timeoutInSeconds, snapshot, versionId, comp16 ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifTags, rehydratePriority, tier1 ], isXML: true, serializer: xmlSerializer$3 }; const getAccountInfoOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { headersMapper: BlobGetAccountInfoHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobGetAccountInfoExceptionHeaders } }, queryParameters: [ comp, timeoutInSeconds, restype1 ], urlParameters: [url], headerParameters: [ version, requestId, accept1 ], isXML: true, serializer: xmlSerializer$3 }; const queryOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "POST", responses: { 200: { bodyMapper: { type: { name: "Stream" }, serializedName: "parsedResponse" }, headersMapper: BlobQueryHeaders }, 206: { bodyMapper: { type: { name: "Stream" }, serializedName: "parsedResponse" }, headersMapper: BlobQueryHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobQueryExceptionHeaders } }, requestBody: queryRequest, queryParameters: [ timeoutInSeconds, snapshot, comp17 ], urlParameters: [url], headerParameters: [ contentType, accept, version, requestId, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", serializer: xmlSerializer$3 }; const getTagsOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { bodyMapper: BlobTags, headersMapper: BlobGetTagsHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobGetTagsExceptionHeaders } }, queryParameters: [ timeoutInSeconds, snapshot, versionId, comp18 ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifTags ], isXML: true, serializer: xmlSerializer$3 }; const setTagsOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 204: { headersMapper: BlobSetTagsHeaders }, default: { bodyMapper: StorageError, headersMapper: BlobSetTagsExceptionHeaders } }, requestBody: tags, queryParameters: [ timeoutInSeconds, 
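/*
 * Editor's note (illustrative): getTagsOperationSpec/setTagsOperationSpec wrap the Get/Set
 * Blob Tags REST calls (comp=tags). A hedged sketch via the public BlobClient:
 *
 *   await blobClient.setTags({ project: "alpha", state: "archived" });
 *   const { tags } = await blobClient.getTags();
 */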
versionId, comp18 ], urlParameters: [url], headerParameters: [ contentType, accept, version, requestId, leaseId, ifTags, transactionalContentMD5, transactionalContentCrc64 ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", serializer: xmlSerializer$3 }; /** Class containing PageBlob operations. */ var PageBlobImpl = class { /** * Initialize a new instance of the PageBlob class. * @param client Reference to the service client */ constructor(client) { this.client = client; } /** * The Create operation creates a new page blob. * @param contentLength The length of the request. * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The * page blob size must be aligned to a 512-byte boundary. * @param options The options parameters. */ create(contentLength$1, blobContentLength$1, options) { return this.client.sendOperationRequest({ contentLength: contentLength$1, blobContentLength: blobContentLength$1, options }, createOperationSpec$1); } /** * The Upload Pages operation writes a range of pages to a page blob. * @param contentLength The length of the request. * @param body Initial data * @param options The options parameters. */ uploadPages(contentLength$1, body$1, options) { return this.client.sendOperationRequest({ contentLength: contentLength$1, body: body$1, options }, uploadPagesOperationSpec); } /** * The Clear Pages operation clears a set of pages from a page blob. * @param contentLength The length of the request. * @param options The options parameters. */ clearPages(contentLength$1, options) { return this.client.sendOperationRequest({ contentLength: contentLength$1, options }, clearPagesOperationSpec); } /** * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a * URL. * @param sourceUrl Specify a URL to the copy source. * @param sourceRange Bytes of source data in the specified range. The length of this range should * match the ContentLength header and x-ms-range/Range destination range header. * @param contentLength The length of the request. * @param range The range of bytes to which the source range would be written. The range should be 512-byte * aligned and range-end is required. * @param options The options parameters. */ uploadPagesFromURL(sourceUrl$1, sourceRange$1, contentLength$1, range$2, options) { return this.client.sendOperationRequest({ sourceUrl: sourceUrl$1, sourceRange: sourceRange$1, contentLength: contentLength$1, range: range$2, options }, uploadPagesFromURLOperationSpec); } /** * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a * page blob. * @param options The options parameters. */ getPageRanges(options) { return this.client.sendOperationRequest({ options }, getPageRangesOperationSpec); } /** * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were * changed between the target blob and a previous snapshot. * @param options The options parameters. */ getPageRangesDiff(options) { return this.client.sendOperationRequest({ options }, getPageRangesDiffOperationSpec); } /** * Resize the Blob. * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The * page blob size must be aligned to a 512-byte boundary. * @param options The options parameters.
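* (Editor's illustrative note: because sizes must be 512-byte aligned, a caller wanting room
* for n bytes would request Math.ceil(n / 512) * 512 bytes.)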
*/ resize(blobContentLength$1, options) { return this.client.sendOperationRequest({ blobContentLength: blobContentLength$1, options }, resizeOperationSpec); } /** * Update the sequence number of the blob * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. * This property applies to page blobs only. This property indicates how the service should modify the * blob's sequence number * @param options The options parameters. */ updateSequenceNumber(sequenceNumberAction$1, options) { return this.client.sendOperationRequest({ sequenceNumberAction: sequenceNumberAction$1, options }, updateSequenceNumberOperationSpec); } /** * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. * The snapshot is copied such that only the differential changes between the previously copied * snapshot are transferred to the destination. The copied snapshots are complete copies of the * original snapshot and can be read or copied from as usual. This API is supported since REST version * 2016-05-31. * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would * appear in a request URI. The source blob must either be public or must be authenticated via a shared * access signature. * @param options The options parameters. */ copyIncremental(copySource$1, options) { return this.client.sendOperationRequest({ copySource: copySource$1, options }, copyIncrementalOperationSpec); } }; const xmlSerializer$2 = coreClient__namespace.createSerializer( Mappers, /* isXml */ true ); const createOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: PageBlobCreateHeaders }, default: { bodyMapper: StorageError, headersMapper: PageBlobCreateExceptionHeaders } }, queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ version, requestId, accept1, contentLength, metadata, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, blobCacheControl, blobContentType, blobContentMD5, blobContentEncoding, blobContentLanguage, blobContentDisposition, immutabilityPolicyExpiry, immutabilityPolicyMode, encryptionScope, tier, blobTagsString, legalHold1, blobType, blobContentLength, blobSequenceNumber ], isXML: true, serializer: xmlSerializer$2 }; const uploadPagesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: PageBlobUploadPagesHeaders }, default: { bodyMapper: StorageError, headersMapper: PageBlobUploadPagesExceptionHeaders } }, requestBody: body1, queryParameters: [timeoutInSeconds, comp19], urlParameters: [url], headerParameters: [ version, requestId, contentLength, leaseId, ifModifiedSince, ifUnmodifiedSince, range, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, encryptionScope, transactionalContentMD5, transactionalContentCrc64, contentType1, accept2, pageWrite, ifSequenceNumberLessThanOrEqualTo, ifSequenceNumberLessThan, ifSequenceNumberEqualTo ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "binary", serializer: xmlSerializer$2 }; const clearPagesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: PageBlobClearPagesHeaders }, default: { bodyMapper: StorageError, headersMapper: 
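/*
 * Editor's note (illustrative): the Update Sequence Number operation documented above combines
 * with the x-ms-if-sequence-number-* conditional headers on the page-write specs to give page
 * blobs an optimistic-concurrency primitive. A hedged sketch with hypothetical values:
 *
 *   await pageBlobClient.updateSequenceNumber("update", 7);
 *   await pageBlobClient.uploadPages(buf, 0, buf.length, {
 *     conditions: { ifSequenceNumberEqualTo: 7 }, // rejected if another writer bumped it
 *   });
 */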
PageBlobClearPagesExceptionHeaders } }, queryParameters: [timeoutInSeconds, comp19], urlParameters: [url], headerParameters: [ version, requestId, accept1, contentLength, leaseId, ifModifiedSince, ifUnmodifiedSince, range, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, encryptionScope, ifSequenceNumberLessThanOrEqualTo, ifSequenceNumberLessThan, ifSequenceNumberEqualTo, pageWrite1 ], isXML: true, serializer: xmlSerializer$2 }; const uploadPagesFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: PageBlobUploadPagesFromURLHeaders }, default: { bodyMapper: StorageError, headersMapper: PageBlobUploadPagesFromURLExceptionHeaders } }, queryParameters: [timeoutInSeconds, comp19], urlParameters: [url], headerParameters: [ version, requestId, accept1, contentLength, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, encryptionScope, sourceIfModifiedSince, sourceIfUnmodifiedSince, sourceIfMatch, sourceIfNoneMatch, sourceContentMD5, copySourceAuthorization, pageWrite, ifSequenceNumberLessThanOrEqualTo, ifSequenceNumberLessThan, ifSequenceNumberEqualTo, sourceUrl, sourceRange, sourceContentCrc64, range1 ], isXML: true, serializer: xmlSerializer$2 }; const getPageRangesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { bodyMapper: PageList, headersMapper: PageBlobGetPageRangesHeaders }, default: { bodyMapper: StorageError, headersMapper: PageBlobGetPageRangesExceptionHeaders } }, queryParameters: [ timeoutInSeconds, marker, maxPageSize, snapshot, comp20 ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, range, ifMatch, ifNoneMatch, ifTags ], isXML: true, serializer: xmlSerializer$2 }; const getPageRangesDiffOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { bodyMapper: PageList, headersMapper: PageBlobGetPageRangesDiffHeaders }, default: { bodyMapper: StorageError, headersMapper: PageBlobGetPageRangesDiffExceptionHeaders } }, queryParameters: [ timeoutInSeconds, marker, maxPageSize, snapshot, comp20, prevsnapshot ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, range, ifMatch, ifNoneMatch, ifTags, prevSnapshotUrl ], isXML: true, serializer: xmlSerializer$2 }; const resizeOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: PageBlobResizeHeaders }, default: { bodyMapper: StorageError, headersMapper: PageBlobResizeExceptionHeaders } }, queryParameters: [comp, timeoutInSeconds], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, encryptionScope, blobContentLength ], isXML: true, serializer: xmlSerializer$2 }; const updateSequenceNumberOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: PageBlobUpdateSequenceNumberHeaders }, default: { bodyMapper: StorageError, headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders } }, queryParameters: [comp, timeoutInSeconds], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, ifMatch, ifNoneMatch, ifTags, blobSequenceNumber, sequenceNumberAction ], isXML: 
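/*
 * Editor's note (illustrative): getPageRangesOperationSpec and getPageRangesDiffOperationSpec
 * above both target comp=pagelist; the diff variant adds prevsnapshot (or
 * x-ms-previous-snapshot-url) and returns only ranges changed since that snapshot. A hedged
 * sketch, with hypothetical size and snapshot values:
 *
 *   const diff = await pageBlobClient.getPageRangesDiff(0, size, snapshotTimestamp);
 *   for (const r of diff.pageRange ?? []) console.log(r);
 */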
true, serializer: xmlSerializer$2 }; const copyIncrementalOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { headersMapper: PageBlobCopyIncrementalHeaders }, default: { bodyMapper: StorageError, headersMapper: PageBlobCopyIncrementalExceptionHeaders } }, queryParameters: [timeoutInSeconds, comp21], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, ifMatch, ifNoneMatch, ifTags, copySource ], isXML: true, serializer: xmlSerializer$2 }; /** Class containing AppendBlob operations. */ var AppendBlobImpl = class { /** * Initialize a new instance of the AppendBlob class. * @param client Reference to the service client */ constructor(client) { this.client = client; } /** * The Create Append Blob operation creates a new append blob. * @param contentLength The length of the request. * @param options The options parameters. */ create(contentLength$1, options) { return this.client.sendOperationRequest({ contentLength: contentLength$1, options }, createOperationSpec); } /** * The Append Block operation commits a new block of data to the end of an existing append blob. The * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to * AppendBlob. Append Block is supported only on version 2015-02-21 or later. * @param contentLength The length of the request. * @param body Initial data * @param options The options parameters. */ appendBlock(contentLength$1, body$1, options) { return this.client.sendOperationRequest({ contentLength: contentLength$1, body: body$1, options }, appendBlockOperationSpec); } /** * The Append Block operation commits a new block of data to the end of an existing append blob where * the contents are read from a source URL. The Append Block operation is permitted only if the blob * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version * 2015-02-21 or later. * @param sourceUrl Specify a URL to the copy source. * @param contentLength The length of the request. * @param options The options parameters. */ appendBlockFromUrl(sourceUrl$1, contentLength$1, options) { return this.client.sendOperationRequest({ sourceUrl: sourceUrl$1, contentLength: contentLength$1, options }, appendBlockFromUrlOperationSpec); } /** * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version * 2019-12-12 or later. * @param options The options parameters.
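* (Editor's illustrative note: a hedged sketch via the public wrapper is simply
* `await appendBlobClient.seal()`; append attempts on a sealed blob then fail.)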
*/ seal(options) { return this.client.sendOperationRequest({ options }, sealOperationSpec); } }; const xmlSerializer$1 = coreClient__namespace.createSerializer( Mappers, /* isXml */ true ); const createOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: AppendBlobCreateHeaders }, default: { bodyMapper: StorageError, headersMapper: AppendBlobCreateExceptionHeaders } }, queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ version, requestId, accept1, contentLength, metadata, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, blobCacheControl, blobContentType, blobContentMD5, blobContentEncoding, blobContentLanguage, blobContentDisposition, immutabilityPolicyExpiry, immutabilityPolicyMode, encryptionScope, blobTagsString, legalHold1, blobType1 ], isXML: true, serializer: xmlSerializer$1 }; const appendBlockOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: AppendBlobAppendBlockHeaders }, default: { bodyMapper: StorageError, headersMapper: AppendBlobAppendBlockExceptionHeaders } }, requestBody: body1, queryParameters: [timeoutInSeconds, comp22], urlParameters: [url], headerParameters: [ version, requestId, contentLength, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, encryptionScope, transactionalContentMD5, transactionalContentCrc64, contentType1, accept2, maxSize, appendPosition ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "binary", serializer: xmlSerializer$1 }; const appendBlockFromUrlOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: AppendBlobAppendBlockFromUrlHeaders }, default: { bodyMapper: StorageError, headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders } }, queryParameters: [timeoutInSeconds, comp22], urlParameters: [url], headerParameters: [ version, requestId, accept1, contentLength, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, encryptionScope, sourceIfModifiedSince, sourceIfUnmodifiedSince, sourceIfMatch, sourceIfNoneMatch, sourceContentMD5, copySourceAuthorization, transactionalContentMD5, sourceUrl, sourceContentCrc64, maxSize, appendPosition, sourceRange1 ], isXML: true, serializer: xmlSerializer$1 }; const sealOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: AppendBlobSealHeaders }, default: { bodyMapper: StorageError, headersMapper: AppendBlobSealExceptionHeaders } }, queryParameters: [timeoutInSeconds, comp23], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, ifMatch, ifNoneMatch, appendPosition ], isXML: true, serializer: xmlSerializer$1 }; /** Class containing BlockBlob operations. */ var BlockBlobImpl = class { /** * Initialize a new instance of the BlockBlob class. * @param client Reference to the service client */ constructor(client) { this.client = client; } /** * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put * Blob; the content of the existing blob is overwritten with the content of the new blob.
To perform a * partial update of the content of a block blob, use the Put Block List operation. * @param contentLength The length of the request. * @param body Initial data * @param options The options parameters. */ upload(contentLength$1, body$1, options) { return this.client.sendOperationRequest({ contentLength: contentLength$1, body: body$1, options }, uploadOperationSpec); } /** * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are * not supported with Put Blob from URL; the content of an existing blob is overwritten with the * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, * use the Put Block from URL API in conjunction with Put Block List. * @param contentLength The length of the request. * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would * appear in a request URI. The source blob must either be public or must be authenticated via a shared * access signature. * @param options The options parameters. */ putBlobFromUrl(contentLength$1, copySource$1, options) { return this.client.sendOperationRequest({ contentLength: contentLength$1, copySource: copySource$1, options }, putBlobFromUrlOperationSpec); } /** * The Stage Block operation creates a new block to be committed as part of a blob * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified * for the blockid parameter must be the same size for each block. * @param contentLength The length of the request. * @param body Initial data * @param options The options parameters. */ stageBlock(blockId$1, contentLength$1, body$1, options) { return this.client.sendOperationRequest({ blockId: blockId$1, contentLength: contentLength$1, body: body$1, options }, stageBlockOperationSpec); } /** * The Stage Block operation creates a new block to be committed as part of a blob where the contents * are read from a URL. * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified * for the blockid parameter must be the same size for each block. * @param contentLength The length of the request. * @param sourceUrl Specify a URL to the copy source. * @param options The options parameters. */ stageBlockFromURL(blockId$1, contentLength$1, sourceUrl$1, options) { return this.client.sendOperationRequest({ blockId: blockId$1, contentLength: contentLength$1, sourceUrl: sourceUrl$1, options }, stageBlockFromURLOperationSpec); } /** * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the * blob. In order to be written as part of a blob, a block must have been successfully written to the * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading * only those blocks that have changed, then committing the new and existing blocks together. You can * do this by specifying whether to commit a block from the committed block list or from the * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list * it may belong to. 
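* (Editor's illustrative note: a hedged two-block sketch with hypothetical IDs: stage
* "QkxPQ0sx" and "QkxPQ0sy" via Put Block, commit ["QkxPQ0sx", "QkxPQ0sy"] here, and any
* staged block left out of the committed list is eventually garbage-collected.)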
* @param blocks Blob Blocks. * @param options The options parameters. */ commitBlockList(blocks$1, options) { return this.client.sendOperationRequest({ blocks: blocks$1, options }, commitBlockListOperationSpec); } /** * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block * blob * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted * blocks, or both lists together. * @param options The options parameters. */ getBlockList(listType$1, options) { return this.client.sendOperationRequest({ listType: listType$1, options }, getBlockListOperationSpec); } }; const xmlSerializer = coreClient__namespace.createSerializer( Mappers, /* isXml */ true ); const uploadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: BlockBlobUploadHeaders }, default: { bodyMapper: StorageError, headersMapper: BlockBlobUploadExceptionHeaders } }, requestBody: body1, queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ version, requestId, contentLength, metadata, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, blobCacheControl, blobContentType, blobContentMD5, blobContentEncoding, blobContentLanguage, blobContentDisposition, immutabilityPolicyExpiry, immutabilityPolicyMode, encryptionScope, tier, blobTagsString, legalHold1, transactionalContentMD5, transactionalContentCrc64, contentType1, accept2, blobType2 ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "binary", serializer: xmlSerializer }; const putBlobFromUrlOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: BlockBlobPutBlobFromUrlHeaders }, default: { bodyMapper: StorageError, headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders } }, queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ version, requestId, accept1, contentLength, metadata, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, blobCacheControl, blobContentType, blobContentMD5, blobContentEncoding, blobContentLanguage, blobContentDisposition, encryptionScope, tier, sourceIfModifiedSince, sourceIfUnmodifiedSince, sourceIfMatch, sourceIfNoneMatch, sourceIfTags, copySource, blobTagsString, sourceContentMD5, copySourceAuthorization, copySourceTags, transactionalContentMD5, blobType2, copySourceBlobProperties ], isXML: true, serializer: xmlSerializer }; const stageBlockOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: BlockBlobStageBlockHeaders }, default: { bodyMapper: StorageError, headersMapper: BlockBlobStageBlockExceptionHeaders } }, requestBody: body1, queryParameters: [ timeoutInSeconds, comp24, blockId ], urlParameters: [url], headerParameters: [ version, requestId, contentLength, leaseId, encryptionKey, encryptionKeySha256, encryptionAlgorithm, encryptionScope, transactionalContentMD5, transactionalContentCrc64, contentType1, accept2 ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "binary", serializer: xmlSerializer }; const stageBlockFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: BlockBlobStageBlockFromURLHeaders }, default: { bodyMapper: StorageError, headersMapper: BlockBlobStageBlockFromURLExceptionHeaders } }, 
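/*
 * Editor's note (illustrative): the staged-block flow at the client level. Block IDs must be
 * Base64 and, per blob, all pre-encoding IDs must have equal length; a hedged sketch using a
 * hypothetical fixed-width counter:
 *
 *   const blockId = (i) => Buffer.from(String(i).padStart(6, "0")).toString("base64");
 *   await blockBlobClient.stageBlock(blockId(0), chunk0, chunk0.length);
 *   await blockBlobClient.stageBlock(blockId(1), chunk1, chunk1.length);
 *   await blockBlobClient.commitBlockList([blockId(0), blockId(1)]);
 */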
queryParameters: [ timeoutInSeconds, comp24, blockId ], urlParameters: [url], headerParameters: [ version, requestId, accept1, contentLength, leaseId, encryptionKey, encryptionKeySha256, encryptionAlgorithm, encryptionScope, sourceIfModifiedSince, sourceIfUnmodifiedSince, sourceIfMatch, sourceIfNoneMatch, sourceContentMD5, copySourceAuthorization, sourceUrl, sourceContentCrc64, sourceRange1 ], isXML: true, serializer: xmlSerializer }; const commitBlockListOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: BlockBlobCommitBlockListHeaders }, default: { bodyMapper: StorageError, headersMapper: BlockBlobCommitBlockListExceptionHeaders } }, requestBody: blocks, queryParameters: [timeoutInSeconds, comp25], urlParameters: [url], headerParameters: [ contentType, accept, version, requestId, metadata, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, blobCacheControl, blobContentType, blobContentMD5, blobContentEncoding, blobContentLanguage, blobContentDisposition, immutabilityPolicyExpiry, immutabilityPolicyMode, encryptionScope, tier, blobTagsString, legalHold1, transactionalContentMD5, transactionalContentCrc64 ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", serializer: xmlSerializer }; const getBlockListOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { bodyMapper: BlockList, headersMapper: BlockBlobGetBlockListHeaders }, default: { bodyMapper: StorageError, headersMapper: BlockBlobGetBlockListExceptionHeaders } }, queryParameters: [ timeoutInSeconds, snapshot, comp25, listType ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifTags ], isXML: true, serializer: xmlSerializer }; let StorageClient$1 = class StorageClient$2 extends coreHttpCompat__namespace.ExtendedServiceClient { /** * Initializes a new instance of the StorageClient class. * @param url The URL of the service account, container, or blob that is the target of the desired * operation. * @param options The parameter options */ constructor(url$1, options) { var _a$2, _b$1; if (url$1 === void 0) throw new Error("'url' cannot be null"); if (!options) options = {}; const defaults = { requestContentType: "application/json; charset=utf-8" }; const packageDetails = `azsdk-js-azure-storage-blob/12.27.0`; const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}` : `${packageDetails}`; const optionsWithDefaults = Object.assign(Object.assign(Object.assign({}, defaults), options), { userAgentOptions: { userAgentPrefix }, endpoint: (_b$1 = (_a$2 = options.endpoint) !== null && _a$2 !== void 0 ? _a$2 : options.baseUri) !== null && _b$1 !== void 0 ? 
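/*
 * Editor's note: the expression being evaluated here resolves the client endpoint, preferring
 * options.endpoint, then the legacy options.baseUri, then the "{url}" template. The
 * "/{containerName}/{blob}" paths in the specs above are later blanked out by
 * StorageContextClient.sendOperationRequest because the full resource URL already arrives via
 * the `url` parameter mapper.
 */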
_b$1 : "{url}" }); super(optionsWithDefaults); this.url = url$1; this.version = options.version || "2025-05-05"; this.service = new ServiceImpl(this); this.container = new ContainerImpl(this); this.blob = new BlobImpl(this); this.pageBlob = new PageBlobImpl(this); this.appendBlob = new AppendBlobImpl(this); this.blockBlob = new BlockBlobImpl(this); } }; /** * @internal */ var StorageContextClient = class extends StorageClient$1 { async sendOperationRequest(operationArguments, operationSpec) { const operationSpecToSend = Object.assign({}, operationSpec); if (operationSpecToSend.path === "/{containerName}" || operationSpecToSend.path === "/{containerName}/{blob}") operationSpecToSend.path = ""; return super.sendOperationRequest(operationArguments, operationSpecToSend); } }; /** * A StorageClient represents a base URL class for {@link BlobServiceClient}, {@link ContainerClient}, * etc. */ var StorageClient = class { /** * Creates an instance of StorageClient. * @param url - url to resource * @param pipeline - request policy pipeline. */ constructor(url$1, pipeline) { this.url = escapeURLPath(url$1); this.accountName = getAccountNameFromUrl(url$1); this.pipeline = pipeline; this.storageClientContext = new StorageContextClient(this.url, getCoreClientOptions(pipeline)); this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); this.credential = getCredentialFromPipeline(pipeline); const storageClientContext = this.storageClientContext; storageClientContext.requestContentType = void 0; } }; /** * Creates a span using the global tracer. * @internal */ const tracingClient = coreTracing.createTracingClient({ packageName: "@azure/storage-blob", packageVersion: SDK_VERSION, namespace: "Microsoft.Storage" }); /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all * the values are set, this should be serialized with toString and set as the permissions field on a * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but * the order of the permissions is particular and this class guarantees correctness. */ var BlobSASPermissions = class BlobSASPermissions { constructor() { /** * Specifies Read access granted. */ this.read = false; /** * Specifies Add access granted. */ this.add = false; /** * Specifies Create access granted. */ this.create = false; /** * Specifies Write access granted. */ this.write = false; /** * Specifies Delete access granted. */ this.delete = false; /** * Specifies Delete version access granted. */ this.deleteVersion = false; /** * Specifies Tag access granted. */ this.tag = false; /** * Specifies Move access granted. */ this.move = false; /** * Specifies Execute access granted. */ this.execute = false; /** * Specifies SetImmutabilityPolicy access granted. */ this.setImmutabilityPolicy = false; /** * Specifies that Permanent Delete is permitted. */ this.permanentDelete = false; } /** * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an * Error if it encounters a character that does not correspond to a valid permission.
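* For example (illustrative), BlobSASPermissions.parse("racwd") grants read, add, create,
* write and delete, while an unrecognized character such as "q" throws a RangeError.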
* * @param permissions - */ static parse(permissions) { const blobSASPermissions = new BlobSASPermissions(); for (const char of permissions) switch (char) { case "r": blobSASPermissions.read = true; break; case "a": blobSASPermissions.add = true; break; case "c": blobSASPermissions.create = true; break; case "w": blobSASPermissions.write = true; break; case "d": blobSASPermissions.delete = true; break; case "x": blobSASPermissions.deleteVersion = true; break; case "t": blobSASPermissions.tag = true; break; case "m": blobSASPermissions.move = true; break; case "e": blobSASPermissions.execute = true; break; case "i": blobSASPermissions.setImmutabilityPolicy = true; break; case "y": blobSASPermissions.permanentDelete = true; break; default: throw new RangeError(`Invalid permission: ${char}`); } return blobSASPermissions; } /** * Creates a {@link BlobSASPermissions} from a raw object which contains the same keys as it * and boolean values for them. * * @param permissionLike - */ static from(permissionLike) { const blobSASPermissions = new BlobSASPermissions(); if (permissionLike.read) blobSASPermissions.read = true; if (permissionLike.add) blobSASPermissions.add = true; if (permissionLike.create) blobSASPermissions.create = true; if (permissionLike.write) blobSASPermissions.write = true; if (permissionLike.delete) blobSASPermissions.delete = true; if (permissionLike.deleteVersion) blobSASPermissions.deleteVersion = true; if (permissionLike.tag) blobSASPermissions.tag = true; if (permissionLike.move) blobSASPermissions.move = true; if (permissionLike.execute) blobSASPermissions.execute = true; if (permissionLike.setImmutabilityPolicy) blobSASPermissions.setImmutabilityPolicy = true; if (permissionLike.permanentDelete) blobSASPermissions.permanentDelete = true; return blobSASPermissions; } /** * Converts the given permissions to a string. Using this method will guarantee the permissions are in an * order accepted by the service. * * @returns A string which represents the BlobSASPermissions */ toString() { const permissions = []; if (this.read) permissions.push("r"); if (this.add) permissions.push("a"); if (this.create) permissions.push("c"); if (this.write) permissions.push("w"); if (this.delete) permissions.push("d"); if (this.deleteVersion) permissions.push("x"); if (this.tag) permissions.push("t"); if (this.move) permissions.push("m"); if (this.execute) permissions.push("e"); if (this.setImmutabilityPolicy) permissions.push("i"); if (this.permanentDelete) permissions.push("y"); return permissions.join(""); } }; /** * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. * Once all the values are set, this should be serialized with toString and set as the permissions field on a * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but * the order of the permissions is particular and this class guarantees correctness. */ var ContainerSASPermissions = class ContainerSASPermissions { constructor() { /** * Specifies Read access granted. */ this.read = false; /** * Specifies Add access granted. */ this.add = false; /** * Specifies Create access granted. */ this.create = false; /** * Specifies Write access granted. */ this.write = false; /** * Specifies Delete access granted. */ this.delete = false; /** * Specifies Delete version access granted.
*/ this.deleteVersion = false; /** * Specifies List access granted. */ this.list = false; /** * Specifies Tag access granted. */ this.tag = false; /** * Specifies Move access granted. */ this.move = false; /** * Specifies Execute access granted. */ this.execute = false; /** * Specifies SetImmutabilityPolicy access granted. */ this.setImmutabilityPolicy = false; /** * Specifies that Permanent Delete is permitted. */ this.permanentDelete = false; /** * Specifies that Filter Blobs by Tags is permitted. */ this.filterByTags = false; } /** * Creates a {@link ContainerSASPermissions} from the specified permissions string. This method will throw an * Error if it encounters a character that does not correspond to a valid permission. * * @param permissions - */ static parse(permissions) { const containerSASPermissions = new ContainerSASPermissions(); for (const char of permissions) switch (char) { case "r": containerSASPermissions.read = true; break; case "a": containerSASPermissions.add = true; break; case "c": containerSASPermissions.create = true; break; case "w": containerSASPermissions.write = true; break; case "d": containerSASPermissions.delete = true; break; case "l": containerSASPermissions.list = true; break; case "t": containerSASPermissions.tag = true; break; case "x": containerSASPermissions.deleteVersion = true; break; case "m": containerSASPermissions.move = true; break; case "e": containerSASPermissions.execute = true; break; case "i": containerSASPermissions.setImmutabilityPolicy = true; break; case "y": containerSASPermissions.permanentDelete = true; break; case "f": containerSASPermissions.filterByTags = true; break; default: throw new RangeError(`Invalid permission ${char}`); } return containerSASPermissions; } /** * Creates a {@link ContainerSASPermissions} from a raw object which contains the same keys as it * and boolean values for them. * * @param permissionLike - */ static from(permissionLike) { const containerSASPermissions = new ContainerSASPermissions(); if (permissionLike.read) containerSASPermissions.read = true; if (permissionLike.add) containerSASPermissions.add = true; if (permissionLike.create) containerSASPermissions.create = true; if (permissionLike.write) containerSASPermissions.write = true; if (permissionLike.delete) containerSASPermissions.delete = true; if (permissionLike.list) containerSASPermissions.list = true; if (permissionLike.deleteVersion) containerSASPermissions.deleteVersion = true; if (permissionLike.tag) containerSASPermissions.tag = true; if (permissionLike.move) containerSASPermissions.move = true; if (permissionLike.execute) containerSASPermissions.execute = true; if (permissionLike.setImmutabilityPolicy) containerSASPermissions.setImmutabilityPolicy = true; if (permissionLike.permanentDelete) containerSASPermissions.permanentDelete = true; if (permissionLike.filterByTags) containerSASPermissions.filterByTags = true; return containerSASPermissions; } /** * Converts the given permissions to a string. Using this method will guarantee the permissions are in an * order accepted by the service. * * The order of the characters should be as specified here to ensure correctness.
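* (Editor's illustrative note: an instance with read, write, delete and list set serializes
* as "rwdl", matching the service's canonical ordering.)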
* @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * */ toString() { const permissions = []; if (this.read) permissions.push("r"); if (this.add) permissions.push("a"); if (this.create) permissions.push("c"); if (this.write) permissions.push("w"); if (this.delete) permissions.push("d"); if (this.deleteVersion) permissions.push("x"); if (this.list) permissions.push("l"); if (this.tag) permissions.push("t"); if (this.move) permissions.push("m"); if (this.execute) permissions.push("e"); if (this.setImmutabilityPolicy) permissions.push("i"); if (this.permanentDelete) permissions.push("y"); if (this.filterByTags) permissions.push("f"); return permissions.join(""); } }; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * UserDelegationKeyCredential is only used for generation of user delegation SAS. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas */ var UserDelegationKeyCredential = class { /** * Creates an instance of UserDelegationKeyCredential. * @param accountName - * @param userDelegationKey - */ constructor(accountName, userDelegationKey) { this.accountName = accountName; this.userDelegationKey = userDelegationKey; this.key = Buffer.from(userDelegationKey.value, "base64"); } /** * Generates a hash signature for an HTTP request or for a SAS. * * @param stringToSign - */ computeHMACSHA256(stringToSign) { return crypto$1.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); } }; /** * Generate SasIPRange format string. For example: * * "8.8.8.8" or "1.1.1.1-255.255.255.255" * * @param ipRange - */ function ipRangeToString(ipRange) { return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; } /** * Protocols for generated SAS. */ exports.SASProtocol = void 0; (function(SASProtocol) { /** * Protocol that allows HTTPS only */ SASProtocol["Https"] = "https"; /** * Protocol that allows both HTTPS and HTTP */ SASProtocol["HttpsAndHttp"] = "https,http"; })(exports.SASProtocol || (exports.SASProtocol = {})); /** * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should * be taken here in case there are existing query parameters, which might affect the appropriate means of appending * these query parameters). * * NOTE: Instances of this class are immutable. */ var SASQueryParameters = class { /** * Optional. IP range allowed for this SAS. 
* * @readonly */ get ipRange() { if (this.ipRangeInner) return { end: this.ipRangeInner.end, start: this.ipRangeInner.start }; return void 0; } constructor(version$1, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn$1, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType$1, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope$1) { this.version = version$1; this.signature = signature; if (permissionsOrOptions !== void 0 && typeof permissionsOrOptions !== "string") { this.permissions = permissionsOrOptions.permissions; this.services = permissionsOrOptions.services; this.resourceTypes = permissionsOrOptions.resourceTypes; this.protocol = permissionsOrOptions.protocol; this.startsOn = permissionsOrOptions.startsOn; this.expiresOn = permissionsOrOptions.expiresOn; this.ipRangeInner = permissionsOrOptions.ipRange; this.identifier = permissionsOrOptions.identifier; this.encryptionScope = permissionsOrOptions.encryptionScope; this.resource = permissionsOrOptions.resource; this.cacheControl = permissionsOrOptions.cacheControl; this.contentDisposition = permissionsOrOptions.contentDisposition; this.contentEncoding = permissionsOrOptions.contentEncoding; this.contentLanguage = permissionsOrOptions.contentLanguage; this.contentType = permissionsOrOptions.contentType; if (permissionsOrOptions.userDelegationKey) { this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; this.signedService = permissionsOrOptions.userDelegationKey.signedService; this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; this.correlationId = permissionsOrOptions.correlationId; } } else { this.services = services; this.resourceTypes = resourceTypes; this.expiresOn = expiresOn$1; this.permissions = permissionsOrOptions; this.protocol = protocol; this.startsOn = startsOn; this.ipRangeInner = ipRange; this.encryptionScope = encryptionScope$1; this.identifier = identifier; this.resource = resource; this.cacheControl = cacheControl; this.contentDisposition = contentDisposition; this.contentEncoding = contentEncoding; this.contentLanguage = contentLanguage; this.contentType = contentType$1; if (userDelegationKey) { this.signedOid = userDelegationKey.signedObjectId; this.signedTenantId = userDelegationKey.signedTenantId; this.signedStartsOn = userDelegationKey.signedStartsOn; this.signedExpiresOn = userDelegationKey.signedExpiresOn; this.signedService = userDelegationKey.signedService; this.signedVersion = userDelegationKey.signedVersion; this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; this.correlationId = correlationId; } } } /** * Encodes all SAS query parameters into a string that can be appended to a URL. 
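* A hedged example of the output's shape (placeholder values):
* sv=2025-05-05&se=2030-01-01T00%3A00%3A00Z&sr=b&sp=r&sig=<signature>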
* */ toString() { const params = [ "sv", "ss", "srt", "spr", "st", "se", "sip", "si", "ses", "skoid", "sktid", "skt", "ske", "sks", "skv", "sr", "sp", "sig", "rscc", "rscd", "rsce", "rscl", "rsct", "saoid", "scid" ]; const queries = []; for (const param of params) switch (param) { case "sv": this.tryAppendQueryParameter(queries, param, this.version); break; case "ss": this.tryAppendQueryParameter(queries, param, this.services); break; case "srt": this.tryAppendQueryParameter(queries, param, this.resourceTypes); break; case "spr": this.tryAppendQueryParameter(queries, param, this.protocol); break; case "st": this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : void 0); break; case "se": this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : void 0); break; case "sip": this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : void 0); break; case "si": this.tryAppendQueryParameter(queries, param, this.identifier); break; case "ses": this.tryAppendQueryParameter(queries, param, this.encryptionScope); break; case "skoid": this.tryAppendQueryParameter(queries, param, this.signedOid); break; case "sktid": this.tryAppendQueryParameter(queries, param, this.signedTenantId); break; case "skt": this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : void 0); break; case "ske": this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : void 0); break; case "sks": this.tryAppendQueryParameter(queries, param, this.signedService); break; case "skv": this.tryAppendQueryParameter(queries, param, this.signedVersion); break; case "sr": this.tryAppendQueryParameter(queries, param, this.resource); break; case "sp": this.tryAppendQueryParameter(queries, param, this.permissions); break; case "sig": this.tryAppendQueryParameter(queries, param, this.signature); break; case "rscc": this.tryAppendQueryParameter(queries, param, this.cacheControl); break; case "rscd": this.tryAppendQueryParameter(queries, param, this.contentDisposition); break; case "rsce": this.tryAppendQueryParameter(queries, param, this.contentEncoding); break; case "rscl": this.tryAppendQueryParameter(queries, param, this.contentLanguage); break; case "rsct": this.tryAppendQueryParameter(queries, param, this.contentType); break; case "saoid": this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); break; case "scid": this.tryAppendQueryParameter(queries, param, this.correlationId); break; } return queries.join("&"); } /** * A private helper method used to filter and append query key/value pairs into an array. * * @param queries - * @param key - * @param value - */ tryAppendQueryParameter(queries, key, value) { if (!value) return; key = encodeURIComponent(key); value = encodeURIComponent(value); if (key.length > 0 && value.length > 0) queries.push(`${key}=${value}`); } }; function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { return generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName).sasQueryParameters; } function generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { const version$1 = blobSASSignatureValues.version ? 
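/*
 * Editor's note (illustrative): a hedged end-to-end sketch of shared-key SAS generation
 * (names hypothetical):
 *
 *   const sas = generateBlobSASQueryParameters({
 *     containerName: "mycontainer",
 *     blobName: "myblob.txt",
 *     permissions: BlobSASPermissions.parse("r"),
 *     expiresOn: new Date(Date.now() + 3600 * 1000),
 *   }, sharedKeyCredential).toString();
 *   const sasUrl = `${blobClient.url}?${sas}`;
 */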
blobSASSignatureValues.version : SERVICE_VERSION; const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential ? sharedKeyCredentialOrUserDelegationKey : void 0; let userDelegationKeyCredential; if (sharedKeyCredential === void 0 && accountName !== void 0) userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); if (sharedKeyCredential === void 0 && userDelegationKeyCredential === void 0) throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); if (version$1 >= "2020-12-06") if (sharedKeyCredential !== void 0) return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); else return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); if (version$1 >= "2018-11-09") if (sharedKeyCredential !== void 0) return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); else if (version$1 >= "2020-02-10") return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); else return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); if (version$1 >= "2015-04-05") if (sharedKeyCredential !== void 0) return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); else throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); throw new RangeError("'version' must be >= '2015-04-05'."); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09. * * Creates an instance of SASQueryParameters. * * Only accepts required settings needed to create a SAS. For optional settings please * set corresponding properties directly, such as permissions, startsOn and identifier. * * WARNING: When identifier is not provided, permissions and expiresOn are required. * You MUST assign a value to identifier or to expiresOn & permissions manually if you initialize with * this constructor. * * @param blobSASSignatureValues - * @param sharedKeyCredential - */ function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); let resource = "c"; if (blobSASSignatureValues.blobName) resource = "b"; let verifiedPermissions; if (blobSASSignatureValues.permissions) if (blobSASSignatureValues.blobName) verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); else verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); const stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), blobSASSignatureValues.identifier, blobSASSignatureValues.ipRange ? 
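/*
 * Editor's note: the version dispatch above compares ISO-dated service versions as plain
 * strings ("2020-12-06" >= "2018-11-09"), which is sound because the fields are fixed-width
 * and zero-padded, so lexicographic and chronological order coincide.
 */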
ipRangeToString(blobSASSignatureValues.ipRange) : "", blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", blobSASSignatureValues.version, blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" ].join("\n"); const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return { sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), stringToSign }; } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. * * Creates an instance of SASQueryParameters. * * Only accepts required settings needed to create a SAS. For optional settings please * set corresponding properties directly, such as permissions, startsOn and identifier. * * WARNING: When identifier is not provided, permissions and expiresOn are required. * You MUST assign a value to identifier or to expiresOn & permissions manually if you initialize with * this constructor. * * @param blobSASSignatureValues - * @param sharedKeyCredential - */ function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); let resource = "c"; let timestamp = blobSASSignatureValues.snapshotTime; if (blobSASSignatureValues.blobName) { resource = "b"; if (blobSASSignatureValues.snapshotTime) resource = "bs"; else if (blobSASSignatureValues.versionId) { resource = "bv"; timestamp = blobSASSignatureValues.versionId; } } let verifiedPermissions; if (blobSASSignatureValues.permissions) if (blobSASSignatureValues.blobName) verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); else verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); const stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), blobSASSignatureValues.identifier, blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", blobSASSignatureValues.version, resource, timestamp, blobSASSignatureValues.cacheControl ? 
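/*
 * Editor's note: relative to the 2015-04-05 string-to-sign above, this 2018-11-09 layout
 * inserts the signed resource type ("c", "b", "bs" or "bv") and the snapshot/version
 * timestamp between the version field and the response-header overrides.
 */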
blobSASSignatureValues.cacheControl : "", blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" ].join("\n"); const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return { sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), stringToSign }; } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. * * Creates an instance of SASQueryParameters. * * Only accepts required settings needed to create a SAS. For optional settings please * set corresponding properties directly, such as permissions, startsOn and identifier. * * WARNING: When identifier is not provided, permissions and expiresOn are required. * You MUST assign a value to identifier or to expiresOn & permissions manually if you initialize with * this constructor. * * @param blobSASSignatureValues - * @param sharedKeyCredential - */ function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); let resource = "c"; let timestamp = blobSASSignatureValues.snapshotTime; if (blobSASSignatureValues.blobName) { resource = "b"; if (blobSASSignatureValues.snapshotTime) resource = "bs"; else if (blobSASSignatureValues.versionId) { resource = "bv"; timestamp = blobSASSignatureValues.versionId; } } let verifiedPermissions; if (blobSASSignatureValues.permissions) if (blobSASSignatureValues.blobName) verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); else verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); const stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), blobSASSignatureValues.identifier, blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", blobSASSignatureValues.version, resource, timestamp, blobSASSignatureValues.encryptionScope, blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", blobSASSignatureValues.contentEncoding ? 
blobSASSignatureValues.contentEncoding : "", blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" ].join("\n"); const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return { sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, void 0, void 0, void 0, blobSASSignatureValues.encryptionScope), stringToSign }; } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. * * Creates an instance of SASQueryParameters. * * Only accepts required settings needed to create a SAS. For optional settings please * set corresponding properties directly, such as permissions, startsOn. * * WARNING: identifier will be ignored, permissions and expiresOn are required. * * @param blobSASSignatureValues - * @param userDelegationKeyCredential - */ function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); let resource = "c"; let timestamp = blobSASSignatureValues.snapshotTime; if (blobSASSignatureValues.blobName) { resource = "b"; if (blobSASSignatureValues.snapshotTime) resource = "bs"; else if (blobSASSignatureValues.versionId) { resource = "bv"; timestamp = blobSASSignatureValues.versionId; } } let verifiedPermissions; if (blobSASSignatureValues.permissions) if (blobSASSignatureValues.blobName) verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); else verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); const stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), userDelegationKeyCredential.userDelegationKey.signedObjectId, userDelegationKeyCredential.userDelegationKey.signedTenantId, userDelegationKeyCredential.userDelegationKey.signedStartsOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", userDelegationKeyCredential.userDelegationKey.signedService, userDelegationKeyCredential.userDelegationKey.signedVersion, blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", blobSASSignatureValues.protocol ? 
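/* A user delegation SAS is signed with a key obtained from Microsoft Entra ID instead
 * of the account key. A sketch of the usual entry point (the `blobServiceClient` is
 * assumed; the public generateBlobSASQueryParameters wrapper dispatches to these UDK
 * helpers by service version):
 *
 *   const startsOn = new Date();
 *   const expiresOn = new Date(Date.now() + 60 * 60 * 1000);
 *   const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn);
 *   const sas = generateBlobSASQueryParameters({
 *     containerName: "my-container",
 *     permissions: ContainerSASPermissions.parse("rl"),
 *     startsOn,
 *     expiresOn,
 *   }, userDelegationKey, "myaccount").toString();
 */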
blobSASSignatureValues.protocol : "", blobSASSignatureValues.version, resource, timestamp, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType ].join("\n"); const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); return { sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey), stringToSign }; } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * IMPLEMENTATION FOR API VERSION FROM 2020-02-10. * * Creates an instance of SASQueryParameters. * * Only accepts required settings needed to create a SAS. For optional settings please * set corresponding properties directly, such as permissions, startsOn. * * WARNING: identifier will be ignored, permissions and expiresOn are required. * * @param blobSASSignatureValues - * @param userDelegationKeyCredential - */ function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); let resource = "c"; let timestamp = blobSASSignatureValues.snapshotTime; if (blobSASSignatureValues.blobName) { resource = "b"; if (blobSASSignatureValues.snapshotTime) resource = "bs"; else if (blobSASSignatureValues.versionId) { resource = "bv"; timestamp = blobSASSignatureValues.versionId; } } let verifiedPermissions; if (blobSASSignatureValues.permissions) if (blobSASSignatureValues.blobName) verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); else verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); const stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), userDelegationKeyCredential.userDelegationKey.signedObjectId, userDelegationKeyCredential.userDelegationKey.signedTenantId, userDelegationKeyCredential.userDelegationKey.signedStartsOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", userDelegationKeyCredential.userDelegationKey.signedService, userDelegationKeyCredential.userDelegationKey.signedVersion, blobSASSignatureValues.preauthorizedAgentObjectId, void 0, blobSASSignatureValues.correlationId, blobSASSignatureValues.ipRange ? 
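/* From API version 2020-02-10 onward the user-delegation string-to-sign also covers
 * the delegated-access identity fields just above: preauthorizedAgentObjectId (the
 * `saoid` query parameter), a reserved slot left undefined here (`suoid`), and
 * correlationId (`scid`). */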
ipRangeToString(blobSASSignatureValues.ipRange) : "", blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", blobSASSignatureValues.version, resource, timestamp, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType ].join("\n"); const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); return { sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId), stringToSign }; } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. * * Creates an instance of SASQueryParameters. * * Only accepts required settings needed to create a SAS. For optional settings please * set corresponding properties directly, such as permissions, startsOn. * * WARNING: identifier will be ignored, permissions and expiresOn are required. * * @param blobSASSignatureValues - * @param userDelegationKeyCredential - */ function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); let resource = "c"; let timestamp = blobSASSignatureValues.snapshotTime; if (blobSASSignatureValues.blobName) { resource = "b"; if (blobSASSignatureValues.snapshotTime) resource = "bs"; else if (blobSASSignatureValues.versionId) { resource = "bv"; timestamp = blobSASSignatureValues.versionId; } } let verifiedPermissions; if (blobSASSignatureValues.permissions) if (blobSASSignatureValues.blobName) verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); else verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); const stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), userDelegationKeyCredential.userDelegationKey.signedObjectId, userDelegationKeyCredential.userDelegationKey.signedTenantId, userDelegationKeyCredential.userDelegationKey.signedStartsOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? 
truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", userDelegationKeyCredential.userDelegationKey.signedService, userDelegationKeyCredential.userDelegationKey.signedVersion, blobSASSignatureValues.preauthorizedAgentObjectId, void 0, blobSASSignatureValues.correlationId, blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", blobSASSignatureValues.version, resource, timestamp, blobSASSignatureValues.encryptionScope, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType ].join("\n"); const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); return { sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, void 0, void 0, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope), stringToSign }; } function getCanonicalName(accountName, containerName, blobName) { const elements = [`/blob/${accountName}/${containerName}`]; if (blobName) elements.push(`/${blobName}`); return elements.join(""); } function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { const version$1 = blobSASSignatureValues.version ? 
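/* getCanonicalName above produces the canonicalized resource path that is signed,
 * e.g. getCanonicalName("myaccount", "my-container", "readme.md") returns
 * "/blob/myaccount/my-container/readme.md". The checks below then gate newer SAS
 * features on the resolved service version (snapshotTime needs >= 2018-11-09,
 * versionId >= 2019-10-10, encryptionScope >= 2020-12-06, and so on). */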
blobSASSignatureValues.version : SERVICE_VERSION; if (blobSASSignatureValues.snapshotTime && version$1 < "2018-11-09") throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.snapshotTime) throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); if (blobSASSignatureValues.versionId && version$1 < "2019-10-10") throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.versionId) throw RangeError("Must provide 'blobName' when providing 'versionId'."); if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.setImmutabilityPolicy && version$1 < "2020-08-04") throw RangeError("'version' must be >= '2020-08-04' when providing 'i' permission."); if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.deleteVersion && version$1 < "2019-10-10") throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.permanentDelete && version$1 < "2019-10-10") throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.tag && version$1 < "2019-12-12") throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); if (version$1 < "2020-02-10" && blobSASSignatureValues.permissions && (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); if (version$1 < "2021-04-10" && blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.filterByTags) throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); if (version$1 < "2020-02-10" && (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); if (blobSASSignatureValues.encryptionScope && version$1 < "2020-12-06") throw RangeError("'version' must be >= '2020-12-06' when providing 'encryptionScope' in SAS."); blobSASSignatureValues.version = version$1; return blobSASSignatureValues; } /** * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. */ var BlobLeaseClient = class { /** * Gets the lease Id. * * @readonly */ get leaseId() { return this._leaseId; } /** * Gets the url. * * @readonly */ get url() { return this._url; } /** * Creates an instance of BlobLeaseClient. * @param client - The client to make the lease operation requests. * @param leaseId - Initial proposed lease id. */ constructor(client, leaseId$1) { const clientContext = client.storageClientContext; this._url = client.url; if (client.name === void 0) { this._isContainer = true; this._containerOrBlobOperation = clientContext.container; } else { this._isContainer = false; this._containerOrBlobOperation = clientContext.blob; } if (!leaseId$1) leaseId$1 = coreUtil.randomUUID(); this._leaseId = leaseId$1; } /** * Establishes and manages a lock on a container for delete operations, or on a blob * for write and delete operations. * The lock duration can be 15 to 60 seconds, or can be infinite. 
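*
* A usage sketch, assuming an existing `containerClient` from this library (the
* thirty-second duration is only an example):
* ```js
* const leaseClient = containerClient.getBlobLeaseClient();
* await leaseClient.acquireLease(30);
* ```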
* @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-container * and * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-blob * * @param duration - Must be between 15 to 60 seconds, or infinite (-1) * @param options - option to configure lease management operations. * @returns Response data for acquire lease operation. */ async acquireLease(duration$1, options = {}) { var _a$2, _b$1, _c$1, _d$1, _e; if (this._isContainer && (((_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.ifMatch) && ((_b$1 = options.conditions) === null || _b$1 === void 0 ? void 0 : _b$1.ifMatch) !== ETagNone || ((_c$1 = options.conditions) === null || _c$1 === void 0 ? void 0 : _c$1.ifNoneMatch) && ((_d$1 = options.conditions) === null || _d$1 === void 0 ? void 0 : _d$1.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); return tracingClient.withSpan("BlobLeaseClient-acquireLease", options, async (updatedOptions) => { var _a$3; return assertResponse(await this._containerOrBlobOperation.acquireLease({ abortSignal: options.abortSignal, duration: duration$1, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$3 = options.conditions) === null || _a$3 === void 0 ? void 0 : _a$3.tagConditions }), proposedLeaseId: this._leaseId, tracingOptions: updatedOptions.tracingOptions })); }); } /** * To change the ID of the lease. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-container * and * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-blob * * @param proposedLeaseId - the proposed new lease Id. * @param options - option to configure lease management operations. * @returns Response data for change lease operation. */ async changeLease(proposedLeaseId$1, options = {}) { var _a$2, _b$1, _c$1, _d$1, _e; if (this._isContainer && (((_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.ifMatch) && ((_b$1 = options.conditions) === null || _b$1 === void 0 ? void 0 : _b$1.ifMatch) !== ETagNone || ((_c$1 = options.conditions) === null || _c$1 === void 0 ? void 0 : _c$1.ifNoneMatch) && ((_d$1 = options.conditions) === null || _d$1 === void 0 ? void 0 : _d$1.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); return tracingClient.withSpan("BlobLeaseClient-changeLease", options, async (updatedOptions) => { var _a$3; const response = assertResponse(await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId$1, { abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$3 = options.conditions) === null || _a$3 === void 0 ? void 0 : _a$3.tagConditions }), tracingOptions: updatedOptions.tracingOptions })); this._leaseId = proposedLeaseId$1; return response; }); } /** * To free the lease if it is no longer needed so that another client may * immediately acquire a lease against the container or the blob. 
* @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-container * and * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-blob * * @param options - option to configure lease management operations. * @returns Response data for release lease operation. */ async releaseLease(options = {}) { var _a$2, _b$1, _c$1, _d$1, _e; if (this._isContainer && (((_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.ifMatch) && ((_b$1 = options.conditions) === null || _b$1 === void 0 ? void 0 : _b$1.ifMatch) !== ETagNone || ((_c$1 = options.conditions) === null || _c$1 === void 0 ? void 0 : _c$1.ifNoneMatch) && ((_d$1 = options.conditions) === null || _d$1 === void 0 ? void 0 : _d$1.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); return tracingClient.withSpan("BlobLeaseClient-releaseLease", options, async (updatedOptions) => { var _a$3; return assertResponse(await this._containerOrBlobOperation.releaseLease(this._leaseId, { abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$3 = options.conditions) === null || _a$3 === void 0 ? void 0 : _a$3.tagConditions }), tracingOptions: updatedOptions.tracingOptions })); }); } /** * To renew the lease. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-container * and * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-blob * * @param options - Optional option to configure lease management operations. * @returns Response data for renew lease operation. */ async renewLease(options = {}) { var _a$2, _b$1, _c$1, _d$1, _e; if (this._isContainer && (((_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.ifMatch) && ((_b$1 = options.conditions) === null || _b$1 === void 0 ? void 0 : _b$1.ifMatch) !== ETagNone || ((_c$1 = options.conditions) === null || _c$1 === void 0 ? void 0 : _c$1.ifNoneMatch) && ((_d$1 = options.conditions) === null || _d$1 === void 0 ? void 0 : _d$1.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); return tracingClient.withSpan("BlobLeaseClient-renewLease", options, async (updatedOptions) => { var _a$3; return this._containerOrBlobOperation.renewLease(this._leaseId, { abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$3 = options.conditions) === null || _a$3 === void 0 ? void 0 : _a$3.tagConditions }), tracingOptions: updatedOptions.tracingOptions }); }); } /** * To end the lease but ensure that another client cannot acquire a new lease * until the current lease period has expired. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-container * and * @see https://learn.microsoft.com/en-us/rest/api/storageservices/lease-blob * * @param breakPeriod - Break period * @param options - Optional options to configure lease management operations. * @returns Response data for break lease operation. 
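*
* A sketch of the two usual ways a lease is ended (continuing the example above):
* ```js
* await leaseClient.releaseLease(); // frees the lease immediately
* await leaseClient.breakLease(10); // or: block new leases for ~10 seconds
* ```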
*/ async breakLease(breakPeriod$1, options = {}) { var _a$2, _b$1, _c$1, _d$1, _e; if (this._isContainer && (((_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.ifMatch) && ((_b$1 = options.conditions) === null || _b$1 === void 0 ? void 0 : _b$1.ifMatch) !== ETagNone || ((_c$1 = options.conditions) === null || _c$1 === void 0 ? void 0 : _c$1.ifNoneMatch) && ((_d$1 = options.conditions) === null || _d$1 === void 0 ? void 0 : _d$1.ifNoneMatch) !== ETagNone || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); return tracingClient.withSpan("BlobLeaseClient-breakLease", options, async (updatedOptions) => { var _a$3; const operationOptions = { abortSignal: options.abortSignal, breakPeriod: breakPeriod$1, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$3 = options.conditions) === null || _a$3 === void 0 ? void 0 : _a$3.tagConditions }), tracingOptions: updatedOptions.tracingOptions }; return assertResponse(await this._containerOrBlobOperation.breakLease(operationOptions)); }); } }; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * A Node.js ReadableStream that will internally retry when the internal ReadableStream unexpectedly ends. */ var RetriableReadableStream = class extends stream$2.Readable { /** * Creates an instance of RetriableReadableStream. * * @param source - The current ReadableStream returned from getter * @param getter - A method that issues a new download request and returns * a new ReadableStream from the specified offset * @param offset - Offset position in original data source to read * @param count - How much data in original data source to read * @param options - */ constructor(source, getter, offset, count, options = {}) { super({ highWaterMark: options.highWaterMark }); this.retries = 0; this.sourceDataHandler = (data) => { if (this.options.doInjectErrorOnce) { this.options.doInjectErrorOnce = void 0; this.source.pause(); this.sourceErrorOrEndHandler(); this.source.destroy(); return; } this.offset += data.length; if (this.onProgress) this.onProgress({ loadedBytes: this.offset - this.start }); if (!this.push(data)) this.source.pause(); }; this.sourceAbortedHandler = () => { const abortError = new abortController.AbortError("The operation was aborted."); this.destroy(abortError); }; this.sourceErrorOrEndHandler = (err) => { if (err && err.name === "AbortError") { this.destroy(err); return; } this.removeSourceEventHandlers(); if (this.offset - 1 === this.end) this.push(null); else if (this.offset <= this.end) if (this.retries < this.maxRetryRequests) { this.retries += 1; this.getter(this.offset).then((newSource) => { this.source = newSource; this.setSourceEventHandlers(); return; }).catch((error) => { this.destroy(error); }); } else this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetries limitation. 
Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); else this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); }; this.getter = getter; this.source = source; this.start = offset; this.offset = offset; this.end = offset + count - 1; this.maxRetryRequests = options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; this.onProgress = options.onProgress; this.options = options; this.setSourceEventHandlers(); } _read() { this.source.resume(); } setSourceEventHandlers() { this.source.on("data", this.sourceDataHandler); this.source.on("end", this.sourceErrorOrEndHandler); this.source.on("error", this.sourceErrorOrEndHandler); this.source.on("aborted", this.sourceAbortedHandler); } removeSourceEventHandlers() { this.source.removeListener("data", this.sourceDataHandler); this.source.removeListener("end", this.sourceErrorOrEndHandler); this.source.removeListener("error", this.sourceErrorOrEndHandler); this.source.removeListener("aborted", this.sourceAbortedHandler); } _destroy(error, callback) { this.removeSourceEventHandlers(); this.source.destroy(); callback(error === null ? void 0 : error); } }; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * BlobDownloadResponse implements the BlobDownloadResponseParsed interface, and in the Node.js runtime it will * automatically retry when the internal read stream unexpectedly ends. (This kind of unexpected end cannot * trigger the retries defined in the pipeline retry policy.) * * The {@link readableStreamBody} stream retries at the underlying layer, so you can use it as a normal Node.js * Readable stream. */ var BlobDownloadResponse = class { /** * Indicates that the service supports * requests for partial file content. * * @readonly */ get acceptRanges() { return this.originalResponse.acceptRanges; } /** * Returns the cache-control value if it was previously specified * for the file. * * @readonly */ get cacheControl() { return this.originalResponse.cacheControl; } /** * Returns the value that was specified * for the 'x-ms-content-disposition' header and specifies how to process the * response. * * @readonly */ get contentDisposition() { return this.originalResponse.contentDisposition; } /** * Returns the value that was specified * for the Content-Encoding request header. * * @readonly */ get contentEncoding() { return this.originalResponse.contentEncoding; } /** * Returns the value that was specified * for the Content-Language request header. * * @readonly */ get contentLanguage() { return this.originalResponse.contentLanguage; } /** * The current sequence number for a * page blob. This header is not returned for block blobs or append blobs. * * @readonly */ get blobSequenceNumber() { return this.originalResponse.blobSequenceNumber; } /** * The blob's type. Possible values include: * 'BlockBlob', 'PageBlob', 'AppendBlob'. * * @readonly */ get blobType() { return this.originalResponse.blobType; } /** * The number of bytes present in the * response body. * * @readonly */ get contentLength() { return this.originalResponse.contentLength; } /** * If the file has an MD5 hash and the * request is to read the full file, this response header is returned so that * the client can check for message content integrity. 
If the request is to * read a specified range and the 'x-ms-range-get-content-md5' is set to * true, then the request returns an MD5 hash for the range, as long as the * range size is less than or equal to 4 MB. If neither of these sets of * conditions is true, then no value is returned for the 'Content-MD5' * header. * * @readonly */ get contentMD5() { return this.originalResponse.contentMD5; } /** * Indicates the range of bytes returned if * the client requested a subset of the file by setting the Range request * header. * * @readonly */ get contentRange() { return this.originalResponse.contentRange; } /** * The content type specified for the file. * The default content type is 'application/octet-stream' * * @readonly */ get contentType() { return this.originalResponse.contentType; } /** * Conclusion time of the last attempted * Copy File operation where this file was the destination file. This value * can specify the time of a completed, aborted, or failed copy attempt. * * @readonly */ get copyCompletedOn() { return this.originalResponse.copyCompletedOn; } /** * String identifier for the last attempted Copy * File operation where this file was the destination file. * * @readonly */ get copyId() { return this.originalResponse.copyId; } /** * Contains the number of bytes copied and * the total bytes in the source in the last attempted Copy File operation * where this file was the destination file. Can show between 0 and * Content-Length bytes copied. * * @readonly */ get copyProgress() { return this.originalResponse.copyProgress; } /** * URL up to 2KB in length that specifies the * source file used in the last attempted Copy File operation where this file * was the destination file. * * @readonly */ get copySource() { return this.originalResponse.copySource; } /** * State of the copy operation * identified by 'x-ms-copy-id'. Possible values include: 'pending', * 'success', 'aborted', 'failed' * * @readonly */ get copyStatus() { return this.originalResponse.copyStatus; } /** * Only appears when * x-ms-copy-status is failed or pending. Describes cause of fatal or * non-fatal copy operation failure. * * @readonly */ get copyStatusDescription() { return this.originalResponse.copyStatusDescription; } /** * When a blob is leased, * specifies whether the lease is of infinite or fixed duration. Possible * values include: 'infinite', 'fixed'. * * @readonly */ get leaseDuration() { return this.originalResponse.leaseDuration; } /** * Lease state of the blob. Possible * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. * * @readonly */ get leaseState() { return this.originalResponse.leaseState; } /** * The current lease status of the * blob. Possible values include: 'locked', 'unlocked'. * * @readonly */ get leaseStatus() { return this.originalResponse.leaseStatus; } /** * A UTC date/time value generated by the service that * indicates the time at which the response was initiated. * * @readonly */ get date() { return this.originalResponse.date; } /** * The number of committed blocks * present in the blob. This header is returned only for append blobs. * * @readonly */ get blobCommittedBlockCount() { return this.originalResponse.blobCommittedBlockCount; } /** * The ETag contains a value that you can use to * perform operations conditionally, in quotes. * * @readonly */ get etag() { return this.originalResponse.etag; } /** * The number of tags associated with the blob * * @readonly */ get tagCount() { return this.originalResponse.tagCount; } /** * The error code. 
* * @readonly */ get errorCode() { return this.originalResponse.errorCode; } /** * The value of this header is set to * true if the file data and application metadata are completely encrypted * using the specified algorithm. Otherwise, the value is set to false (when * the file is unencrypted, or if only parts of the file/application metadata * are encrypted). * * @readonly */ get isServerEncrypted() { return this.originalResponse.isServerEncrypted; } /** * If the blob has a MD5 hash, and if * request contains range header (Range or x-ms-range), this response header * is returned with the value of the whole blob's MD5 value. This value may * or may not be equal to the value returned in Content-MD5 header, with the * latter calculated from the requested range. * * @readonly */ get blobContentMD5() { return this.originalResponse.blobContentMD5; } /** * Returns the date and time the file was last * modified. Any operation that modifies the file or its properties updates * the last modified time. * * @readonly */ get lastModified() { return this.originalResponse.lastModified; } /** * Returns the UTC date and time generated by the service that indicates the time at which the blob was * last read or written to. * * @readonly */ get lastAccessed() { return this.originalResponse.lastAccessed; } /** * Returns the date and time the blob was created. * * @readonly */ get createdOn() { return this.originalResponse.createdOn; } /** * A name-value pair * to associate with a file storage object. * * @readonly */ get metadata() { return this.originalResponse.metadata; } /** * This header uniquely identifies the request * that was made and can be used for troubleshooting the request. * * @readonly */ get requestId() { return this.originalResponse.requestId; } /** * If a client request id header is sent in the request, this header will be present in the * response with the same value. * * @readonly */ get clientRequestId() { return this.originalResponse.clientRequestId; } /** * Indicates the version of the Blob service used * to execute the request. * * @readonly */ get version() { return this.originalResponse.version; } /** * Indicates the versionId of the downloaded blob version. * * @readonly */ get versionId() { return this.originalResponse.versionId; } /** * Indicates whether version of this blob is a current version. * * @readonly */ get isCurrentVersion() { return this.originalResponse.isCurrentVersion; } /** * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned * when the blob was encrypted with a customer-provided key. * * @readonly */ get encryptionKeySha256() { return this.originalResponse.encryptionKeySha256; } /** * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to * true, then the request returns a crc64 for the range, as long as the range size is less than * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is * specified in the same request, it will fail with 400(Bad Request) */ get contentCrc64() { return this.originalResponse.contentCrc64; } /** * Object Replication Policy Id of the destination blob. * * @readonly */ get objectReplicationDestinationPolicyId() { return this.originalResponse.objectReplicationDestinationPolicyId; } /** * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. * * @readonly */ get objectReplicationSourceProperties() { return this.originalResponse.objectReplicationSourceProperties; } /** * If this blob has been sealed. 
* * @readonly */ get isSealed() { return this.originalResponse.isSealed; } /** * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. * * @readonly */ get immutabilityPolicyExpiresOn() { return this.originalResponse.immutabilityPolicyExpiresOn; } /** * Indicates immutability policy mode. * * @readonly */ get immutabilityPolicyMode() { return this.originalResponse.immutabilityPolicyMode; } /** * Indicates if a legal hold is present on the blob. * * @readonly */ get legalHold() { return this.originalResponse.legalHold; } /** * The response body as a browser Blob. * Always undefined in node.js. * * @readonly */ get contentAsBlob() { return this.originalResponse.blobBody; } /** * The response body as a node.js Readable stream. * Always undefined in the browser. * * It will automatically retry when the internal read stream unexpectedly ends. * * @readonly */ get readableStreamBody() { return coreUtil.isNode ? this.blobDownloadStream : void 0; } /** * The HTTP response. */ get _response() { return this.originalResponse._response; } /** * Creates an instance of BlobDownloadResponse. * * @param originalResponse - * @param getter - * @param offset - * @param count - * @param options - */ constructor(originalResponse$1, getter, offset, count, options = {}) { this.originalResponse = originalResponse$1; this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); } }; const AVRO_SYNC_MARKER_SIZE = 16; const AVRO_INIT_BYTES = new Uint8Array([ 79, 98, 106, 1 ]); const AVRO_CODEC_KEY = "avro.codec"; const AVRO_SCHEMA_KEY = "avro.schema"; var AvroParser = class AvroParser { /** * Reads a fixed number of bytes from the stream. * * @param stream - * @param length - * @param options - */ static async readFixedBytes(stream$3, length, options = {}) { const bytes = await stream$3.read(length, { abortSignal: options.abortSignal }); if (bytes.length !== length) throw new Error("Hit stream end."); return bytes; } /** * Reads a single byte from the stream. * * @param stream - * @param options - */ static async readByte(stream$3, options = {}) { const buf = await AvroParser.readFixedBytes(stream$3, 1, options); return buf[0]; } static async readZigZagLong(stream$3, options = {}) { let zigZagEncoded = 0; let significanceInBit = 0; let byte, haveMoreByte, significanceInFloat; do { byte = await AvroParser.readByte(stream$3, options); haveMoreByte = byte & 128; zigZagEncoded |= (byte & 127) << significanceInBit; significanceInBit += 7; } while (haveMoreByte && significanceInBit < 28); if (haveMoreByte) { significanceInFloat = 268435456; do { byte = await AvroParser.readByte(stream$3, options); zigZagEncoded += (byte & 127) * significanceInFloat; significanceInFloat *= 128; } while (byte & 128); const res = (zigZagEncoded % 2 ? 
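/* Zig-zag decoding maps the unsigned varint back to a signed value:
 * 0 -> 0, 1 -> -1, 2 -> 1, 3 -> -2, and so on. The float path here computes
 * (n % 2) ? -(n + 1) / 2 : n / 2, and the 32-bit path below uses the equivalent
 * bitwise form (n >> 1) ^ -(n & 1); for example n = 3 decodes to -2 either way. */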
-(zigZagEncoded + 1) : zigZagEncoded) / 2; if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) throw new Error("Integer overflow."); return res; } return zigZagEncoded >> 1 ^ -(zigZagEncoded & 1); } static async readLong(stream$3, options = {}) { return AvroParser.readZigZagLong(stream$3, options); } static async readInt(stream$3, options = {}) { return AvroParser.readZigZagLong(stream$3, options); } static async readNull() { return null; } static async readBoolean(stream$3, options = {}) { const b = await AvroParser.readByte(stream$3, options); if (b === 1) return true; else if (b === 0) return false; else throw new Error("Byte was not a boolean."); } static async readFloat(stream$3, options = {}) { const u8arr = await AvroParser.readFixedBytes(stream$3, 4, options); const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); return view.getFloat32(0, true); } static async readDouble(stream$3, options = {}) { const u8arr = await AvroParser.readFixedBytes(stream$3, 8, options); const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); return view.getFloat64(0, true); } static async readBytes(stream$3, options = {}) { const size = await AvroParser.readLong(stream$3, options); if (size < 0) throw new Error("Bytes size was negative."); return stream$3.read(size, { abortSignal: options.abortSignal }); } static async readString(stream$3, options = {}) { const u8arr = await AvroParser.readBytes(stream$3, options); const utf8decoder = new TextDecoder(); return utf8decoder.decode(u8arr); } static async readMapPair(stream$3, readItemMethod, options = {}) { const key = await AvroParser.readString(stream$3, options); const value = await readItemMethod(stream$3, options); return { key, value }; } static async readMap(stream$3, readItemMethod, options = {}) { const readPairMethod = (s$1, opts = {}) => { return AvroParser.readMapPair(s$1, readItemMethod, opts); }; const pairs = await AvroParser.readArray(stream$3, readPairMethod, options); const dict = {}; for (const pair of pairs) dict[pair.key] = pair.value; return dict; } static async readArray(stream$3, readItemMethod, options = {}) { const items = []; for (let count = await AvroParser.readLong(stream$3, options); count !== 0; count = await AvroParser.readLong(stream$3, options)) { if (count < 0) { await AvroParser.readLong(stream$3, options); count = -count; } while (count--) { const item = await readItemMethod(stream$3, options); items.push(item); } } return items; } }; var AvroComplex; (function(AvroComplex$1) { AvroComplex$1["RECORD"] = "record"; AvroComplex$1["ENUM"] = "enum"; AvroComplex$1["ARRAY"] = "array"; AvroComplex$1["MAP"] = "map"; AvroComplex$1["UNION"] = "union"; AvroComplex$1["FIXED"] = "fixed"; })(AvroComplex || (AvroComplex = {})); var AvroPrimitive; (function(AvroPrimitive$1) { AvroPrimitive$1["NULL"] = "null"; AvroPrimitive$1["BOOLEAN"] = "boolean"; AvroPrimitive$1["INT"] = "int"; AvroPrimitive$1["LONG"] = "long"; AvroPrimitive$1["FLOAT"] = "float"; AvroPrimitive$1["DOUBLE"] = "double"; AvroPrimitive$1["BYTES"] = "bytes"; AvroPrimitive$1["STRING"] = "string"; })(AvroPrimitive || (AvroPrimitive = {})); var AvroType = class AvroType { /** * Determines the AvroType from the Avro Schema. 
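* For example, `"string"` maps to an AvroPrimitiveType, `["null", "long"]` to an
* AvroUnionType, and `{ "type": "record", "name": "...", "fields": [...] }` to an
* AvroRecordType (see the concrete cases below).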
*/ static fromSchema(schema) { if (typeof schema === "string") return AvroType.fromStringSchema(schema); else if (Array.isArray(schema)) return AvroType.fromArraySchema(schema); else return AvroType.fromObjectSchema(schema); } static fromStringSchema(schema) { switch (schema) { case AvroPrimitive.NULL: case AvroPrimitive.BOOLEAN: case AvroPrimitive.INT: case AvroPrimitive.LONG: case AvroPrimitive.FLOAT: case AvroPrimitive.DOUBLE: case AvroPrimitive.BYTES: case AvroPrimitive.STRING: return new AvroPrimitiveType(schema); default: throw new Error(`Unexpected Avro type ${schema}`); } } static fromArraySchema(schema) { return new AvroUnionType(schema.map(AvroType.fromSchema)); } static fromObjectSchema(schema) { const type = schema.type; try { return AvroType.fromStringSchema(type); } catch (_a$2) {} switch (type) { case AvroComplex.RECORD: if (schema.aliases) throw new Error(`aliases currently is not supported, schema: ${schema}`); if (!schema.name) throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); const fields = {}; if (!schema.fields) throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); for (const field of schema.fields) fields[field.name] = AvroType.fromSchema(field.type); return new AvroRecordType(fields, schema.name); case AvroComplex.ENUM: if (schema.aliases) throw new Error(`aliases currently is not supported, schema: ${schema}`); if (!schema.symbols) throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); return new AvroEnumType(schema.symbols); case AvroComplex.MAP: if (!schema.values) throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); return new AvroMapType(AvroType.fromSchema(schema.values)); case AvroComplex.ARRAY: case AvroComplex.FIXED: default: throw new Error(`Unexpected Avro type ${type} in ${schema}`); } } }; var AvroPrimitiveType = class extends AvroType { constructor(primitive) { super(); this._primitive = primitive; } read(stream$3, options = {}) { switch (this._primitive) { case AvroPrimitive.NULL: return AvroParser.readNull(); case AvroPrimitive.BOOLEAN: return AvroParser.readBoolean(stream$3, options); case AvroPrimitive.INT: return AvroParser.readInt(stream$3, options); case AvroPrimitive.LONG: return AvroParser.readLong(stream$3, options); case AvroPrimitive.FLOAT: return AvroParser.readFloat(stream$3, options); case AvroPrimitive.DOUBLE: return AvroParser.readDouble(stream$3, options); case AvroPrimitive.BYTES: return AvroParser.readBytes(stream$3, options); case AvroPrimitive.STRING: return AvroParser.readString(stream$3, options); default: throw new Error("Unknown Avro Primitive"); } } }; var AvroEnumType = class extends AvroType { constructor(symbols) { super(); this._symbols = symbols; } async read(stream$3, options = {}) { const value = await AvroParser.readInt(stream$3, options); return this._symbols[value]; } }; var AvroUnionType = class extends AvroType { constructor(types) { super(); this._types = types; } async read(stream$3, options = {}) { const typeIndex = await AvroParser.readInt(stream$3, options); return this._types[typeIndex].read(stream$3, options); } }; var AvroMapType = class extends AvroType { constructor(itemType) { super(); this._itemType = itemType; } read(stream$3, options = {}) { const readItemMethod = (s$1, opts) => { return this._itemType.read(s$1, opts); }; return AvroParser.readMap(stream$3, readItemMethod, options); } }; var AvroRecordType = class extends AvroType { constructor(fields, name) { super(); this._fields = 
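/* Fields are read back in schema order by read() below, which also stamps the
 * record's name onto each decoded object under the "$schema" key. */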
fields; this._name = name; } async read(stream$3, options = {}) { const record = {}; record["$schema"] = this._name; for (const key in this._fields) if (Object.prototype.hasOwnProperty.call(this._fields, key)) record[key] = await this._fields[key].read(stream$3, options); return record; } }; function arraysEqual(a, b) { if (a === b) return true; if (a == null || b == null) return false; if (a.length !== b.length) return false; for (let i = 0; i < a.length; ++i) if (a[i] !== b[i]) return false; return true; } var AvroReader = class { get blockOffset() { return this._blockOffset; } get objectIndex() { return this._objectIndex; } constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { this._dataStream = dataStream; this._headerStream = headerStream || dataStream; this._initialized = false; this._blockOffset = currentBlockOffset || 0; this._objectIndex = indexWithinCurrentBlock || 0; this._initialBlockOffset = currentBlockOffset || 0; } async initialize(options = {}) { const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { abortSignal: options.abortSignal }); if (!arraysEqual(header, AVRO_INIT_BYTES)) throw new Error("Stream is not an Avro file."); this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { abortSignal: options.abortSignal }); const codec = this._metadata[AVRO_CODEC_KEY]; if (!(codec === void 0 || codec === null || codec === "null")) throw new Error("Codecs are not supported"); this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { abortSignal: options.abortSignal }); const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); this._itemType = AvroType.fromSchema(schema); if (this._blockOffset === 0) this._blockOffset = this._initialBlockOffset + this._dataStream.position; this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); this._initialized = true; if (this._objectIndex && this._objectIndex > 0) for (let i = 0; i < this._objectIndex; i++) { await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); this._itemsRemainingInBlock--; } } hasNext() { return !this._initialized || this._itemsRemainingInBlock > 0; } parseObjects() { return tslib.__asyncGenerator(this, arguments, function* parseObjects_1(options = {}) { if (!this._initialized) yield tslib.__await(this.initialize(options)); while (this.hasNext()) { const result = yield tslib.__await(this._itemType.read(this._dataStream, { abortSignal: options.abortSignal })); this._itemsRemainingInBlock--; this._objectIndex++; if (this._itemsRemainingInBlock === 0) { const marker$1 = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { abortSignal: options.abortSignal })); this._blockOffset = this._initialBlockOffset + this._dataStream.position; this._objectIndex = 0; if (!arraysEqual(this._syncMarker, marker$1)) throw new Error("Stream is not a valid Avro file."); try { this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); } catch (_a$2) { this._itemsRemainingInBlock = 0; } if (this._itemsRemainingInBlock > 0) yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); } yield yield tslib.__await(result); } }); } }; var AvroReadable = class {}; const ABORT_ERROR = new 
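/* A sketch of driving the Avro reader directly over a Node.js Readable
 * (AvroReadableFromStream is defined just below; the source stream is assumed):
 *
 *   const reader = new AvroReader(new AvroReadableFromStream(nodeReadable));
 *   for await (const record of reader.parseObjects()) {
 *     console.log(record.$schema, record);
 *   }
 */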
abortController.AbortError("Reading from the avro stream was aborted."); var AvroReadableFromStream = class extends AvroReadable { toUint8Array(data) { if (typeof data === "string") return Buffer.from(data); return data; } constructor(readable) { super(); this._readable = readable; this._position = 0; } get position() { return this._position; } async read(size, options = {}) { var _a$2; if ((_a$2 = options.abortSignal) === null || _a$2 === void 0 ? void 0 : _a$2.aborted) throw ABORT_ERROR; if (size < 0) throw new Error(`size parameter should be positive: ${size}`); if (size === 0) return new Uint8Array(); if (!this._readable.readable) throw new Error("Stream no longer readable."); const chunk = this._readable.read(size); if (chunk) { this._position += chunk.length; return this.toUint8Array(chunk); } else return new Promise((resolve, reject) => { const cleanUp = () => { this._readable.removeListener("readable", readableCallback); this._readable.removeListener("error", rejectCallback); this._readable.removeListener("end", rejectCallback); this._readable.removeListener("close", rejectCallback); if (options.abortSignal) options.abortSignal.removeEventListener("abort", abortHandler); }; const readableCallback = () => { const callbackChunk = this._readable.read(size); if (callbackChunk) { this._position += callbackChunk.length; cleanUp(); resolve(this.toUint8Array(callbackChunk)); } }; const rejectCallback = () => { cleanUp(); reject(); }; const abortHandler = () => { cleanUp(); reject(ABORT_ERROR); }; this._readable.on("readable", readableCallback); this._readable.once("error", rejectCallback); this._readable.once("end", rejectCallback); this._readable.once("close", rejectCallback); if (options.abortSignal) options.abortSignal.addEventListener("abort", abortHandler); }); } }; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query. */ var BlobQuickQueryStream = class extends stream$2.Readable { /** * Creates an instance of BlobQuickQueryStream. 
* * @param source - The current ReadableStream returned from getter * @param options - */ constructor(source, options = {}) { super(); this.avroPaused = true; this.source = source; this.onProgress = options.onProgress; this.onError = options.onError; this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); } _read() { if (this.avroPaused) this.readInternal().catch((err) => { this.emit("error", err); }); } async readInternal() { this.avroPaused = false; let avroNext; do { avroNext = await this.avroIter.next(); if (avroNext.done) break; const obj = avroNext.value; const schema = obj.$schema; if (typeof schema !== "string") throw Error("Missing schema in avro record."); switch (schema) { case "com.microsoft.azure.storage.queryBlobContents.resultData": { const data = obj.data; if (data instanceof Uint8Array === false) throw Error("Invalid data in avro result record."); if (!this.push(Buffer.from(data))) this.avroPaused = true; } break; case "com.microsoft.azure.storage.queryBlobContents.progress": { const bytesScanned = obj.bytesScanned; if (typeof bytesScanned !== "number") throw Error("Invalid bytesScanned in avro progress record."); if (this.onProgress) this.onProgress({ loadedBytes: bytesScanned }); } break; case "com.microsoft.azure.storage.queryBlobContents.end": if (this.onProgress) { const totalBytes = obj.totalBytes; if (typeof totalBytes !== "number") throw Error("Invalid totalBytes in avro end record."); this.onProgress({ loadedBytes: totalBytes }); } this.push(null); break; case "com.microsoft.azure.storage.queryBlobContents.error": if (this.onError) { const fatal = obj.fatal; if (typeof fatal !== "boolean") throw Error("Invalid fatal in avro error record."); const name = obj.name; if (typeof name !== "string") throw Error("Invalid name in avro error record."); const description = obj.description; if (typeof description !== "string") throw Error("Invalid description in avro error record."); const position = obj.position; if (typeof position !== "number") throw Error("Invalid position in avro error record."); this.onError({ position, name, isFatal: fatal, description }); } break; default: throw Error(`Unknown schema ${schema} in avro record.`); } } while (!avroNext.done && !this.avroPaused); } }; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * BlobQueryResponse implements the BlobDownloadResponseModel interface, and in the Node.js runtime it will * parse Avro data returned by blob query. */ var BlobQueryResponse = class { /** * Indicates that the service supports * requests for partial file content. * * @readonly */ get acceptRanges() { return this.originalResponse.acceptRanges; } /** * Returns the cache-control value if it was previously specified * for the file. * * @readonly */ get cacheControl() { return this.originalResponse.cacheControl; } /** * Returns the value that was specified * for the 'x-ms-content-disposition' header and specifies how to process the * response. * * @readonly */ get contentDisposition() { return this.originalResponse.contentDisposition; } /** * Returns the value that was specified * for the Content-Encoding request header. * * @readonly */ get contentEncoding() { return this.originalResponse.contentEncoding; } /** * Returns the value that was specified * for the Content-Language request header. * * @readonly */ get contentLanguage() { return this.originalResponse.contentLanguage; } /** * The current sequence number for a * page blob. 
This header is not returned for block blobs or append blobs. * * @readonly */ get blobSequenceNumber() { return this.originalResponse.blobSequenceNumber; } /** * The blob's type. Possible values include: * 'BlockBlob', 'PageBlob', 'AppendBlob'. * * @readonly */ get blobType() { return this.originalResponse.blobType; } /** * The number of bytes present in the * response body. * * @readonly */ get contentLength() { return this.originalResponse.contentLength; } /** * If the file has an MD5 hash and the * request is to read the full file, this response header is returned so that * the client can check for message content integrity. If the request is to * read a specified range and the 'x-ms-range-get-content-md5' is set to * true, then the request returns an MD5 hash for the range, as long as the * range size is less than or equal to 4 MB. If neither of these sets of * conditions is true, then no value is returned for the 'Content-MD5' * header. * * @readonly */ get contentMD5() { return this.originalResponse.contentMD5; } /** * Indicates the range of bytes returned if * the client requested a subset of the file by setting the Range request * header. * * @readonly */ get contentRange() { return this.originalResponse.contentRange; } /** * The content type specified for the file. * The default content type is 'application/octet-stream' * * @readonly */ get contentType() { return this.originalResponse.contentType; } /** * Conclusion time of the last attempted * Copy File operation where this file was the destination file. This value * can specify the time of a completed, aborted, or failed copy attempt. * * @readonly */ get copyCompletedOn() { return void 0; } /** * String identifier for the last attempted Copy * File operation where this file was the destination file. * * @readonly */ get copyId() { return this.originalResponse.copyId; } /** * Contains the number of bytes copied and * the total bytes in the source in the last attempted Copy File operation * where this file was the destination file. Can show between 0 and * Content-Length bytes copied. * * @readonly */ get copyProgress() { return this.originalResponse.copyProgress; } /** * URL up to 2KB in length that specifies the * source file used in the last attempted Copy File operation where this file * was the destination file. * * @readonly */ get copySource() { return this.originalResponse.copySource; } /** * State of the copy operation * identified by 'x-ms-copy-id'. Possible values include: 'pending', * 'success', 'aborted', 'failed' * * @readonly */ get copyStatus() { return this.originalResponse.copyStatus; } /** * Only appears when * x-ms-copy-status is failed or pending. Describes cause of fatal or * non-fatal copy operation failure. * * @readonly */ get copyStatusDescription() { return this.originalResponse.copyStatusDescription; } /** * When a blob is leased, * specifies whether the lease is of infinite or fixed duration. Possible * values include: 'infinite', 'fixed'. * * @readonly */ get leaseDuration() { return this.originalResponse.leaseDuration; } /** * Lease state of the blob. Possible * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. * * @readonly */ get leaseState() { return this.originalResponse.leaseState; } /** * The current lease status of the * blob. Possible values include: 'locked', 'unlocked'. * * @readonly */ get leaseStatus() { return this.originalResponse.leaseStatus; } /** * A UTC date/time value generated by the service that * indicates the time at which the response was initiated. 
* * @readonly */ get date() { return this.originalResponse.date; } /** * The number of committed blocks * present in the blob. This header is returned only for append blobs. * * @readonly */ get blobCommittedBlockCount() { return this.originalResponse.blobCommittedBlockCount; } /** * The ETag contains a value that you can use to * perform operations conditionally, in quotes. * * @readonly */ get etag() { return this.originalResponse.etag; } /** * The error code. * * @readonly */ get errorCode() { return this.originalResponse.errorCode; } /** * The value of this header is set to * true if the file data and application metadata are completely encrypted * using the specified algorithm. Otherwise, the value is set to false (when * the file is unencrypted, or if only parts of the file/application metadata * are encrypted). * * @readonly */ get isServerEncrypted() { return this.originalResponse.isServerEncrypted; } /** * If the blob has an MD5 hash, and if the * request contains a range header (Range or x-ms-range), this response header * is returned with the value of the whole blob's MD5 value. This value may * or may not be equal to the value returned in the Content-MD5 header, with the * latter calculated from the requested range. * * @readonly */ get blobContentMD5() { return this.originalResponse.blobContentMD5; } /** * Returns the date and time the file was last * modified. Any operation that modifies the file or its properties updates * the last modified time. * * @readonly */ get lastModified() { return this.originalResponse.lastModified; } /** * A name-value pair * to associate with a file storage object. * * @readonly */ get metadata() { return this.originalResponse.metadata; } /** * This header uniquely identifies the request * that was made and can be used for troubleshooting the request. * * @readonly */ get requestId() { return this.originalResponse.requestId; } /** * If a client request id header is sent in the request, this header will be present in the * response with the same value. * * @readonly */ get clientRequestId() { return this.originalResponse.clientRequestId; } /** * Indicates the version of the File service used * to execute the request. * * @readonly */ get version() { return this.originalResponse.version; } /** * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned * when the blob was encrypted with a customer-provided key. * * @readonly */ get encryptionKeySha256() { return this.originalResponse.encryptionKeySha256; } /** * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to * true, then the request returns a crc64 for the range, as long as the range size is less than * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 are * specified in the same request, it will fail with 400 (Bad Request) */ get contentCrc64() { return this.originalResponse.contentCrc64; } /** * The response body as a browser Blob. * Always undefined in node.js. * * @readonly */ get blobBody() { return void 0; } /** * The response body as a node.js Readable stream. * Always undefined in the browser. * * It will parse avro data returned by blob query. * * @readonly */ get readableStreamBody() { return coreUtil.isNode ? this.blobDownloadStream : void 0; } /** * The HTTP response. */ get _response() { return this.originalResponse._response; } /** * Creates an instance of BlobQueryResponse.
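* A minimal usage sketch (the `rawResponse` value and the handlers below are
* illustrative assumptions, not part of this file):
*
* ```js
* // Wrap a raw blob query response so the avro payload is parsed while reading.
* const queryResponse = new BlobQueryResponse(rawResponse, {
*   onProgress: (ev) => console.log(`scanned ${ev.loadedBytes} bytes`),
*   onError: (err) => console.error(`query error at ${err.position}: ${err.description}`)
* });
* queryResponse.readableStreamBody.pipe(process.stdout);
* ```
*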
* * @param originalResponse - * @param options - */ constructor(originalResponse$1, options = {}) { this.originalResponse = originalResponse$1; this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); } }; /** * Represents the access tier on a blob. * For detailed information about block blob level tiering see {@link https://learn.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} */ exports.BlockBlobTier = void 0; (function(BlockBlobTier) { /** * Optimized for storing data that is accessed frequently. */ BlockBlobTier["Hot"] = "Hot"; /** * Optimized for storing data that is infrequently accessed and stored for at least 30 days. */ BlockBlobTier["Cool"] = "Cool"; /** * Optimized for storing data that is rarely accessed. */ BlockBlobTier["Cold"] = "Cold"; /** * Optimized for storing data that is rarely accessed and stored for at least 180 days * with flexible latency requirements (on the order of hours). */ BlockBlobTier["Archive"] = "Archive"; })(exports.BlockBlobTier || (exports.BlockBlobTier = {})); /** * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. * Please see {@link https://learn.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} * for detailed information on the corresponding IOPS and throughput per PageBlobTier. */ exports.PremiumPageBlobTier = void 0; (function(PremiumPageBlobTier) { /** * P4 Tier. */ PremiumPageBlobTier["P4"] = "P4"; /** * P6 Tier. */ PremiumPageBlobTier["P6"] = "P6"; /** * P10 Tier. */ PremiumPageBlobTier["P10"] = "P10"; /** * P15 Tier. */ PremiumPageBlobTier["P15"] = "P15"; /** * P20 Tier. */ PremiumPageBlobTier["P20"] = "P20"; /** * P30 Tier. */ PremiumPageBlobTier["P30"] = "P30"; /** * P40 Tier. */ PremiumPageBlobTier["P40"] = "P40"; /** * P50 Tier. */ PremiumPageBlobTier["P50"] = "P50"; /** * P60 Tier. */ PremiumPageBlobTier["P60"] = "P60"; /** * P70 Tier. */ PremiumPageBlobTier["P70"] = "P70"; /** * P80 Tier. */ PremiumPageBlobTier["P80"] = "P80"; })(exports.PremiumPageBlobTier || (exports.PremiumPageBlobTier = {})); function toAccessTier(tier$1) { if (tier$1 === void 0) return void 0; return tier$1; } function ensureCpkIfSpecified(cpk, isHttps) { if (cpk && !isHttps) throw new RangeError("Customer-provided encryption key must be used over HTTPS."); if (cpk && !cpk.encryptionAlgorithm) cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; } /** * Defines the known cloud audiences for Storage. */ exports.StorageBlobAudience = void 0; (function(StorageBlobAudience) { /** * The OAuth scope to use to retrieve an AAD token for Azure Storage. */ StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; /** * The OAuth scope to use to retrieve an AAD token for Azure Disk. */ StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; })(exports.StorageBlobAudience || (exports.StorageBlobAudience = {})); /** * * Gets the OAuth audience for a storage account's blob service. */ function getBlobServiceAccountAudience(storageAccountName) { return `https://${storageAccountName}.blob.core.windows.net/.default`; } /** * Function that converts PageRange and ClearRange to a common Range object. * PageRange and ClearRange have start and end, while Range has offset and count; * this function normalizes them to Range.
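* For example, under the mapping used below (offset = start, count = end - start),
* a model range of `{ start: 0, end: 512 }` normalizes to `{ offset: 0, count: 512 }`.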
* @param response - Model PageBlob Range response */ function rangeResponseFromModel(response) { const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ offset: x.start, count: x.end - x.start })); const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ offset: x.start, count: x.end - x.start })); return Object.assign(Object.assign({}, response), { pageRange, clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { pageRange, clearRange } }) }); } /** * This is the poller returned by {@link BlobClient.beginCopyFromURL}. * This can not be instantiated directly outside of this package. * * @hidden */ var BlobBeginCopyFromUrlPoller = class extends coreLro.Poller { constructor(options) { const { blobClient, copySource: copySource$1, intervalInMs = 15e3, onProgress, resumeFrom, startCopyFromURLOptions } = options; let state$1; if (resumeFrom) state$1 = JSON.parse(resumeFrom).state; const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state$1), { blobClient, copySource: copySource$1, startCopyFromURLOptions })); super(operation); if (typeof onProgress === "function") this.onProgress(onProgress); this.intervalInMs = intervalInMs; } delay() { return coreUtil.delay(this.intervalInMs); } }; /** * Note: Intentionally using function expression over arrow function expression * so that the function can be invoked with a different context. * This affects what `this` refers to. * @hidden */ const cancel = async function cancel$1(options = {}) { const state$1 = this.state; const { copyId: copyId$1 } = state$1; if (state$1.isCompleted) return makeBlobBeginCopyFromURLPollOperation(state$1); if (!copyId$1) { state$1.isCancelled = true; return makeBlobBeginCopyFromURLPollOperation(state$1); } await state$1.blobClient.abortCopyFromURL(copyId$1, { abortSignal: options.abortSignal }); state$1.isCancelled = true; return makeBlobBeginCopyFromURLPollOperation(state$1); }; /** * Note: Intentionally using function expression over arrow function expression * so that the function can be invoked with a different context. * This affects what `this` refers to. 
* @hidden */ const update = async function update$1(options = {}) { const state$1 = this.state; const { blobClient, copySource: copySource$1, startCopyFromURLOptions } = state$1; if (!state$1.isStarted) { state$1.isStarted = true; const result = await blobClient.startCopyFromURL(copySource$1, startCopyFromURLOptions); state$1.copyId = result.copyId; if (result.copyStatus === "success") { state$1.result = result; state$1.isCompleted = true; } } else if (!state$1.isCompleted) try { const result = await state$1.blobClient.getProperties({ abortSignal: options.abortSignal }); const { copyStatus, copyProgress } = result; const prevCopyProgress = state$1.copyProgress; if (copyProgress) state$1.copyProgress = copyProgress; if (copyStatus === "pending" && copyProgress !== prevCopyProgress && typeof options.fireProgress === "function") options.fireProgress(state$1); else if (copyStatus === "success") { state$1.result = result; state$1.isCompleted = true; } else if (copyStatus === "failed") { state$1.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); state$1.isCompleted = true; } } catch (err) { state$1.error = err; state$1.isCompleted = true; } return makeBlobBeginCopyFromURLPollOperation(state$1); }; /** * Note: Intentionally using function expression over arrow function expression * so that the function can be invoked with a different context. * This affects what `this` refers to. * @hidden */ const toString = function toString$1() { return JSON.stringify({ state: this.state }, (key, value) => { if (key === "blobClient") return void 0; return value; }); }; /** * Creates a poll operation given the provided state. * @hidden */ function makeBlobBeginCopyFromURLPollOperation(state$1) { return { state: Object.assign({}, state$1), cancel, toString, update }; } /** * Generate a range string. For example: * * "bytes=255-" or "bytes=0-511" * * @param iRange - */ function rangeToString(iRange) { if (iRange.offset < 0) throw new RangeError(`Range.offset cannot be smaller than 0.`); if (iRange.count && iRange.count <= 0) throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); return iRange.count ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` : `bytes=${iRange.offset}-`; } /** * States for Batch. */ var BatchStates; (function(BatchStates$1) { BatchStates$1[BatchStates$1["Good"] = 0] = "Good"; BatchStates$1[BatchStates$1["Error"] = 1] = "Error"; })(BatchStates || (BatchStates = {})); /** * Batch provides basic parallel execution with concurrency limits. * It will stop executing the remaining operations when one of the executed operations throws an error. * Batch cannot cancel ongoing operations; you need to cancel them yourself. */ var Batch = class { /** * Creates an instance of Batch. * @param concurrency - */ constructor(concurrency = 5) { /** * Number of active operations under execution. */ this.actives = 0; /** * Number of operations that have completed. */ this.completed = 0; /** * Offset of the next operation to be executed. */ this.offset = 0; /** * Operation array to be executed. */ this.operations = []; /** * State of the Batch. When an error happens, the state turns into error and * the Batch stops executing the remaining operations. */ this.state = BatchStates.Good; if (concurrency < 1) throw new RangeError("concurrency must be larger than 0"); this.concurrency = concurrency; this.emitter = new events.EventEmitter(); } /** * Add an operation into the queue.
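* A hedged usage sketch of the Batch class defined above (the `tasks` array is an
* illustrative assumption):
*
* ```js
* const batch = new Batch(2); // at most 2 operations in flight at a time
* for (const task of tasks) {
*   batch.addOperation(async () => {
*     await task(); // any rejection stops the remaining queued operations
*   });
* }
* await batch.do(); // resolves once every queued operation has completed
* ```
*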
* * @param operation - */ addOperation(operation) { this.operations.push(async () => { try { this.actives++; await operation(); this.actives--; this.completed++; this.parallelExecute(); } catch (error) { this.emitter.emit("error", error); } }); } /** * Start executing operations in the queue. * */ async do() { if (this.operations.length === 0) return Promise.resolve(); this.parallelExecute(); return new Promise((resolve, reject) => { this.emitter.on("finish", resolve); this.emitter.on("error", (error) => { this.state = BatchStates.Error; reject(error); }); }); } /** * Get the next operation to be executed. Returns null when reaching the end. * */ nextOperation() { if (this.offset < this.operations.length) return this.operations[this.offset++]; return null; } /** * Start executing operations. One of the most important differences between * this method and do() is that do() wraps the execution as an async method. * */ parallelExecute() { if (this.state === BatchStates.Error) return; if (this.completed >= this.operations.length) { this.emitter.emit("finish"); return; } while (this.actives < this.concurrency) { const operation = this.nextOperation(); if (operation) operation(); else return; } } }; /** * This class generates a readable stream from the data in an array of buffers. */ var BuffersStream = class extends stream$2.Readable { /** * Creates an instance of BuffersStream that will emit the data * contained in the array of buffers. * * @param buffers - Array of buffers containing the data * @param byteLength - The total length of data contained in the buffers */ constructor(buffers, byteLength, options) { super(options); this.buffers = buffers; this.byteLength = byteLength; this.byteOffsetInCurrentBuffer = 0; this.bufferIndex = 0; this.pushedBytesLength = 0; let buffersLength = 0; for (const buf of this.buffers) buffersLength += buf.byteLength; if (buffersLength < this.byteLength) throw new Error("Data size shouldn't be larger than the total length of buffers."); } /** * Internal _read() that will be called when the stream wants to pull more data in. * * @param size - Optional. The size of data to be read */ _read(size) { if (this.pushedBytesLength >= this.byteLength) this.push(null); if (!size) size = this.readableHighWaterMark; const outBuffers = []; let i = 0; while (i < size && this.pushedBytesLength < this.byteLength) { const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); if (remaining > size - i) { const end = this.byteOffsetInCurrentBuffer + size - i; outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); this.pushedBytesLength += size - i; this.byteOffsetInCurrentBuffer = end; i = size; break; } else { const end = this.byteOffsetInCurrentBuffer + remaining; outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); if (remaining === remainingCapacityInThisBuffer) { this.byteOffsetInCurrentBuffer = 0; this.bufferIndex++; } else this.byteOffsetInCurrentBuffer = end; this.pushedBytesLength += remaining; i += remaining; } } if (outBuffers.length > 1) this.push(Buffer.concat(outBuffers)); else if (outBuffers.length === 1) this.push(outBuffers[0]); } }; const maxBufferLength = buffer$1.constants.MAX_LENGTH; /** * This class provides a buffer container which conceptually has no hard size limit.
* It accepts a capacity, an array of input buffers and the total length of input data. * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers * into the internal "buffer" serially with respect to the total length. * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream * assembled from all the data in the internal "buffer". */ var PooledBuffer = class { /** * The size of the data contained in the pooled buffers. */ get size() { return this._size; } constructor(capacity, buffers, totalLength) { /** * Internal buffers used to keep the data. * Each buffer has a length of maxBufferLength except the last one. */ this.buffers = []; this.capacity = capacity; this._size = 0; const bufferNum = Math.ceil(capacity / maxBufferLength); for (let i = 0; i < bufferNum; i++) { let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; if (len === 0) len = maxBufferLength; this.buffers.push(Buffer.allocUnsafe(len)); } if (buffers) this.fill(buffers, totalLength); } /** * Fill the internal buffers with data in the input buffers serially * with respect to the total length and the total capacity of the internal buffers. * Copied data will be shifted out of the input buffers. * * @param buffers - Input buffers containing the data to be filled in the pooled buffer * @param totalLength - Total length of the data to be filled in. * */ fill(buffers, totalLength) { this._size = Math.min(this.capacity, totalLength); let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; while (totalCopiedNum < this._size) { const source = buffers[i]; const target = this.buffers[j]; const copiedNum = source.copy(target, targetOffset, sourceOffset); totalCopiedNum += copiedNum; sourceOffset += copiedNum; targetOffset += copiedNum; if (sourceOffset === source.length) { i++; sourceOffset = 0; } if (targetOffset === target.length) { j++; targetOffset = 0; } } buffers.splice(0, i); if (buffers.length > 0) buffers[0] = buffers[0].slice(sourceOffset); } /** * Get the readable stream assembled from all the data in the internal buffers. * */ getReadableStream() { return new BuffersStream(this.buffers, this.size); } }; /** * This class accepts a Node.js Readable stream as input, and keeps reading data * from the stream into the internal buffer structure, until it reaches maxBuffers. * Every available buffer will try to trigger the outgoingHandler. * * The internal buffer structure includes an incoming buffer array, and an outgoing * buffer array. The incoming buffer array includes the "empty" buffers that can be filled * with new incoming data. The outgoing array includes the filled buffers to be * handled by outgoingHandler. The size of each buffer above is defined by the bufferSize parameter. * * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING * * NUM_OF_ALL_BUFFERS is less than or equal to maxBuffers * * PERFORMANCE IMPROVEMENT TIPS: * 1. It is best to set the input stream's highWaterMark to the same value as the bufferSize * parameter, which avoids Buffer.concat() operations. * 2. concurrency should be set to a smaller value than maxBuffers, which helps keep * the outgoing queue from sitting empty while outgoing handlers wait for stream data * (a situation in which outgoing handlers are blocked). */ var BufferScheduler = class { /** * Creates an instance of BufferScheduler.
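* A minimal wiring sketch (the `sourceStream` and `upload` names below are
* assumptions for illustration, not part of this file):
*
* ```js
* const scheduler = new BufferScheduler(
*   sourceStream,         // a Node.js Readable
*   4 * 1024 * 1024,      // bufferSize: 4 MiB per maintained buffer
*   20,                   // maxBuffers
*   async (getStream, length, offset) => {
*     await upload(getStream(), length, offset); // hypothetical consumer
*   },
*   5                     // concurrency of outgoingHandlers
* );
* await scheduler.do();
* ```
*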
* * @param readable - A Node.js Readable stream * @param bufferSize - Buffer size of every maintained buffer * @param maxBuffers - How many buffers can be allocated * @param outgoingHandler - An async function scheduled to be * triggered when a buffer is fully filled * with stream data * @param concurrency - Concurrency of executing outgoingHandlers (>0) * @param encoding - [Optional] Encoding of Readable stream when it's a string stream */ constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { /** * An internal event emitter. */ this.emitter = new events.EventEmitter(); /** * An internal offset marker to track the data offset in bytes for the next outgoingHandler. */ this.offset = 0; /** * An internal marker to track whether the stream has ended. */ this.isStreamEnd = false; /** * An internal marker to track whether the stream or an outgoingHandler has returned an error. */ this.isError = false; /** * How many handlers are executing. */ this.executingOutgoingHandlers = 0; /** * How many buffers have been allocated. */ this.numBuffers = 0; /** * This class doesn't know how much data the stream pops each time; that * is defined by the stream's highWaterMark. So BufferScheduler caches * data received from the stream, and when the data in unresolvedDataArray exceeds the * defined bufferSize, it tries to concat a bufferSize worth of data, fill it into available * buffers from incoming and push them to the outgoing array. */ this.unresolvedDataArray = []; /** * How much data is contained in unresolvedDataArray. */ this.unresolvedLength = 0; /** * The array of all the available buffers that can be filled with data from the stream. */ this.incoming = []; /** * The array (queue) of all the buffers filled with stream data. */ this.outgoing = []; if (bufferSize <= 0) throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); if (maxBuffers <= 0) throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); if (concurrency <= 0) throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); this.bufferSize = bufferSize; this.maxBuffers = maxBuffers; this.readable = readable; this.outgoingHandler = outgoingHandler; this.concurrency = concurrency; this.encoding = encoding; } /** * Start the scheduler; it will reject when the stream or any of the outgoingHandlers * returns an error. * */ async do() { return new Promise((resolve, reject) => { this.readable.on("data", (data) => { data = typeof data === "string" ? Buffer.from(data, this.encoding) : data; this.appendUnresolvedData(data); if (!this.resolveData()) this.readable.pause(); }); this.readable.on("error", (err) => { this.emitter.emit("error", err); }); this.readable.on("end", () => { this.isStreamEnd = true; this.emitter.emit("checkEnd"); }); this.emitter.on("error", (err) => { this.isError = true; this.readable.pause(); reject(err); }); this.emitter.on("checkEnd", () => { if (this.outgoing.length > 0) { this.triggerOutgoingHandlers(); return; } if (this.isStreamEnd && this.executingOutgoingHandlers === 0) if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { const buffer$2 = this.shiftBufferFromUnresolvedDataArray(); this.outgoingHandler(() => buffer$2.getReadableStream(), buffer$2.size, this.offset).then(resolve).catch(reject); } else if (this.unresolvedLength >= this.bufferSize) return; else resolve(); }); }); } /** * Insert new data into the unresolved array.
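* Each appended chunk only grows unresolvedLength; the data is not copied into
* pooled buffers until resolveData() drains it in bufferSize-sized pieces.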
* * @param data - */ appendUnresolvedData(data) { this.unresolvedDataArray.push(data); this.unresolvedLength += data.length; } /** * Try to shift a buffer of bufferSize bytes. The returned buffer may be smaller * than bufferSize when the data in unresolvedDataArray is less than bufferSize. * */ shiftBufferFromUnresolvedDataArray(buffer$2) { if (!buffer$2) buffer$2 = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); else buffer$2.fill(this.unresolvedDataArray, this.unresolvedLength); this.unresolvedLength -= buffer$2.size; return buffer$2; } /** * Resolve data in unresolvedDataArray. For every bufferSize of data shifted, it * tries to get (or allocate) a buffer from incoming, fills it, * then pushes it into outgoing to be handled by an outgoing handler. * * Returns false when the available buffers in incoming are not enough, else true. * * @returns Returns false when buffers in incoming are not enough, else true. */ resolveData() { while (this.unresolvedLength >= this.bufferSize) { let buffer$2; if (this.incoming.length > 0) { buffer$2 = this.incoming.shift(); this.shiftBufferFromUnresolvedDataArray(buffer$2); } else if (this.numBuffers < this.maxBuffers) { buffer$2 = this.shiftBufferFromUnresolvedDataArray(); this.numBuffers++; } else return false; this.outgoing.push(buffer$2); this.triggerOutgoingHandlers(); } return true; } /** * Try to trigger an outgoing handler for every buffer in outgoing. Stops when * the concurrency limit is reached. */ async triggerOutgoingHandlers() { let buffer$2; do { if (this.executingOutgoingHandlers >= this.concurrency) return; buffer$2 = this.outgoing.shift(); if (buffer$2) this.triggerOutgoingHandler(buffer$2); } while (buffer$2); } /** * Trigger an outgoing handler for a buffer shifted from outgoing. * * @param buffer - */ async triggerOutgoingHandler(buffer$2) { const bufferLength = buffer$2.size; this.executingOutgoingHandlers++; this.offset += bufferLength; try { await this.outgoingHandler(() => buffer$2.getReadableStream(), bufferLength, this.offset - bufferLength); } catch (err) { this.emitter.emit("error", err); return; } this.executingOutgoingHandlers--; this.reuseBuffer(buffer$2); this.emitter.emit("checkEnd"); } /** * Return a buffer used by an outgoing handler to incoming. * * @param buffer - */ reuseBuffer(buffer$2) { this.incoming.push(buffer$2); if (!this.isError && this.resolveData() && !this.isStreamEnd) this.readable.resume(); } }; /** * Reads a readable stream into a buffer, filling the buffer from offset to end. * * @param stream - A Node.js Readable stream * @param buffer - Buffer to be filled, length must be greater than or equal to offset * @param offset - From which position in the buffer to fill, inclusive * @param end - To which position in the buffer to fill, exclusive * @param encoding - Encoding of the Readable stream */ async function streamToBuffer(stream$3, buffer$2, offset, end, encoding) { let pos = 0; const count = end - offset; return new Promise((resolve, reject) => { const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), REQUEST_TIMEOUT); stream$3.on("readable", () => { if (pos >= count) { clearTimeout(timeout); resolve(); return; } let chunk = stream$3.read(); if (!chunk) return; if (typeof chunk === "string") chunk = Buffer.from(chunk, encoding); const chunkLength = pos + chunk.length > count ?
count - pos : chunk.length; buffer$2.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); pos += chunkLength; }); stream$3.on("end", () => { clearTimeout(timeout); if (pos < count) reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); resolve(); }); stream$3.on("error", (msg) => { clearTimeout(timeout); reject(msg); }); }); } /** * Reads a readable stream into buffer entirely. * * @param stream - A Node.js Readable stream * @param buffer - Buffer to be filled, length must greater than or equal to offset * @param encoding - Encoding of the Readable stream * @returns with the count of bytes read. * @throws `RangeError` If buffer size is not big enough. */ async function streamToBuffer2(stream$3, buffer$2, encoding) { let pos = 0; const bufferSize = buffer$2.length; return new Promise((resolve, reject) => { stream$3.on("readable", () => { let chunk = stream$3.read(); if (!chunk) return; if (typeof chunk === "string") chunk = Buffer.from(chunk, encoding); if (pos + chunk.length > bufferSize) { reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); return; } buffer$2.fill(chunk, pos, pos + chunk.length); pos += chunk.length; }); stream$3.on("end", () => { resolve(pos); }); stream$3.on("error", reject); }); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed. * * @param rs - The read stream. * @param file - Destination file path. */ async function readStreamToLocalFile(rs, file) { return new Promise((resolve, reject) => { const ws = fs__namespace.createWriteStream(file); rs.on("error", (err) => { reject(err); }); ws.on("error", (err) => { reject(err); }); ws.on("close", resolve); rs.pipe(ws); }); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * Promisified version of fs.stat(). */ const fsStat = util__namespace.promisify(fs__namespace.stat); const fsCreateReadStream = fs__namespace.createReadStream; /** * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, * append blob, or page blob. */ var BlobClient = class BlobClient extends StorageClient { /** * The name of the blob. */ get name() { return this._name; } /** * The name of the storage container the blob is associated with. 
*/ get containerName() { return this._containerName; } constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { options = options || {}; let pipeline; let url$1; if (isPipelineLike(credentialOrPipelineOrContainerName)) { url$1 = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } else if (coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { url$1 = urlOrConnectionString; options = blobNameOrOptions; pipeline = newPipeline(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { url$1 = urlOrConnectionString; if (blobNameOrOptions && typeof blobNameOrOptions !== "string") options = blobNameOrOptions; pipeline = newPipeline(new AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { const containerName = credentialOrPipelineOrContainerName; const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url$1 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); pipeline = newPipeline(sharedKeyCredential, options); } else throw new Error("Account connection string is only supported in Node.js environment"); else if (extractedCreds.kind === "SASConnString") { url$1 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; pipeline = newPipeline(new AnonymousCredential(), options); } else throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } else throw new Error("Expecting non-empty strings for containerName and blobName parameters"); super(url$1, pipeline); ({blobName: this._name, containerName: this._containerName} = this.getBlobAndContainerNamesFromUrl()); this.blobContext = this.storageClientContext.blob; this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); } /** * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. * Provide "" will remove the snapshot and return a Client to the base blob. * * @param snapshot - The snapshot timestamp. * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp */ withSnapshot(snapshot$1) { return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot$1.length === 0 ? void 0 : snapshot$1), this.pipeline); } /** * Creates a new BlobClient object pointing to a version of this blob. * Provide "" will remove the versionId and return a Client to the base blob. * * @param versionId - The versionId. * @returns A new BlobClient object pointing to the version of this blob. 
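* A short usage sketch (the version id below is a placeholder assumption):
*
* ```js
* const versionedClient = blobClient.withVersion("2024-01-01T00:00:00.0000000Z");
* const baseClient = versionedClient.withVersion(""); // "" strips the versionId
* ```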
*/ withVersion(versionId$1) { return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId$1.length === 0 ? void 0 : versionId$1), this.pipeline); } /** * Creates a AppendBlobClient object. * */ getAppendBlobClient() { return new AppendBlobClient(this.url, this.pipeline); } /** * Creates a BlockBlobClient object. * */ getBlockBlobClient() { return new BlockBlobClient(this.url, this.pipeline); } /** * Creates a PageBlobClient object. * */ getPageBlobClient() { return new PageBlobClient(this.url, this.pipeline); } /** * Reads or downloads a blob from the system, including its metadata and properties. * You can also call Get Blob to read a snapshot. * * * In Node.js, data returns in a Readable stream readableStreamBody * * In browsers, data returns in a promise blobBody * * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-blob * * @param offset - From which position of the blob to download, greater than or equal to 0 * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined * @param options - Optional options to Blob Download operation. * * * Example usage (Node.js): * * ```js * // Download and convert a blob to a string * const downloadBlockBlobResponse = await blobClient.download(); * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); * console.log("Downloaded blob content:", downloaded.toString()); * * async function streamToBuffer(readableStream) { * return new Promise((resolve, reject) => { * const chunks = []; * readableStream.on("data", (data) => { * chunks.push(typeof data === "string" ? Buffer.from(data) : data); * }); * readableStream.on("end", () => { * resolve(Buffer.concat(chunks)); * }); * readableStream.on("error", reject); * }); * } * ``` * * Example usage (browser): * * ```js * // Download and convert a blob to a string * const downloadBlockBlobResponse = await blobClient.download(); * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); * console.log( * "Downloaded blob content", * downloaded * ); * * async function blobToString(blob: Blob): Promise { * const fileReader = new FileReader(); * return new Promise((resolve, reject) => { * fileReader.onloadend = (ev: any) => { * resolve(ev.target!.result); * }; * fileReader.onerror = reject; * fileReader.readAsText(blob); * }); * } * ``` */ async download(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlobClient-download", options, async (updatedOptions) => { var _a$2; const res = assertResponse(await this.blobContext.download({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), requestOptions: { onDownloadProgress: coreUtil.isNode ? void 0 : options.onProgress }, range: offset === 0 && !count ? 
void 0 : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey, tracingOptions: updatedOptions.tracingOptions })); const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); if (!coreUtil.isNode) return wrappedRes; if (options.maxRetryRequests === void 0 || options.maxRetryRequests < 0) options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; if (res.contentLength === void 0) throw new RangeError(`File download response doesn't contain valid content length header`); if (!res.etag) throw new RangeError(`File download response doesn't contain valid etag header`); return new BlobDownloadResponse(wrappedRes, async (start) => { var _a$3; const updatedDownloadOptions = { leaseAccessConditions: options.conditions, modifiedAccessConditions: { ifMatch: options.conditions.ifMatch || res.etag, ifModifiedSince: options.conditions.ifModifiedSince, ifNoneMatch: options.conditions.ifNoneMatch, ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, ifTags: (_a$3 = options.conditions) === null || _a$3 === void 0 ? void 0 : _a$3.tagConditions }, range: rangeToString({ count: offset + res.contentLength - start, offset: start }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }; return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; }, offset, res.contentLength, { maxRetryRequests: options.maxRetryRequests, onProgress: options.onProgress }); }); } /** * Returns true if the Azure blob resource represented by this client exists; false otherwise. * * NOTE: use this function with care since an existing blob might be deleted by other clients or * applications. Vice versa new blobs might be added by other clients or applications after this * function completes. * * @param options - options to Exists operation. */ async exists(options = {}) { return tracingClient.withSpan("BlobClient-exists", options, async (updatedOptions) => { try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); await this.getProperties({ abortSignal: options.abortSignal, customerProvidedKey: options.customerProvidedKey, conditions: options.conditions, tracingOptions: updatedOptions.tracingOptions }); return true; } catch (e) { if (e.statusCode === 404) return false; else if (e.statusCode === 409 && (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) return true; throw e; } }); } /** * Returns all user-defined metadata, standard HTTP properties, and system properties * for the blob. It does not return the content of the blob. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-blob-properties * * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if * they originally contained uppercase characters. This differs from the metadata keys returned by * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which * will retain their original casing. * * @param options - Optional options to Get Properties operation. 
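* A minimal usage sketch (variable names are illustrative):
*
* ```js
* const properties = await blobClient.getProperties();
* console.log(properties.contentLength, properties.lastModified);
* console.log(properties.metadata); // note: keys arrive lowercased
* ```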
*/ async getProperties(options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlobClient-getProperties", options, async (updatedOptions) => { var _a$2; const res = assertResponse(await this.blobContext.getProperties({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), cpkInfo: options.customerProvidedKey, tracingOptions: updatedOptions.tracingOptions })); return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); }); } /** * Marks the specified blob or snapshot for deletion. The blob is later deleted * during garbage collection. Note that in order to delete a blob, you must delete * all of its snapshots. You can delete both at the same time with the Delete * Blob operation. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/delete-blob * * @param options - Optional options to Blob Delete operation. */ async delete(options = {}) { options.conditions = options.conditions || {}; return tracingClient.withSpan("BlobClient-delete", options, async (updatedOptions) => { var _a$2; return assertResponse(await this.blobContext.delete({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), tracingOptions: updatedOptions.tracingOptions })); }); } /** * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted * during garbage collection. Note that in order to delete a blob, you must delete * all of its snapshots. You can delete both at the same time with the Delete * Blob operation. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/delete-blob * * @param options - Optional options to Blob Delete operation. */ async deleteIfExists(options = {}) { return tracingClient.withSpan("BlobClient-deleteIfExists", options, async (updatedOptions) => { var _a$2, _b$1; try { const res = assertResponse(await this.delete(updatedOptions)); return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a$2 = e.details) === null || _a$2 === void 0 ? void 0 : _a$2.errorCode) === "BlobNotFound") return Object.assign(Object.assign({ succeeded: false }, (_b$1 = e.response) === null || _b$1 === void 0 ? void 0 : _b$1.parsedHeaders), { _response: e.response }); throw e; } }); } /** * Restores the contents and metadata of soft deleted blob and any associated * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 * or later. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/undelete-blob * * @param options - Optional options to Blob Undelete operation. */ async undelete(options = {}) { return tracingClient.withSpan("BlobClient-undelete", options, async (updatedOptions) => { return assertResponse(await this.blobContext.undelete({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); }); } /** * Sets system properties on the blob. 
* * If no value is provided, or no value is provided for the specified blob HTTP headers, * these blob HTTP headers without a value will be cleared. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-blob-properties * * @param blobHTTPHeaders - If no value is provided, or no value is provided for * the specified blob HTTP headers, these blob HTTP * headers without a value will be cleared. * A common header to set is `blobContentType` * enabling the browser to provide functionality * based on file type. * @param options - Optional options to Blob Set HTTP Headers operation. */ async setHTTPHeaders(blobHTTPHeaders, options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlobClient-setHTTPHeaders", options, async (updatedOptions) => { var _a$2; return assertResponse(await this.blobContext.setHttpHeaders({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), tracingOptions: updatedOptions.tracingOptions })); }); } /** * Sets user-defined metadata for the specified blob as one or more name-value pairs. * * If no option is provided, or no metadata is defined in the parameter, the blob * metadata will be removed. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata * * @param metadata - Replace existing metadata with this value. * If no value is provided, the existing metadata will be removed. * @param options - Optional options to Set Metadata operation. */ async setMetadata(metadata$1, options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlobClient-setMetadata", options, async (updatedOptions) => { var _a$2; return assertResponse(await this.blobContext.setMetadata({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: metadata$1, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions })); }); } /** * Sets tags on the underlying blob. * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. * Valid tag key and value characters include lower and upper case letters, digits (0-9), * space (' '), plus ('+'), minus ('-'), period ('.'), forward slash ('/'), colon (':'), equals ('='), and underscore ('_'). * * @param tags - * @param options - */ async setTags(tags$1, options = {}) { return tracingClient.withSpan("BlobClient-setTags", options, async (updatedOptions) => { var _a$2; return assertResponse(await this.blobContext.setTags({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), tracingOptions: updatedOptions.tracingOptions, tags: toBlobTags(tags$1) })); }); } /** * Gets the tags associated with the underlying blob.
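* A hedged round-trip sketch using setTags (defined above) together with getTags:
*
* ```js
* await blobClient.setTags({ project: "demo", stage: "archive" });
* const result = await blobClient.getTags();
* console.log(result.tags); // { project: "demo", stage: "archive" }
* ```
*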
* * @param options - */ async getTags(options = {}) { return tracingClient.withSpan("BlobClient-getTags", options, async (updatedOptions) => { var _a$2; const response = assertResponse(await this.blobContext.getTags({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), tracingOptions: updatedOptions.tracingOptions })); const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); return wrappedResponse; }); } /** * Get a {@link BlobLeaseClient} that manages leases on the blob. * * @param proposeLeaseId - Initial proposed lease Id. * @returns A new BlobLeaseClient object for managing leases on the blob. */ getBlobLeaseClient(proposeLeaseId) { return new BlobLeaseClient(this, proposeLeaseId); } /** * Creates a read-only snapshot of a blob. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/snapshot-blob * * @param options - Optional options to the Blob Create Snapshot operation. */ async createSnapshot(options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlobClient-createSnapshot", options, async (updatedOptions) => { var _a$2; return assertResponse(await this.blobContext.createSnapshot({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions })); }); } /** * Asynchronously copies a blob to a destination within the storage account. * This method returns a long running operation poller that allows you to wait * indefinitely until the copy is completed. * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. * Note that the onProgress callback will not be invoked if the operation completes in the first * request, and attempting to cancel a completed copy will result in an error being thrown. * * In version 2012-02-12 and later, the source for a Copy Blob operation can be * a committed blob in any Azure storage account. * Beginning with version 2015-02-21, the source for a Copy Blob operation can be * an Azure file in any Azure storage account. * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob * operation to copy from another storage account. 
* @see https://learn.microsoft.com/en-us/rest/api/storageservices/copy-blob * * Example using automatic polling: * * ```js * const copyPoller = await blobClient.beginCopyFromURL('url'); * const result = await copyPoller.pollUntilDone(); * ``` * * Example using manual polling: * * ```js * const copyPoller = await blobClient.beginCopyFromURL('url'); * while (!poller.isDone()) { * await poller.poll(); * } * const result = copyPoller.getResult(); * ``` * * Example using progress updates: * * ```js * const copyPoller = await blobClient.beginCopyFromURL('url', { * onProgress(state) { * console.log(`Progress: ${state.copyProgress}`); * } * }); * const result = await copyPoller.pollUntilDone(); * ``` * * Example using a changing polling interval (default 15 seconds): * * ```js * const copyPoller = await blobClient.beginCopyFromURL('url', { * intervalInMs: 1000 // poll blob every 1 second for copy progress * }); * const result = await copyPoller.pollUntilDone(); * ``` * * Example using copy cancellation: * * ```js * const copyPoller = await blobClient.beginCopyFromURL('url'); * // cancel operation after starting it. * try { * await copyPoller.cancelOperation(); * // calls to get the result now throw PollerCancelledError * await copyPoller.getResult(); * } catch (err) { * if (err.name === 'PollerCancelledError') { * console.log('The copy was cancelled.'); * } * } * ``` * * @param copySource - url to the source Azure Blob/File. * @param options - Optional options to the Blob Start Copy From URL operation. */ async beginCopyFromURL(copySource$1, options = {}) { const client = { abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), getProperties: (...args) => this.getProperties(...args), startCopyFromURL: (...args) => this.startCopyFromURL(...args) }; const poller = new BlobBeginCopyFromUrlPoller({ blobClient: client, copySource: copySource$1, intervalInMs: options.intervalInMs, onProgress: options.onProgress, resumeFrom: options.resumeFrom, startCopyFromURLOptions: options }); await poller.poll(); return poller; } /** * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero * length and full metadata. Version 2012-02-12 and newer. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob * * @param copyId - Id of the Copy From URL operation. * @param options - Optional options to the Blob Abort Copy From URL operation. */ async abortCopyFromURL(copyId$1, options = {}) { return tracingClient.withSpan("BlobClient-abortCopyFromURL", options, async (updatedOptions) => { return assertResponse(await this.blobContext.abortCopyFromURL(copyId$1, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions })); }); } /** * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not * return a response until the copy is complete. 
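* A minimal usage sketch (the source URL is a placeholder; a SAS token may be
* required for authentication):
*
* ```js
* const response = await blobClient.syncCopyFromURL(
*   "https://<account>.blob.core.windows.net/<container>/<blob>?<SAS>"
* );
* console.log(response.copyStatus); // "success" once the copy has completed
* ```
*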
* @see https://learn.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url * * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication * @param options - */ async syncCopyFromURL(copySource$1, options = {}) { options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; return tracingClient.withSpan("BlobClient-syncCopyFromURL", options, async (updatedOptions) => { var _a$2, _b$1, _c$1, _d$1, _e, _f, _g; return assertResponse(await this.blobContext.copyFromURL(copySource$1, { abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), sourceModifiedAccessConditions: { sourceIfMatch: (_b$1 = options.sourceConditions) === null || _b$1 === void 0 ? void 0 : _b$1.ifMatch, sourceIfModifiedSince: (_c$1 = options.sourceConditions) === null || _c$1 === void 0 ? void 0 : _c$1.ifModifiedSince, sourceIfNoneMatch: (_d$1 = options.sourceConditions) === null || _d$1 === void 0 ? void 0 : _d$1.ifNoneMatch, sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_f = options.immutabilityPolicy) === null || _f === void 0 ? void 0 : _f.expiriesOn, immutabilityPolicyMode: (_g = options.immutabilityPolicy) === null || _g === void 0 ? void 0 : _g.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope, copySourceTags: options.copySourceTags, tracingOptions: updatedOptions.tracingOptions })); }); } /** * Sets the tier on a blob. The operation is allowed on a page blob in a premium * storage account and on a block blob in a blob storage account (locally redundant * storage only). A premium page blob's tier determines the allowed size, IOPS, * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive * storage type. This operation does not update the blob's ETag. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-blob-tier * * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. * @param options - Optional options to the Blob Set Tier operation. */ async setAccessTier(tier$1, options = {}) { return tracingClient.withSpan("BlobClient-setAccessTier", options, async (updatedOptions) => { var _a$2; return assertResponse(await this.blobContext.setTier(toAccessTier(tier$1), { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), rehydratePriority: options.rehydratePriority, tracingOptions: updatedOptions.tracingOptions })); }); } async downloadToBuffer(param1, param2, param3, param4 = {}) { var _a$2; let buffer$2; let offset = 0; let count = 0; let options = param4; if (param1 instanceof Buffer) { buffer$2 = param1; offset = param2 || 0; count = typeof param3 === "number" ? param3 : 0; } else { offset = typeof param1 === "number" ? param1 : 0; count = typeof param2 === "number" ? 
param2 : 0; options = param3 || {}; } let blockSize = (_a$2 = options.blockSize) !== null && _a$2 !== void 0 ? _a$2 : 0; if (blockSize < 0) throw new RangeError("blockSize option must be >= 0"); if (blockSize === 0) blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; if (offset < 0) throw new RangeError("offset option must be >= 0"); if (count && count <= 0) throw new RangeError("count option must be greater than 0"); if (!options.conditions) options.conditions = {}; return tracingClient.withSpan("BlobClient-downloadToBuffer", options, async (updatedOptions) => { if (!count) { const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); count = response.contentLength - offset; if (count < 0) throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); } if (!buffer$2) try { buffer$2 = Buffer.alloc(count); } catch (error) { throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile".\t ${error.message}`); } if (buffer$2.length < count) throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); let transferProgress = 0; const batch = new Batch(options.concurrency); for (let off = offset; off < offset + count; off = off + blockSize) batch.addOperation(async () => { let chunkEnd = offset + count; if (off + blockSize < chunkEnd) chunkEnd = off + blockSize; const response = await this.download(off, chunkEnd - off, { abortSignal: options.abortSignal, conditions: options.conditions, maxRetryRequests: options.maxRetryRequestsPerBlock, customerProvidedKey: options.customerProvidedKey, tracingOptions: updatedOptions.tracingOptions }); const stream$3 = response.readableStreamBody; await streamToBuffer(stream$3, buffer$2, off - offset, chunkEnd - offset); transferProgress += chunkEnd - off; if (options.onProgress) options.onProgress({ loadedBytes: transferProgress }); }); await batch.do(); return buffer$2; }); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * Downloads an Azure Blob to a local file. * Fails if the given file path already exists. * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. * * @param filePath - * @param offset - From which position of the block blob to download. * @param count - How much data to be downloaded. Will download to the end when passing undefined. * @param options - Options to the Blob download operation. * @returns The response data for blob download operation, * but with readableStreamBody set to undefined since its * content is already read and written into a local file * at the specified path.
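* A minimal usage sketch (the file path is a placeholder):
*
* ```js
* const response = await blobClient.downloadToFile("/tmp/blob-copy.bin");
* console.log(`downloaded ${response.contentLength} bytes`);
* // response.readableStreamBody is undefined: the content went to the file
* ```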
*/ async downloadToFile(filePath, offset = 0, count, options = {}) { return tracingClient.withSpan("BlobClient-downloadToFile", options, async (updatedOptions) => { const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); if (response.readableStreamBody) await readStreamToLocalFile(response.readableStreamBody, filePath); response.blobDownloadStream = void 0; return response; }); } getBlobAndContainerNamesFromUrl() { let containerName; let blobName; try { const parsedUrl = new URL(this.url); if (parsedUrl.host.split(".")[1] === "blob") { const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); containerName = pathComponents[1]; blobName = pathComponents[3]; } else if (isIpEndpointStyle(parsedUrl)) { const pathComponents = parsedUrl.pathname.match("/([^/]*)/([^/]*)(/(.*))?"); containerName = pathComponents[2]; blobName = pathComponents[4]; } else { const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); containerName = pathComponents[1]; blobName = pathComponents[3]; } containerName = decodeURIComponent(containerName); blobName = decodeURIComponent(blobName); blobName = blobName.replace(/\\/g, "/"); if (!containerName) throw new Error("Provided containerName is invalid."); return { blobName, containerName }; } catch (error) { throw new Error("Unable to extract blobName and containerName with provided information."); } } /** * Asynchronously copies a blob to a destination within the storage account. * In version 2012-02-12 and later, the source for a Copy Blob operation can be * a committed blob in any Azure storage account. * Beginning with version 2015-02-21, the source for a Copy Blob operation can be * an Azure file in any Azure storage account. * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob * operation to copy from another storage account. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/copy-blob * * @param copySource - url to the source Azure Blob/File. * @param options - Optional options to the Blob Start Copy From URL operation. */ async startCopyFromURL(copySource$1, options = {}) { return tracingClient.withSpan("BlobClient-startCopyFromURL", options, async (updatedOptions) => { var _a$2, _b$1, _c$1; options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; return assertResponse(await this.blobContext.startCopyFromURL(copySource$1, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), sourceModifiedAccessConditions: { sourceIfMatch: options.sourceConditions.ifMatch, sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, sourceIfTags: options.sourceConditions.tagConditions }, immutabilityPolicyExpiry: (_b$1 = options.immutabilityPolicy) === null || _b$1 === void 0 ? void 0 : _b$1.expiriesOn, immutabilityPolicyMode: (_c$1 = options.immutabilityPolicy) === null || _c$1 === void 0 ? 
void 0 : _c$1.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob, tracingOptions: updatedOptions.tracingOptions })); }); } /** * Only available for BlobClient constructed with a shared key credential. * * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties * and parameters passed in. The SAS is signed by the shared key credential of the client. * * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateSasUrl(options) { return new Promise((resolve) => { if (!(this.credential instanceof StorageSharedKeyCredential)) throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); resolve(appendToURLQuery(this.url, sas)); }); } /** * Only available for BlobClient constructed with a shared key credential. * * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. * * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @returns The string to sign for the SAS token. */ generateSasStringToSign(options) { if (!(this.credential instanceof StorageSharedKeyCredential)) throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).stringToSign; } /** * * Generates a Blob Service Shared Access Signature (SAS) URI based on * the client properties and parameters passed in. The SAS is signed by the input user delegation key. * * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateUserDelegationSasUrl(options, userDelegationKey) { return new Promise((resolve) => { const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), userDelegationKey, this.accountName).toString(); resolve(appendToURLQuery(this.url, sas)); }); } /** * * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on * the client properties and parameters passed in. The SAS is signed by the input user delegation key.
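*
* Example usage (a hedged sketch, not from the original docs; assumes an existing `blobServiceClient`, a `blobClient` for the target blob, and that `BlobSASPermissions` is imported from this package):
*
* ```js
* const expiresOn = new Date(Date.now() + 3600 * 1000);
* const userDelegationKey = await blobServiceClient.getUserDelegationKey(new Date(), expiresOn);
* const stringToSign = blobClient.generateUserDelegationSasStringToSign(
*   { permissions: BlobSASPermissions.parse("r"), expiresOn },
*   userDelegationKey
* );
* console.log(stringToSign); // useful when troubleshooting signature mismatches
* ```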
* * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` * @returns The string to sign for the SAS token. */ generateUserDelegationSasStringToSign(options, userDelegationKey) { return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), userDelegationKey, this.accountName).stringToSign; } /** * Delete the immutability policy on the blob. * * @param options - Optional options to delete immutability policy on the blob. */ async deleteImmutabilityPolicy(options = {}) { return tracingClient.withSpan("BlobClient-deleteImmutabilityPolicy", options, async (updatedOptions) => { return assertResponse(await this.blobContext.deleteImmutabilityPolicy({ tracingOptions: updatedOptions.tracingOptions })); }); } /** * Set immutability policy on the blob. * * @param options - Optional options to set immutability policy on the blob. */ async setImmutabilityPolicy(immutabilityPolicy, options = {}) { return tracingClient.withSpan("BlobClient-setImmutabilityPolicy", options, async (updatedOptions) => { return assertResponse(await this.blobContext.setImmutabilityPolicy({ immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, tracingOptions: updatedOptions.tracingOptions })); }); } /** * Set legal hold on the blob. * * @param options - Optional options to set legal hold on the blob. */ async setLegalHold(legalHoldEnabled, options = {}) { return tracingClient.withSpan("BlobClient-setLegalHold", options, async (updatedOptions) => { return assertResponse(await this.blobContext.setLegalHold(legalHoldEnabled, { tracingOptions: updatedOptions.tracingOptions })); }); } /** * The Get Account Information operation returns the SKU name and account kind * for the specified account. * The Get Account Information operation is available on service versions beginning * with version 2018-03-28. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-account-information * * @param options - Options to the Service Get Account Info operation. * @returns Response data for the Service Get Account Info operation. */ async getAccountInfo(options = {}) { return tracingClient.withSpan("BlobClient-getAccountInfo", options, async (updatedOptions) => { return assertResponse(await this.blobContext.getAccountInfo({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); }); } }; /** * AppendBlobClient defines a set of operations applicable to append blobs.
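*
* Example usage (a hedged sketch, not from the original docs; assumes an existing `containerClient`):
*
* ```js
* const appendBlobClient = containerClient.getAppendBlobClient("app.log");
* await appendBlobClient.createIfNotExists();
* const line = "page view\n";
* await appendBlobClient.appendBlock(line, Buffer.byteLength(line));
* ```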
*/ var AppendBlobClient = class AppendBlobClient extends BlobClient { constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { let pipeline; let url$1; options = options || {}; if (isPipelineLike(credentialOrPipelineOrContainerName)) { url$1 = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } else if (coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { url$1 = urlOrConnectionString; options = blobNameOrOptions; pipeline = newPipeline(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { url$1 = urlOrConnectionString; pipeline = newPipeline(new AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { const containerName = credentialOrPipelineOrContainerName; const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url$1 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); pipeline = newPipeline(sharedKeyCredential, options); } else throw new Error("Account connection string is only supported in Node.js environment"); else if (extractedCreds.kind === "SASConnString") { url$1 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; pipeline = newPipeline(new AnonymousCredential(), options); } else throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } else throw new Error("Expecting non-empty strings for containerName and blobName parameters"); super(url$1, pipeline); this.appendBlobContext = this.storageClientContext.appendBlob; } /** * Creates a new AppendBlobClient object identical to the source but with the * specified snapshot timestamp. * Provide "" will remove the snapshot and return a Client to the base blob. * * @param snapshot - The snapshot timestamp. * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. */ withSnapshot(snapshot$1) { return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot$1.length === 0 ? void 0 : snapshot$1), this.pipeline); } /** * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. * @see https://learn.microsoft.com/rest/api/storageservices/put-blob * * @param options - Options to the Append Block Create operation. 
* * * Example usage: * * ```js * const appendBlobClient = containerClient.getAppendBlobClient(""); * await appendBlobClient.create(); * ``` */ async create(options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("AppendBlobClient-create", options, async (updatedOptions) => { var _a$2, _b$1, _c$1; return assertResponse(await this.appendBlobContext.create(0, { abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b$1 = options.immutabilityPolicy) === null || _b$1 === void 0 ? void 0 : _b$1.expiriesOn, immutabilityPolicyMode: (_c$1 = options.immutabilityPolicy) === null || _c$1 === void 0 ? void 0 : _c$1.policyMode, legalHold: options.legalHold, blobTagsString: toBlobTagsString(options.tags), tracingOptions: updatedOptions.tracingOptions })); }); } /** * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. * If the blob with the same name already exists, the content of the existing blob will remain unchanged. * @see https://learn.microsoft.com/rest/api/storageservices/put-blob * * @param options - */ async createIfNotExists(options = {}) { const conditions = { ifNoneMatch: ETagAny }; return tracingClient.withSpan("AppendBlobClient-createIfNotExists", options, async (updatedOptions) => { var _a$2, _b$1; try { const res = assertResponse(await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions }))); return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a$2 = e.details) === null || _a$2 === void 0 ? void 0 : _a$2.errorCode) === "BlobAlreadyExists") return Object.assign(Object.assign({ succeeded: false }, (_b$1 = e.response) === null || _b$1 === void 0 ? void 0 : _b$1.parsedHeaders), { _response: e.response }); throw e; } }); } /** * Seals the append blob, making it read only. * * @param options - */ async seal(options = {}) { options.conditions = options.conditions || {}; return tracingClient.withSpan("AppendBlobClient-seal", options, async (updatedOptions) => { var _a$2; return assertResponse(await this.appendBlobContext.seal({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), tracingOptions: updatedOptions.tracingOptions })); }); } /** * Commits a new block of data to the end of the existing append blob. * @see https://learn.microsoft.com/rest/api/storageservices/append-block * * @param body - Data to be appended. * @param contentLength - Length of the body in bytes. * @param options - Options to the Append Block operation. * * * Example usage: * * ```js * const content = "Hello World!"; * * // Create a new append blob and append data to the blob. 
* const newAppendBlobClient = containerClient.getAppendBlobClient(""); * await newAppendBlobClient.create(); * await newAppendBlobClient.appendBlock(content, content.length); * * // Append data to an existing append blob. * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); * await existingAppendBlobClient.appendBlock(content, content.length); * ``` */ async appendBlock(body$1, contentLength$1, options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("AppendBlobClient-appendBlock", options, async (updatedOptions) => { var _a$2; return assertResponse(await this.appendBlobContext.appendBlock(contentLength$1, body$1, { abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), requestOptions: { onUploadProgress: options.onProgress }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions })); }); } /** * The Append Block operation commits a new block of data to the end of an existing append blob * where the contents are read from a source url. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/append-block-from-url * * @param sourceURL - * The url to the blob that will be the source of the copy. A source blob in the same storage account can * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob * must either be public or must be authenticated via a shared access signature. If the source blob is * public, no authentication is required to perform the operation. * @param sourceOffset - Offset in source to be appended * @param count - Number of bytes to be appended as a block * @param options - */ async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("AppendBlobClient-appendBlockFromURL", options, async (updatedOptions) => { var _a$2, _b$1, _c$1, _d$1, _e; return assertResponse(await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, { abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), sourceModifiedAccessConditions: { sourceIfMatch: (_b$1 = options.sourceConditions) === null || _b$1 === void 0 ? void 0 : _b$1.ifMatch, sourceIfModifiedSince: (_c$1 = options.sourceConditions) === null || _c$1 === void 0 ? void 0 : _c$1.ifModifiedSince, sourceIfNoneMatch: (_d$1 = options.sourceConditions) === null || _d$1 === void 0 ? void 0 : _d$1.ifNoneMatch, sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.ifUnmodifiedSince }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions })); }); } }; /** * BlockBlobClient defines a set of operations applicable to block blobs. */ var BlockBlobClient = class BlockBlobClient extends BlobClient { constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { let pipeline; let url$1; options = options || {}; if (isPipelineLike(credentialOrPipelineOrContainerName)) { url$1 = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } else if (coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { url$1 = urlOrConnectionString; options = blobNameOrOptions; pipeline = newPipeline(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { url$1 = urlOrConnectionString; if (blobNameOrOptions && typeof blobNameOrOptions !== "string") options = blobNameOrOptions; pipeline = newPipeline(new AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { const containerName = credentialOrPipelineOrContainerName; const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url$1 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); pipeline = newPipeline(sharedKeyCredential, options); } else throw new Error("Account connection string is only supported in Node.js environment"); else if (extractedCreds.kind === "SASConnString") { url$1 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; pipeline = newPipeline(new AnonymousCredential(), options); } else throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } else throw new Error("Expecting non-empty strings for containerName and blobName parameters"); super(url$1, pipeline); this.blockBlobContext = this.storageClientContext.blockBlob; this._blobContext = this.storageClientContext.blob; } /** * Creates a new BlockBlobClient object identical to the source but with the * specified snapshot timestamp. * Provide "" will remove the snapshot and return a URL to the base blob. * * @param snapshot - The snapshot timestamp. * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. */ withSnapshot(snapshot$1) { return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot$1.length === 0 ? void 0 : snapshot$1), this.pipeline); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * Quick query for a JSON or CSV formatted blob. 
* * Example usage (Node.js): * * ```js * // Query and convert a blob to a string * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); * console.log("Query blob content:", downloaded); * * async function streamToBuffer(readableStream) { * return new Promise((resolve, reject) => { * const chunks = []; * readableStream.on("data", (data) => { * chunks.push(typeof data === "string" ? Buffer.from(data) : data); * }); * readableStream.on("end", () => { * resolve(Buffer.concat(chunks)); * }); * readableStream.on("error", reject); * }); * } * ``` * * @param query - * @param options - */ async query(query, options = {}) { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); if (!coreUtil.isNode) throw new Error("This operation currently is only supported in Node.js."); return tracingClient.withSpan("BlockBlobClient-query", options, async (updatedOptions) => { var _a$2; const response = assertResponse(await this._blobContext.query({ abortSignal: options.abortSignal, queryRequest: { queryType: "SQL", expression: query, inputSerialization: toQuerySerialization(options.inputTextConfiguration), outputSerialization: toQuerySerialization(options.outputTextConfiguration) }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), cpkInfo: options.customerProvidedKey, tracingOptions: updatedOptions.tracingOptions })); return new BlobQueryResponse(response, { abortSignal: options.abortSignal, onProgress: options.onProgress, onError: options.onError }); }); } /** * Creates a new block blob, or updates the content of an existing block blob. * Updating an existing block blob overwrites any existing metadata on the blob. * Partial updates are not supported; the content of the existing blob is * overwritten with the new content. To perform a partial update of a block blob's contents, * use {@link stageBlock} and {@link commitBlockList}. * * This is a non-parallel uploading method, please use {@link uploadFile}, * {@link uploadStream} or {@link uploadBrowserData} for better performance * with concurrent uploading. * * @see https://learn.microsoft.com/rest/api/storageservices/put-blob * * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function * which returns a new Readable stream whose offset is from data source beginning. * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a * string including non-Base64/Hex-encoded characters. * @param options - Options to the Block Blob Upload operation. * @returns Response data for the Block Blob Upload operation.
* * Example usage: * * ```js * const content = "Hello world!"; * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); * ``` */ async upload(body$1, contentLength$1, options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlockBlobClient-upload", options, async (updatedOptions) => { var _a$2, _b$1, _c$1; return assertResponse(await this.blockBlobContext.upload(contentLength$1, body$1, { abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), requestOptions: { onUploadProgress: options.onProgress }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b$1 = options.immutabilityPolicy) === null || _b$1 === void 0 ? void 0 : _b$1.expiriesOn, immutabilityPolicyMode: (_c$1 = options.immutabilityPolicy) === null || _c$1 === void 0 ? void 0 : _c$1.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), tracingOptions: updatedOptions.tracingOptions })); }); } /** * Creates a new Block Blob where the contents of the blob are read from a given URL. * This API is supported beginning with the 2020-04-08 version. Partial updates * are not supported with Put Blob from URL; the content of an existing blob is overwritten with * the content of the new blob. To perform partial updates to a block blob’s contents using a * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. * * @param sourceURL - Specifies the URL of the blob. The value * may be a URL of up to 2 KB in length that specifies a blob. * The value should be URL-encoded as it would appear * in a request URI. The source blob must either be public * or must be authenticated via a shared access signature. * If the source blob is public, no authentication is required * to perform the operation. Here are some examples of source object URLs: * - https://myaccount.blob.core.windows.net/mycontainer/myblob * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= * @param options - Optional parameters. */ async syncUploadFromURL(sourceURL, options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlockBlobClient-syncUploadFromURL", options, async (updatedOptions) => { var _a$2, _b$1, _c$1, _d$1, _e, _f; return assertResponse(await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), sourceModifiedAccessConditions: { sourceIfMatch: (_b$1 = options.sourceConditions) === null || _b$1 === void 0 ? void 0 : _b$1.ifMatch, sourceIfModifiedSince: (_c$1 = options.sourceConditions) === null || _c$1 === void 0 ? void 0 : _c$1.ifModifiedSince, sourceIfNoneMatch: (_d$1 = options.sourceConditions) === null || _d$1 === void 0 ? 
void 0 : _d$1.ifNoneMatch, sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, sourceIfTags: (_f = options.sourceConditions) === null || _f === void 0 ? void 0 : _f.tagConditions }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags, tracingOptions: updatedOptions.tracingOptions }))); }); } /** * Uploads the specified block to the block blob's "staging area" to be later * committed by a call to commitBlockList. * @see https://learn.microsoft.com/rest/api/storageservices/put-block * * @param blockId - A 64-byte value that is base64-encoded * @param body - Data to upload to the staging area. * @param contentLength - Number of bytes to upload. * @param options - Options to the Block Blob Stage Block operation. * @returns Response data for the Block Blob Stage Block operation. */ async stageBlock(blockId$1, body$1, contentLength$1, options = {}) { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlockBlobClient-stageBlock", options, async (updatedOptions) => { return assertResponse(await this.blockBlobContext.stageBlock(blockId$1, contentLength$1, body$1, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { onUploadProgress: options.onProgress }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions })); }); } /** * The Stage Block From URL operation creates a new block to be committed as part * of a blob where the contents are read from a URL. * This API is available starting in version 2018-03-28. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/put-block-from-url * * @param blockId - A 64-byte value that is base64-encoded * @param sourceURL - Specifies the URL of the blob. The value * may be a URL of up to 2 KB in length that specifies a blob. * The value should be URL-encoded as it would appear * in a request URI. The source blob must either be public * or must be authenticated via a shared access signature. * If the source blob is public, no authentication is required * to perform the operation. Here are some examples of source object URLs: * - https://myaccount.blob.core.windows.net/mycontainer/myblob * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= * @param offset - From which position of the blob to download, greater than or equal to 0 * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined * @param options - Options to the Block Blob Stage Block From URL operation. * @returns Response data for the Block Blob Stage Block From URL operation. 
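*
* Example usage (a hedged sketch, not from the original docs; assumes a readable `sourceURL` and an existing `blockBlobClient` for the destination):
*
* ```js
* // Stage the first 4 MiB of the source as one block, then commit it.
* const blockId = Buffer.from("block-000001").toString("base64");
* await blockBlobClient.stageBlockFromURL(blockId, sourceURL, 0, 4 * 1024 * 1024);
* await blockBlobClient.commitBlockList([blockId]);
* ```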
*/ async stageBlockFromURL(blockId$1, sourceURL, offset = 0, count, options = {}) { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlockBlobClient-stageBlockFromURL", options, async (updatedOptions) => { return assertResponse(await this.blockBlobContext.stageBlockFromURL(blockId$1, 0, sourceURL, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? void 0 : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tracingOptions: updatedOptions.tracingOptions })); }); } /** * Writes a blob by specifying the list of block IDs that make up the blob. * In order to be written as part of a blob, a block must have been successfully written * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to * update a blob by uploading only those blocks that have changed, then committing the new and existing * blocks together. Any blocks not specified in the block list are permanently deleted. * @see https://learn.microsoft.com/rest/api/storageservices/put-block-list * * @param blocks - Array of 64-byte values that are base64-encoded * @param options - Options to the Block Blob Commit Block List operation. * @returns Response data for the Block Blob Commit Block List operation. */ async commitBlockList(blocks$1, options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlockBlobClient-commitBlockList", options, async (updatedOptions) => { var _a$2, _b$1, _c$1; return assertResponse(await this.blockBlobContext.commitBlockList({ latest: blocks$1 }, { abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b$1 = options.immutabilityPolicy) === null || _b$1 === void 0 ? void 0 : _b$1.expiriesOn, immutabilityPolicyMode: (_c$1 = options.immutabilityPolicy) === null || _c$1 === void 0 ? void 0 : _c$1.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), tracingOptions: updatedOptions.tracingOptions })); }); } /** * Returns the list of blocks that have been uploaded as part of a block blob * using the specified block list filter. * @see https://learn.microsoft.com/rest/api/storageservices/get-block-list * * @param listType - Specifies whether to return the list of committed blocks, * the list of uncommitted blocks, or both lists together. * @param options - Options to the Block Blob Get Block List operation. * @returns Response data for the Block Blob Get Block List operation.
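*
* Example usage (a hedged sketch, not from the original docs; assumes an existing `blockBlobClient`):
*
* ```js
* const blockList = await blockBlobClient.getBlockList("committed");
* for (const block of blockList.committedBlocks) {
*   console.log(`Block ${block.name}: ${block.size} bytes`);
* }
* ```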
*/ async getBlockList(listType$1, options = {}) { return tracingClient.withSpan("BlockBlobClient-getBlockList", options, async (updatedOptions) => { var _a$2; const res = assertResponse(await this.blockBlobContext.getBlockList(listType$1, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), tracingOptions: updatedOptions.tracingOptions })); if (!res.committedBlocks) res.committedBlocks = []; if (!res.uncommittedBlocks) res.uncommittedBlocks = []; return res; }); } /** * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. * * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} * to commit the block list. * * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is * `blobContentType`, enabling the browser to provide * functionality based on file type. * * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView * @param options - */ async uploadData(data, options = {}) { return tracingClient.withSpan("BlockBlobClient-uploadData", options, async (updatedOptions) => { if (coreUtil.isNode) { let buffer$2; if (data instanceof Buffer) buffer$2 = data; else if (data instanceof ArrayBuffer) buffer$2 = Buffer.from(data); else { data = data; buffer$2 = Buffer.from(data.buffer, data.byteOffset, data.byteLength); } return this.uploadSeekableInternal((offset, size) => buffer$2.slice(offset, offset + size), buffer$2.byteLength, updatedOptions); } else { const browserBlob = new Blob([data]); return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); } }); } /** * ONLY AVAILABLE IN BROWSERS. * * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to a block blob. * * When the buffer length is less than or equal to 256MB, this method will use 1 upload call to finish the upload. * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call * {@link commitBlockList} to commit the block list. * * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is * `blobContentType`, enabling the browser to provide * functionality based on file type. * * @deprecated Use {@link uploadData} instead. * * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView * @param options - Options to upload browser data. * @returns Response data for the Blob Upload operation. */ async uploadBrowserData(browserData, options = {}) { return tracingClient.withSpan("BlockBlobClient-uploadBrowserData", options, async (updatedOptions) => { const browserBlob = new Blob([browserData]); return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); }); } /** * * Uploads data to a block blob. Requires a bodyFactory as the data source, * which needs to return a {@link HttpRequestBody} object with the offset and size provided.
* * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} * to commit the block list. * * @param bodyFactory - * @param size - size of the data to upload. * @param options - Options to Upload to Block Blob operation. * @returns Response data for the Blob Upload operation. */ async uploadSeekableInternal(bodyFactory, size, options = {}) { var _a$2, _b$1; let blockSize = (_a$2 = options.blockSize) !== null && _a$2 !== void 0 ? _a$2 : 0; if (blockSize < 0 || blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); const maxSingleShotSize = (_b$1 = options.maxSingleShotSize) !== null && _b$1 !== void 0 ? _b$1 : BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; if (maxSingleShotSize < 0 || maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); if (blockSize === 0) { if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) throw new RangeError(`${size} is too large to upload to a block blob.`); if (size > maxSingleShotSize) { blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); if (blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; } } if (!options.blobHTTPHeaders) options.blobHTTPHeaders = {}; if (!options.conditions) options.conditions = {}; return tracingClient.withSpan("BlockBlobClient-uploadSeekableInternal", options, async (updatedOptions) => { if (size <= maxSingleShotSize) return assertResponse(await this.upload(bodyFactory(0, size), size, updatedOptions)); const numBlocks = Math.floor((size - 1) / blockSize) + 1; if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) throw new RangeError(`The buffer's size is too big or the BlockSize is too small; the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); const blockList = []; const blockIDPrefix = coreUtil.randomUUID(); let transferProgress = 0; const batch = new Batch(options.concurrency); for (let i = 0; i < numBlocks; i++) batch.addOperation(async () => { const blockID = generateBlockID(blockIDPrefix, i); const start = blockSize * i; const end = i === numBlocks - 1 ? size : start + blockSize; const contentLength$1 = end - start; blockList.push(blockID); await this.stageBlock(blockID, bodyFactory(start, contentLength$1), contentLength$1, { abortSignal: options.abortSignal, conditions: options.conditions, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions }); transferProgress += contentLength$1; if (options.onProgress) options.onProgress({ loadedBytes: transferProgress }); }); await batch.do(); return this.commitBlockList(blockList, updatedOptions); }); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * Uploads a local file in blocks to a block blob. * * When the file size is less than or equal to 256MB, this method will use 1 upload call to finish the upload. * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList * to commit the block list. * * @param filePath - Full path of local file * @param options - Options to Upload to Block Blob operation. * @returns Response data for the Blob Upload operation.
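*
* Example usage (a hedged sketch, not from the original docs; assumes an existing `blockBlobClient` and a local file at `./data.bin`):
*
* ```js
* await blockBlobClient.uploadFile("./data.bin", {
*   blockSize: 4 * 1024 * 1024, // 4 MiB per block
*   concurrency: 20,
*   onProgress: (ev) => console.log(`${ev.loadedBytes} bytes uploaded`)
* });
* ```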
*/ async uploadFile(filePath, options = {}) { return tracingClient.withSpan("BlockBlobClient-uploadFile", options, async (updatedOptions) => { const size = (await fsStat(filePath)).size; return this.uploadSeekableInternal((offset, count) => { return () => fsCreateReadStream(filePath, { autoClose: true, end: count ? offset + count - 1 : Infinity, start: offset }); }, size, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); }); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * Uploads a Node.js Readable stream into a block blob. * * PERFORMANCE IMPROVEMENT TIPS: * * Set the input stream's highWaterMark to the same value as the bufferSize * parameter to avoid Buffer.concat() operations. * * @param stream - Node.js Readable stream * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, * positive correlation with max uploading concurrency. Default value is 5 * @param options - Options to Upload Stream to Block Blob operation. * @returns Response data for the Blob Upload operation. */ async uploadStream(stream$3, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { if (!options.blobHTTPHeaders) options.blobHTTPHeaders = {}; if (!options.conditions) options.conditions = {}; return tracingClient.withSpan("BlockBlobClient-uploadStream", options, async (updatedOptions) => { let blockNum = 0; const blockIDPrefix = coreUtil.randomUUID(); let transferProgress = 0; const blockList = []; const scheduler = new BufferScheduler( stream$3, bufferSize, maxConcurrency, async (body$1, length) => { const blockID = generateBlockID(blockIDPrefix, blockNum); blockList.push(blockID); blockNum++; await this.stageBlock(blockID, body$1, length, { customerProvidedKey: options.customerProvidedKey, conditions: options.conditions, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions }); transferProgress += length; if (options.onProgress) options.onProgress({ loadedBytes: transferProgress }); },
// Use a concurrency smaller than maxConcurrency so buffers keep arriving while
// outgoing handlers run; this reduces the chance that an outgoing handler blocks
// waiting for stream data. The outgoing queue shouldn't be empty.
Math.ceil(maxConcurrency / 4 * 3) ); await scheduler.do(); return assertResponse(await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions }))); }); } }; /** * PageBlobClient defines a set of operations applicable to page blobs.
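*
* Example usage (a hedged sketch, not from the original docs; assumes an existing `containerClient`):
*
* ```js
* const pageBlobClient = containerClient.getPageBlobClient("disk.vhd");
* await pageBlobClient.create(512 * 1024); // page blob sizes must be multiples of 512 bytes
* ```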
*/ var PageBlobClient = class PageBlobClient extends BlobClient { constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { let pipeline; let url$1; options = options || {}; if (isPipelineLike(credentialOrPipelineOrContainerName)) { url$1 = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } else if (coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { url$1 = urlOrConnectionString; options = blobNameOrOptions; pipeline = newPipeline(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { url$1 = urlOrConnectionString; pipeline = newPipeline(new AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { const containerName = credentialOrPipelineOrContainerName; const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url$1 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); pipeline = newPipeline(sharedKeyCredential, options); } else throw new Error("Account connection string is only supported in Node.js environment"); else if (extractedCreds.kind === "SASConnString") { url$1 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; pipeline = newPipeline(new AnonymousCredential(), options); } else throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } else throw new Error("Expecting non-empty strings for containerName and blobName parameters"); super(url$1, pipeline); this.pageBlobContext = this.storageClientContext.pageBlob; } /** * Creates a new PageBlobClient object identical to the source but with the * specified snapshot timestamp. * Provide "" will remove the snapshot and return a Client to the base blob. * * @param snapshot - The snapshot timestamp. * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. */ withSnapshot(snapshot$1) { return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot$1.length === 0 ? void 0 : snapshot$1), this.pipeline); } /** * Creates a page blob of the specified length. Call uploadPages to upload data * to a page blob. * @see https://learn.microsoft.com/rest/api/storageservices/put-blob * * @param size - size of the page blob. * @param options - Options to the Page Blob Create operation. * @returns Response data for the Page Blob Create operation.
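*
* Example usage (a hedged sketch, not from the original docs; assumes an existing `pageBlobClient`):
*
* ```js
* await pageBlobClient.create(1024);
* const page = Buffer.alloc(512, "a"); // writes must be 512-byte aligned
* await pageBlobClient.uploadPages(page, 0, page.length);
* ```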
*/ async create(size, options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("PageBlobClient-create", options, async (updatedOptions) => { var _a$2, _b$1, _c$1; return assertResponse(await this.pageBlobContext.create(0, size, { abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b$1 = options.immutabilityPolicy) === null || _b$1 === void 0 ? void 0 : _b$1.expiriesOn, immutabilityPolicyMode: (_c$1 = options.immutabilityPolicy) === null || _c$1 === void 0 ? void 0 : _c$1.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), tracingOptions: updatedOptions.tracingOptions })); }); } /** * Creates a page blob of the specified length. Call uploadPages to upload data * to a page blob. If the blob with the same name already exists, the content * of the existing blob will remain unchanged. * @see https://learn.microsoft.com/rest/api/storageservices/put-blob * * @param size - size of the page blob. * @param options - */ async createIfNotExists(size, options = {}) { return tracingClient.withSpan("PageBlobClient-createIfNotExists", options, async (updatedOptions) => { var _a$2, _b$1; try { const conditions = { ifNoneMatch: ETagAny }; const res = assertResponse(await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions }))); return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a$2 = e.details) === null || _a$2 === void 0 ? void 0 : _a$2.errorCode) === "BlobAlreadyExists") return Object.assign(Object.assign({ succeeded: false }, (_b$1 = e.response) === null || _b$1 === void 0 ? void 0 : _b$1.parsedHeaders), { _response: e.response }); throw e; } }); } /** * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. * @see https://learn.microsoft.com/rest/api/storageservices/put-page * * @param body - Data to upload * @param offset - Offset of destination page blob * @param count - Content length of the body, also number of bytes to be uploaded * @param options - Options to the Page Blob Upload Pages operation. * @returns Response data for the Page Blob Upload Pages operation. */ async uploadPages(body$1, offset, count, options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("PageBlobClient-uploadPages", options, async (updatedOptions) => { var _a$2; return assertResponse(await this.pageBlobContext.uploadPages(count, body$1, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ?
void 0 : _a$2.tagConditions }), requestOptions: { onUploadProgress: options.onProgress }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions })); }); } /** * The Upload Pages operation writes a range of pages to a page blob where the * contents are read from a URL. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/put-page-from-url * * @param sourceURL - Specify a URL to the copy source, Shared Access Signature (SAS) may be needed for authentication * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob * @param destOffset - Offset of destination page blob * @param count - Number of bytes to be uploaded from source page blob * @param options - */ async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("PageBlobClient-uploadPagesFromURL", options, async (updatedOptions) => { var _a$2, _b$1, _c$1, _d$1, _e; return assertResponse(await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), { abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), sourceModifiedAccessConditions: { sourceIfMatch: (_b$1 = options.sourceConditions) === null || _b$1 === void 0 ? void 0 : _b$1.ifMatch, sourceIfModifiedSince: (_c$1 = options.sourceConditions) === null || _c$1 === void 0 ? void 0 : _c$1.ifModifiedSince, sourceIfNoneMatch: (_d$1 = options.sourceConditions) === null || _d$1 === void 0 ? void 0 : _d$1.ifNoneMatch, sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tracingOptions: updatedOptions.tracingOptions })); }); } /** * Frees the specified pages from the page blob. * @see https://learn.microsoft.com/rest/api/storageservices/put-page * * @param offset - Starting byte position of the pages to clear. * @param count - Number of bytes to clear. * @param options - Options to the Page Blob Clear Pages operation. * @returns Response data for the Page Blob Clear Pages operation. */ async clearPages(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; return tracingClient.withSpan("PageBlobClient-clearPages", options, async (updatedOptions) => { var _a$2; return assertResponse(await this.pageBlobContext.clearPages(0, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ?
void 0 : _a$2.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions })); }); } /** * Returns the list of valid page ranges for a page blob or snapshot of a page blob. * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges * * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param options - Options to the Page Blob Get Ranges operation. * @returns Response data for the Page Blob Get Ranges operation. */ async getPageRanges(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; return tracingClient.withSpan("PageBlobClient-getPageRanges", options, async (updatedOptions) => { var _a$2; const response = assertResponse(await this.pageBlobContext.getPageRanges({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), range: rangeToString({ offset, count }), tracingOptions: updatedOptions.tracingOptions })); return rangeResponseFromModel(response); }); } /** * getPageRangesSegment returns a single segment of page ranges starting from the * specified Marker. Use an empty Marker to start enumeration from the beginning. * After getting a segment, process it, and then call getPageRangesSegment again * (passing the previously-returned Marker) to get the next segment. * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges * * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. * @param options - Options to PageBlob Get Page Ranges Segment operation. */ async listPageRangesSegment(offset = 0, count, marker$1, options = {}) { return tracingClient.withSpan("PageBlobClient-getPageRangesSegment", options, async (updatedOptions) => { var _a$2; return assertResponse(await this.pageBlobContext.getPageRanges({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), range: rangeToString({ offset, count }), marker: marker$1, maxPageSize: options.maxPageSize, tracingOptions: updatedOptions.tracingOptions })); }); } /** * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} * * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param marker - A string value that identifies the portion of * the page ranges to be returned with the next listing operation. The * operation returns the ContinuationToken value within the response body if the * listing operation did not return all page ranges remaining within the current page. * The ContinuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of * items. The marker value is opaque to the client. * @param options - Options to List Page Ranges operation.
*/ listPageRangeItemSegments() { return tslib.__asyncGenerator(this, arguments, function* listPageRangeItemSegments_1(offset = 0, count, marker$1, options = {}) { let getPageRangeItemSegmentsResponse; if (!!marker$1 || marker$1 === void 0) do { getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesSegment(offset, count, marker$1, options)); marker$1 = getPageRangeItemSegmentsResponse.continuationToken; yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); } while (marker$1); }); } /** * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects * * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param options - Options to List Page Ranges operation. */ listPageRangeItems() { return tslib.__asyncGenerator(this, arguments, function* listPageRangeItems_1(offset = 0, count, options = {}) { var _a$2, e_1, _b$1, _c$1; let marker$1; try { for (var _d$1 = true, _e = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker$1, options)), _f; _f = yield tslib.__await(_e.next()), _a$2 = _f.done, !_a$2; _d$1 = true) { _c$1 = _f.value; _d$1 = false; const getPageRangesSegment = _c$1; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (!_d$1 && !_a$2 && (_b$1 = _e.return)) yield tslib.__await(_b$1.call(_e)); } finally { if (e_1) throw e_1.error; } } }); } /** * Returns an async iterable iterator to the list of page ranges for a page blob. * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges * * .byPage() returns an async iterable iterator to the list of page ranges for a page blob. * * Example using `for await` syntax: * * ```js * // Get the pageBlobClient before you run these snippets, * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` * let i = 1; * for await (const pageRange of pageBlobClient.listPageRanges()) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * ``` * * Example using `iter.next()`: * * ```js * let i = 1; * let iter = pageBlobClient.listPageRanges(); * let pageRangeItem = await iter.next(); * while (!pageRangeItem.done) { * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); * pageRangeItem = await iter.next(); * } * ``` * * Example using `byPage()`: * * ```js * // passing optional maxPageSize in the page settings * let i = 1; * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { * for (const pageRange of response) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * } * ``` * * Example using paging with a marker: * * ```js * let i = 1; * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * * // Prints 2 page ranges * for (const pageRange of response) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * * // Gets next marker * let marker = response.continuationToken; * * // Passing next marker as continuationToken * * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * * // Prints 10 page ranges * for (const pageRange of response) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); *
} * ``` * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param options - Options to the Page Blob Get Ranges operation. * @returns An asyncIterableIterator that supports paging. */ listPageRanges(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; const iter = this.listPageRangeItems(offset, count, options); return { next() { return iter.next(); }, [Symbol.asyncIterator]() { return this; }, byPage: (settings = {}) => { return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); } }; } /** * Gets the collection of page ranges that differ between a specified snapshot and this page blob. * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges * * @param offset - Starting byte position of the page blob * @param count - Number of bytes to get ranges diff. * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. * @param options - Options to the Page Blob Get Page Ranges Diff operation. * @returns Response data for the Page Blob Get Page Range Diff operation. */ async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { options.conditions = options.conditions || {}; return tracingClient.withSpan("PageBlobClient-getPageRangesDiff", options, async (updatedOptions) => { var _a$2; const result = assertResponse(await this.pageBlobContext.getPageRangesDiff({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }), tracingOptions: updatedOptions.tracingOptions })); return rangeResponseFromModel(result); }); } /** * getPageRangesDiffSegment returns a single segment of page ranges starting from the * specified Marker for the difference between a previous snapshot and the target page blob. * Use an empty Marker to start enumeration from the beginning. * After getting a segment, process it, and then call getPageRangesDiffSegment again * (passing the previously-returned Marker) to get the next segment. * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges * * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. * @param options - Options to the Page Blob Get Page Ranges Diff operation. */ async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker$1, options = {}) { return tracingClient.withSpan("PageBlobClient-getPageRangesDiffSegment", options, async (updatedOptions) => { var _a$2; return assertResponse(await this.pageBlobContext.getPageRangesDiff({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a$2 = options === null || options === void 0 ? void 0 : options.conditions) === null || _a$2 === void 0 ?
void 0 : _a$2.tagConditions }), prevsnapshot: prevSnapshotOrUrl, range: rangeToString({ offset, count }), marker: marker$1, maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize, tracingOptions: updatedOptions.tracingOptions })); }); } /** * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} * * * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. * @param marker - A string value that identifies the portion of * the get of page ranges to be returned with the next getting operation. The * operation returns the ContinuationToken value within the response body if the * getting operation did not return all page ranges remaining within the current page. * The ContinuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of get * items. The marker value is opaque to the client. * @param options - Options to the Page Blob Get Page Ranges Diff operation. */ listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker$1, options) { return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItemSegments_1() { let getPageRangeItemSegmentsResponse; if (!!marker$1 || marker$1 === void 0) do { getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker$1, options)); marker$1 = getPageRangeItemSegmentsResponse.continuationToken; yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); } while (marker$1); }); } /** * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects * * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. * @param options - Options to the Page Blob Get Page Ranges Diff operation. */ listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItems_1() { var _a$2, e_2, _b$1, _c$1; let marker$1; try { for (var _d$1 = true, _e = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker$1, options)), _f; _f = yield tslib.__await(_e.next()), _a$2 = _f.done, !_a$2; _d$1 = true) { _c$1 = _f.value; _d$1 = false; const getPageRangesSegment = _c$1; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); } } catch (e_2_1) { e_2 = { error: e_2_1 }; } finally { try { if (!_d$1 && !_a$2 && (_b$1 = _e.return)) yield tslib.__await(_b$1.call(_e)); } finally { if (e_2) throw e_2.error; } } }); } /** * Returns an async iterable iterator to the list of page ranges that differ between a specified snapshot and this page blob. * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges * * .byPage() returns an async iterable iterator to the list of page ranges that differ between a specified snapshot and this page blob.
* * Example using `for await` syntax: * * ```js * // Get the pageBlobClient before you run these snippets, * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` * let i = 1; * for await (const pageRange of pageBlobClient.listPageRangesDiff()) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * ``` * * Example using `iter.next()`: * * ```js * let i = 1; * let iter = pageBlobClient.listPageRangesDiff(); * let pageRangeItem = await iter.next(); * while (!pageRangeItem.done) { * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); * pageRangeItem = await iter.next(); * } * ``` * * Example using `byPage()`: * * ```js * // passing optional maxPageSize in the page settings * let i = 1; * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) { * for (const pageRange of response) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * } * ``` * * Example using paging with a marker: * * ```js * let i = 1; * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * * // Prints 2 page ranges * for (const pageRange of response) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * * // Gets next marker * let marker = response.continuationToken; * * // Passing next marker as continuationToken * * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * * // Prints 10 page ranges * for (const pageRange of response) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * ``` * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. * @param options - Options to the Page Blob Get Ranges operation. * @returns An asyncIterableIterator that supports paging. */ listPageRangesDiff(offset, count, prevSnapshot, options = {}) { options.conditions = options.conditions || {}; const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options)); return { next() { return iter.next(); }, [Symbol.asyncIterator]() { return this; }, byPage: (settings = {}) => { return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); } }; } /** * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. * @see https://learn.microsoft.com/rest/api/storageservices/get-page-ranges * * @param offset - Starting byte position of the page blob * @param count - Number of bytes to get ranges diff. * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. * @param options - Options to the Page Blob Get Page Ranges Diff operation. * @returns Response data for the Page Blob Get Page Range Diff operation.
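*
* Example usage (a hedged sketch, not from the original source: `pageBlobClient` and `managedDiskSnapshotUrl` are
* placeholders, and the `pageRange` property name on the parsed range response is an assumption):
*
* ```js
* // List the ranges that changed since the managed-disk snapshot at the given URL.
* const diff = await pageBlobClient.getPageRangesDiffForManagedDisks(0, 4096, managedDiskSnapshotUrl);
* for (const range of diff.pageRange ?? []) {
*   console.log(`Changed range: offset ${range.offset}, count ${range.count}`);
* }
* ```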
*/ async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl$1, options = {}) { options.conditions = options.conditions || {}; return tracingClient.withSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options, async (updatedOptions) => { var _a$2; const response = assertResponse(await this.pageBlobContext.getPageRangesDiff({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), prevSnapshotUrl: prevSnapshotUrl$1, range: rangeToString({ offset, count }), tracingOptions: updatedOptions.tracingOptions })); return rangeResponseFromModel(response); }); } /** * Resizes the page blob to the specified size (which must be a multiple of 512). * @see https://learn.microsoft.com/rest/api/storageservices/set-blob-properties * * @param size - Target size * @param options - Options to the Page Blob Resize operation. * @returns Response data for the Page Blob Resize operation. */ async resize(size, options = {}) { options.conditions = options.conditions || {}; return tracingClient.withSpan("PageBlobClient-resize", options, async (updatedOptions) => { var _a$2; return assertResponse(await this.pageBlobContext.resize(size, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions })); }); } /** * Sets a page blob's sequence number. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-blob-properties * * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. * @param sequenceNumber - Required if sequenceNumberAction is max or update * @param options - Options to the Page Blob Update Sequence Number operation. * @returns Response data for the Page Blob Update Sequence Number operation. */ async updateSequenceNumber(sequenceNumberAction$1, sequenceNumber, options = {}) { options.conditions = options.conditions || {}; return tracingClient.withSpan("PageBlobClient-updateSequenceNumber", options, async (updatedOptions) => { var _a$2; return assertResponse(await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction$1, { abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), tracingOptions: updatedOptions.tracingOptions })); }); } /** * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. * The snapshot is copied such that only the differential changes since the previously * copied snapshot are transferred to the destination. * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. * @see https://learn.microsoft.com/rest/api/storageservices/incremental-copy-blob * @see https://learn.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots * * @param copySource - Specifies the name of the source page blob snapshot.
For example, * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= * @param options - Options to the Page Blob Copy Incremental operation. * @returns Response data for the Page Blob Copy Incremental operation. */ async startCopyIncremental(copySource$1, options = {}) { return tracingClient.withSpan("PageBlobClient-startCopyIncremental", options, async (updatedOptions) => { var _a$2; return assertResponse(await this.pageBlobContext.copyIncremental(copySource$1, { abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a$2 = options.conditions) === null || _a$2 === void 0 ? void 0 : _a$2.tagConditions }), tracingOptions: updatedOptions.tracingOptions })); }); } }; async function getBodyAsText(batchResponse) { let buffer$2 = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer$2); buffer$2 = buffer$2.slice(0, responseLength); return buffer$2.toString(); } function utf8ByteLength(str) { return Buffer.byteLength(str); } const HTTP_HEADER_DELIMITER = ": "; const SPACE_DELIMITER = " "; const NOT_FOUND = -1; /** * Util class for parsing batch response. */ var BatchResponseParser = class { constructor(batchResponse, subRequests) { if (!batchResponse || !batchResponse.contentType) throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); if (!subRequests || subRequests.size === 0) throw new RangeError("Invalid state: subRequests is not provided or size is 0."); this.batchResponse = batchResponse; this.subRequests = subRequests; this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; this.batchResponseEnding = `--${this.responseBatchBoundary}--`; } async parseBatchResponse() { if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); const responseBodyAsText = await getBodyAsText(this.batchResponse); const subResponses = responseBodyAsText.split(this.batchResponseEnding)[0].split(this.perResponsePrefix).slice(1); const subResponseCount = subResponses.length; if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); const deserializedSubResponses = new Array(subResponseCount); let subResponsesSucceededCount = 0; let subResponsesFailedCount = 0; for (let index = 0; index < subResponseCount; index++) { const subResponse = subResponses[index]; const deserializedSubResponse = {}; deserializedSubResponse.headers = coreHttpCompat.toHttpHeadersLike(coreRestPipeline.createHttpHeaders()); const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); let subRespHeaderStartFound = false; let subRespHeaderEndFound = false; let subRespFailed = false; let contentId = NOT_FOUND; for (const responseLine of responseLines) { if (!subRespHeaderStartFound) { if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); if (responseLine.startsWith(HTTP_VERSION_1_1)) { subRespHeaderStartFound = true; const tokens = responseLine.split(SPACE_DELIMITER); deserializedSubResponse.status = parseInt(tokens[1]); deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); } continue; } if (responseLine.trim() === "") { if 
(!subRespHeaderEndFound) subRespHeaderEndFound = true; continue; } if (!subRespHeaderEndFound) { if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) throw new Error(`Invalid state: found non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); const tokens = responseLine.split(HTTP_HEADER_DELIMITER); deserializedSubResponse.headers.set(tokens[0], tokens[1]); if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { deserializedSubResponse.errorCode = tokens[1]; subRespFailed = true; } } else { if (!deserializedSubResponse.bodyAsText) deserializedSubResponse.bodyAsText = ""; deserializedSubResponse.bodyAsText += responseLine; } } if (contentId !== NOT_FOUND && Number.isInteger(contentId) && contentId >= 0 && contentId < this.subRequests.size && deserializedSubResponses[contentId] === void 0) { deserializedSubResponse._request = this.subRequests.get(contentId); deserializedSubResponses[contentId] = deserializedSubResponse; } else logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); if (subRespFailed) subResponsesFailedCount++; else subResponsesSucceededCount++; } return { subResponses: deserializedSubResponses, subResponsesSucceededCount, subResponsesFailedCount }; } }; var MutexLockStatus; (function(MutexLockStatus$1) { MutexLockStatus$1[MutexLockStatus$1["LOCKED"] = 0] = "LOCKED"; MutexLockStatus$1[MutexLockStatus$1["UNLOCKED"] = 1] = "UNLOCKED"; })(MutexLockStatus || (MutexLockStatus = {})); /** * An async mutex lock. */ var Mutex = class { /** * Lock for a specific key. If the lock has been acquired by another caller, it * will wait until the lock is released. * * @param key - lock key */ static async lock(key) { return new Promise((resolve) => { if (this.keys[key] === void 0 || this.keys[key] === MutexLockStatus.UNLOCKED) { this.keys[key] = MutexLockStatus.LOCKED; resolve(); } else this.onUnlockEvent(key, () => { this.keys[key] = MutexLockStatus.LOCKED; resolve(); }); }); } /** * Unlock a key. * * @param key - */ static async unlock(key) { return new Promise((resolve) => { if (this.keys[key] === MutexLockStatus.LOCKED) this.emitUnlockEvent(key); delete this.keys[key]; resolve(); }); } static onUnlockEvent(key, handler) { if (this.listeners[key] === void 0) this.listeners[key] = [handler]; else this.listeners[key].push(handler); } static emitUnlockEvent(key) { if (this.listeners[key] !== void 0 && this.listeners[key].length > 0) { const handler = this.listeners[key].shift(); setImmediate(() => { handler.call(this); }); } } }; Mutex.keys = {}; Mutex.listeners = {}; /** * A BlobBatch represents an aggregated set of operations on blobs. * Currently, only `delete` and `setAccessTier` are supported. */ var BlobBatch = class { constructor() { this.batch = "batch"; this.batchRequest = new InnerBatchRequest(); } /** * Get the value of Content-Type for a batch request. * The value must be multipart/mixed with a batch boundary. * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 */ getMultiPartContentType() { return this.batchRequest.getMultipartContentType(); } /** * Get assembled HTTP request body for sub requests. */ getHttpRequestBody() { return this.batchRequest.getHttpRequestBody(); } /** * Get sub requests that are added into the batch request.
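*
* Example usage (a minimal sketch, not from the original source; `containerClient`, `credential` and the blob names
* are placeholders):
*
* ```js
* const batch = new BlobBatch();
* await batch.deleteBlob(`${containerClient.url}/blob1`, credential);
* await batch.deleteBlob(`${containerClient.url}/blob2`, credential);
* // getSubRequests() returns a Map keyed by the 0-based operation index.
* console.log(`Assembled ${batch.getSubRequests().size} sub requests`);
* ```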
*/ getSubRequests() { return this.batchRequest.getSubRequests(); } async addSubRequestInternal(subRequest, assembleSubRequestFunc) { await Mutex.lock(this.batch); try { this.batchRequest.preAddSubRequest(subRequest); await assembleSubRequestFunc(); this.batchRequest.postAddSubRequest(subRequest); } finally { await Mutex.unlock(this.batch); } } setBatchType(batchType) { if (!this.batchType) this.batchType = batchType; if (this.batchType !== batchType) throw new RangeError(`BlobBatch only supports one operation type per batch and it is already being used for ${this.batchType} operations.`); } async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { let url$1; let credential; if (typeof urlOrBlobClient === "string" && (coreUtil.isNode && credentialOrOptions instanceof StorageSharedKeyCredential || credentialOrOptions instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrOptions))) { url$1 = urlOrBlobClient; credential = credentialOrOptions; } else if (urlOrBlobClient instanceof BlobClient) { url$1 = urlOrBlobClient.url; credential = urlOrBlobClient.credential; options = credentialOrOptions; } else throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need to be provided."); if (!options) options = {}; return tracingClient.withSpan("BatchDeleteRequest-addSubRequest", options, async (updatedOptions) => { this.setBatchType("delete"); await this.addSubRequestInternal({ url: url$1, credential }, async () => { await new BlobClient(url$1, this.batchRequest.createPipeline(credential)).delete(updatedOptions); }); }); } async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { let url$1; let credential; let tier$1; if (typeof urlOrBlobClient === "string" && (coreUtil.isNode && credentialOrTier instanceof StorageSharedKeyCredential || credentialOrTier instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrTier))) { url$1 = urlOrBlobClient; credential = credentialOrTier; tier$1 = tierOrOptions; } else if (urlOrBlobClient instanceof BlobClient) { url$1 = urlOrBlobClient.url; credential = urlOrBlobClient.credential; tier$1 = credentialOrTier; options = tierOrOptions; } else throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need to be provided."); if (!options) options = {}; return tracingClient.withSpan("BatchSetTierRequest-addSubRequest", options, async (updatedOptions) => { this.setBatchType("setAccessTier"); await this.addSubRequestInternal({ url: url$1, credential }, async () => { await new BlobClient(url$1, this.batchRequest.createPipeline(credential)).setAccessTier(tier$1, updatedOptions); }); }); } }; /** * Inner batch request class which is responsible for assembling and serializing sub requests. * See https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled. */ var InnerBatchRequest = class { constructor() { this.operationCount = 0; this.body = ""; const tempGuid = coreUtil.randomUUID(); this.boundary = `batch_${tempGuid}`; this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; this.batchRequestEnding = `--${this.boundary}--`; this.subRequests = new Map(); } /** * Create pipeline to assemble sub requests.
The idea here is to use existing * credential and serialization/deserialization components, with additional policies to * filter unnecessary headers, assemble sub requests into the request's body, * and intercept the request from going to the wire. * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. */ createPipeline(credential) { const corePipeline = coreRestPipeline.createEmptyPipeline(); corePipeline.addPolicy(coreClient.serializationPolicy({ stringifyXML: coreXml.stringifyXML, serializerOptions: { xml: { xmlCharKey: "#" } } }), { phase: "Serialize" }); corePipeline.addPolicy(batchHeaderFilterPolicy()); corePipeline.addPolicy(batchRequestAssemblePolicy(this), { afterPhase: "Sign" }); if (coreAuth.isTokenCredential(credential)) corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ credential, scopes: StorageOAuthScopes, challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge } }), { phase: "Sign" }); else if (credential instanceof StorageSharedKeyCredential) corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ accountName: credential.accountName, accountKey: credential.accountKey }), { phase: "Sign" }); const pipeline = new Pipeline([]); pipeline._credential = credential; pipeline._corePipeline = corePipeline; return pipeline; } appendSubRequestToBody(request) { this.body += [ this.subRequestPrefix, `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, "", `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}` ].join(HTTP_LINE_ENDING); for (const [name, value] of request.headers) this.body += `${name}: ${value}${HTTP_LINE_ENDING}`; this.body += HTTP_LINE_ENDING; } preAddSubRequest(subRequest) { if (this.operationCount >= BATCH_MAX_REQUEST) throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); const path$13 = getURLPath(subRequest.url); if (!path$13 || path$13 === "") throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); } postAddSubRequest(subRequest) { this.subRequests.set(this.operationCount, subRequest); this.operationCount++; } getHttpRequestBody() { return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; } getMultipartContentType() { return this.multipartContentType; } getSubRequests() { return this.subRequests; } }; function batchRequestAssemblePolicy(batchRequest) { return { name: "batchRequestAssemblePolicy", async sendRequest(request) { batchRequest.appendSubRequestToBody(request); return { request, status: 200, headers: coreRestPipeline.createHttpHeaders() }; } }; } function batchHeaderFilterPolicy() { return { name: "batchHeaderFilterPolicy", async sendRequest(request, next) { let xMsHeaderName = ""; for (const [name] of request.headers) if (iEqual(name, HeaderConstants.X_MS_VERSION)) xMsHeaderName = name; if (xMsHeaderName !== "") request.headers.delete(xMsHeaderName); return next(request); } }; } /** * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service.
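*
* Example usage (a hedged sketch, not from the original source; assumes an existing `blobServiceClient`, two blob
* URLs and a matching `credential`):
*
* ```js
* const blobBatchClient = blobServiceClient.getBlobBatchClient();
* // Delete several blobs in one round trip instead of one request per blob.
* const resp = await blobBatchClient.deleteBlobs([urlInString0, urlInString1], credential);
* console.log(`Succeeded: ${resp.subResponsesSucceededCount}, failed: ${resp.subResponsesFailedCount}`);
* ```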
* * @see https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch */ var BlobBatchClient = class { constructor(url$1, credentialOrPipeline, options) { let pipeline; if (isPipelineLike(credentialOrPipeline)) pipeline = credentialOrPipeline; else if (!credentialOrPipeline) pipeline = newPipeline(new AnonymousCredential(), options); else pipeline = newPipeline(credentialOrPipeline, options); const storageClientContext = new StorageContextClient(url$1, getCoreClientOptions(pipeline)); const path$13 = getURLPath(url$1); if (path$13 && path$13 !== "/") this.serviceOrContainerContext = storageClientContext.container; else this.serviceOrContainerContext = storageClientContext.service; } /** * Creates a {@link BlobBatch}. * A BlobBatch represents an aggregated set of operations on blobs. */ createBatch() { return new BlobBatch(); } async deleteBlobs(urlsOrBlobClients, credentialOrOptions, options) { const batch = new BlobBatch(); for (const urlOrBlobClient of urlsOrBlobClients) if (typeof urlOrBlobClient === "string") await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); else await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); return this.submitBatch(batch); } async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, options) { const batch = new BlobBatch(); for (const urlOrBlobClient of urlsOrBlobClients) if (typeof urlOrBlobClient === "string") await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); else await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); return this.submitBatch(batch); } /** * Submit a batch request which consists of multiple sub-requests. * * Get `blobBatchClient` and other details before running the snippets. * `blobServiceClient.getBlobBatchClient()` returns the `blobBatchClient`. * * Example usage: * * ```js * let batchRequest = new BlobBatch(); * await batchRequest.deleteBlob(urlInString0, credential0); * await batchRequest.deleteBlob(urlInString1, credential1, { * deleteSnapshots: "include" * }); * const batchResp = await blobBatchClient.submitBatch(batchRequest); * console.log(batchResp.subResponsesSucceededCount); * ``` * * Example using a lease: * * ```js * let batchRequest = new BlobBatch(); * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { * conditions: { leaseId: leaseId } * }); * const batchResp = await blobBatchClient.submitBatch(batchRequest); * console.log(batchResp.subResponsesSucceededCount); * ``` * * @see https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch * * @param batchRequest - A set of Delete or SetTier operations.
* @param options - */ async submitBatch(batchRequest, options = {}) { if (!batchRequest || batchRequest.getSubRequests().size === 0) throw new RangeError("Batch request should contain one or more sub requests."); return tracingClient.withSpan("BlobBatchClient-submitBatch", options, async (updatedOptions) => { const batchRequestBody = batchRequest.getHttpRequestBody(); const rawBatchResponse = assertResponse(await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign({}, updatedOptions))); const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); const responseSummary = await batchResponseParser.parseBatchResponse(); const res = { _response: rawBatchResponse._response, contentType: rawBatchResponse.contentType, errorCode: rawBatchResponse.errorCode, requestId: rawBatchResponse.requestId, clientRequestId: rawBatchResponse.clientRequestId, version: rawBatchResponse.version, subResponses: responseSummary.subResponses, subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, subResponsesFailedCount: responseSummary.subResponsesFailedCount }; return res; }); } }; /** * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. */ var ContainerClient = class extends StorageClient { /** * The name of the container. */ get containerName() { return this._containerName; } constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, options) { let pipeline; let url$1; options = options || {}; if (isPipelineLike(credentialOrPipelineOrContainerName)) { url$1 = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } else if (coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { url$1 = urlOrConnectionString; pipeline = newPipeline(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { url$1 = urlOrConnectionString; pipeline = newPipeline(new AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string") { const containerName = credentialOrPipelineOrContainerName; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url$1 = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); if (!options.proxyOptions) options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); pipeline = newPipeline(sharedKeyCredential, options); } else throw new Error("Account connection string is only supported in Node.js environment"); else if (extractedCreds.kind === "SASConnString") { url$1 = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + "?" 
+ extractedCreds.accountSas; pipeline = newPipeline(new AnonymousCredential(), options); } else throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } else throw new Error("Expecting non-empty strings for containerName parameter"); super(url$1, pipeline); this._containerName = this.getContainerNameFromUrl(); this.containerContext = this.storageClientContext.container; } /** * Creates a new container under the specified account. If the container with * the same name already exists, the operation fails. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/create-container * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * * @param options - Options to Container Create operation. * * * Example usage: * * ```js * const containerClient = blobServiceClient.getContainerClient(""); * const createContainerResponse = await containerClient.create(); * console.log("Container was created successfully", createContainerResponse.requestId); * ``` */ async create(options = {}) { return tracingClient.withSpan("ContainerClient-create", options, async (updatedOptions) => { return assertResponse(await this.containerContext.create(updatedOptions)); }); } /** * Creates a new container under the specified account. If the container with * the same name already exists, it is not changed. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/create-container * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * * @param options - */ async createIfNotExists(options = {}) { return tracingClient.withSpan("ContainerClient-createIfNotExists", options, async (updatedOptions) => { var _a$2, _b$1; try { const res = await this.create(updatedOptions); return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a$2 = e.details) === null || _a$2 === void 0 ? void 0 : _a$2.errorCode) === "ContainerAlreadyExists") return Object.assign(Object.assign({ succeeded: false }, (_b$1 = e.response) === null || _b$1 === void 0 ? void 0 : _b$1.parsedHeaders), { _response: e.response }); else throw e; } }); } /** * Returns true if the Azure container resource represented by this client exists; false otherwise. * * NOTE: use this function with care since an existing container might be deleted by other clients or * applications. Vice versa new containers with the same name might be added by other clients or * applications after this function completes. * * @param options - */ async exists(options = {}) { return tracingClient.withSpan("ContainerClient-exists", options, async (updatedOptions) => { try { await this.getProperties({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions }); return true; } catch (e) { if (e.statusCode === 404) return false; throw e; } }); } /** * Creates a {@link BlobClient} * * @param blobName - A blob name * @returns A new BlobClient object for the given blob name. 
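*
* Example usage (a minimal sketch, not from the original source; the blob name is a placeholder):
*
* ```js
* const blobClient = containerClient.getBlobClient("my-blob.txt");
* const exists = await blobClient.exists();
* console.log(`Blob exists: ${exists}`);
* ```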
*/ getBlobClient(blobName) { return new BlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } /** * Creates an {@link AppendBlobClient} * * @param blobName - An append blob name */ getAppendBlobClient(blobName) { return new AppendBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } /** * Creates a {@link BlockBlobClient} * * @param blobName - A block blob name * * * Example usage: * * ```js * const content = "Hello world!"; * * const blockBlobClient = containerClient.getBlockBlobClient(""); * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); * ``` */ getBlockBlobClient(blobName) { return new BlockBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } /** * Creates a {@link PageBlobClient} * * @param blobName - A page blob name */ getPageBlobClient(blobName) { return new PageBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } /** * Returns all user-defined metadata and system properties for the specified * container. The data returned does not include the container's list of blobs. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-container-properties * * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if * they originally contained uppercase characters. This differs from the metadata keys returned by * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which * will retain their original casing. * * @param options - Options to Container Get Properties operation. */ async getProperties(options = {}) { if (!options.conditions) options.conditions = {}; return tracingClient.withSpan("ContainerClient-getProperties", options, async (updatedOptions) => { return assertResponse(await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), { tracingOptions: updatedOptions.tracingOptions }))); }); } /** * Marks the specified container for deletion. The container and any blobs * contained within it are later deleted during garbage collection. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/delete-container * * @param options - Options to Container Delete operation. */ async delete(options = {}) { if (!options.conditions) options.conditions = {}; return tracingClient.withSpan("ContainerClient-delete", options, async (updatedOptions) => { return assertResponse(await this.containerContext.delete({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions })); }); } /** * Marks the specified container for deletion if it exists. The container and any blobs * contained within it are later deleted during garbage collection. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/delete-container * * @param options - Options to Container Delete operation. */ async deleteIfExists(options = {}) { return tracingClient.withSpan("ContainerClient-deleteIfExists", options, async (updatedOptions) => { var _a$2, _b$1; try { const res = await this.delete(updatedOptions); return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a$2 = e.details) === null || _a$2 === void 0 ? void 0 : _a$2.errorCode) === "ContainerNotFound") return Object.assign(Object.assign({ succeeded: false }, (_b$1 = e.response) === null || _b$1 === void 0 ? 
void 0 : _b$1.parsedHeaders), { _response: e.response }); throw e; } }); } /** * Sets one or more user-defined name-value pairs for the specified container. * * If no options are provided, or if no metadata is defined in the parameter, the container * metadata will be removed. * * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-container-metadata * * @param metadata - Replace existing metadata with this value. * If no value is provided, the existing metadata will be removed. * @param options - Options to Container Set Metadata operation. */ async setMetadata(metadata$1, options = {}) { if (!options.conditions) options.conditions = {}; if (options.conditions.ifUnmodifiedSince) throw new RangeError("the IfUnmodifiedSince condition must have its default value because it is ignored by the blob service"); return tracingClient.withSpan("ContainerClient-setMetadata", options, async (updatedOptions) => { return assertResponse(await this.containerContext.setMetadata({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: metadata$1, modifiedAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions })); }); } /** * Gets the permissions for the specified container. The permissions indicate * whether container data may be accessed publicly. * * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". * * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-container-acl * * @param options - Options to Container Get Access Policy operation. */ async getAccessPolicy(options = {}) { if (!options.conditions) options.conditions = {}; return tracingClient.withSpan("ContainerClient-getAccessPolicy", options, async (updatedOptions) => { const response = assertResponse(await this.containerContext.getAccessPolicy({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions })); const res = { _response: response._response, blobPublicAccess: response.blobPublicAccess, date: response.date, etag: response.etag, errorCode: response.errorCode, lastModified: response.lastModified, requestId: response.requestId, clientRequestId: response.clientRequestId, signedIdentifiers: [], version: response.version }; for (const identifier of response) { let accessPolicy = void 0; if (identifier.accessPolicy) { accessPolicy = { permissions: identifier.accessPolicy.permissions }; if (identifier.accessPolicy.expiresOn) accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); if (identifier.accessPolicy.startsOn) accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); } res.signedIdentifiers.push({ accessPolicy, id: identifier.id }); } return res; }); } /** * Sets the permissions for the specified container. The permissions indicate * whether blobs in a container may be accessed publicly. * * When you set permissions for a container, the existing permissions are replaced. * If no access or containerAcl is provided, the existing container ACL will be * removed. * * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. * During this interval, a shared access signature that is associated with the stored access policy will * fail with status code 403 (Forbidden), until the access policy becomes active.
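*
* Example usage (a hedged sketch, not from the original source; the policy id, dates and permission string are
* placeholders):
*
* ```js
* // Keep the container private but define a stored access policy granting read + list for one day.
* const now = new Date();
* const tomorrow = new Date(now.getTime() + 24 * 60 * 60 * 1000);
* await containerClient.setAccessPolicy(undefined, [
*   { id: "policy-1", accessPolicy: { startsOn: now, expiresOn: tomorrow, permissions: "rl" } }
* ]);
* ```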
* @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-container-acl * * @param access - The level of public access to data in the container. * @param containerAcl - Array of elements each having a unique Id and details of the access policy. * @param options - Options to Container Set Access Policy operation. */ async setAccessPolicy(access$1, containerAcl$1, options = {}) { options.conditions = options.conditions || {}; return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => { const acl = []; for (const identifier of containerAcl$1 || []) acl.push({ accessPolicy: { expiresOn: identifier.accessPolicy.expiresOn ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) : "", permissions: identifier.accessPolicy.permissions, startsOn: identifier.accessPolicy.startsOn ? truncatedISO8061Date(identifier.accessPolicy.startsOn) : "" }, id: identifier.id }); return assertResponse(await this.containerContext.setAccessPolicy({ abortSignal: options.abortSignal, access: access$1, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions })); }); } /** * Get a {@link BlobLeaseClient} that manages leases on the container. * * @param proposeLeaseId - Initial proposed lease Id. * @returns A new BlobLeaseClient object for managing leases on the container. */ getBlobLeaseClient(proposeLeaseId) { return new BlobLeaseClient(this, proposeLeaseId); } /** * Creates a new block blob, or updates the content of an existing block blob. * * Updating an existing block blob overwrites any existing metadata on the blob. * Partial updates are not supported; the content of the existing blob is * overwritten with the new content. To perform a partial update of a block blob's content, * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. * * This is a non-parallel uploading method; please use {@link BlockBlobClient.uploadFile}, * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better * performance with concurrent uploading. * * @see https://learn.microsoft.com/rest/api/storageservices/put-blob * * @param blobName - Name of the block blob to create or update. * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function * which returns a new Readable stream whose offset is from data source beginning. * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a * string including non-Base64/Hex-encoded characters. * @param options - Options to configure the Block Blob Upload operation. * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. */ async uploadBlockBlob(blobName, body$1, contentLength$1, options = {}) { return tracingClient.withSpan("ContainerClient-uploadBlockBlob", options, async (updatedOptions) => { const blockBlobClient = this.getBlockBlobClient(blobName); const response = await blockBlobClient.upload(body$1, contentLength$1, updatedOptions); return { blockBlobClient, response }; }); } /** * Marks the specified blob or snapshot for deletion. The blob is later deleted * during garbage collection. Note that in order to delete a blob, you must delete * all of its snapshots. You can delete both at the same time with the Delete * Blob operation. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/delete-blob * * @param blobName - * @param options - Options to Blob Delete operation.
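*
* Example usage (a minimal sketch, not from the original source; the blob name is a placeholder):
*
* ```js
* // Delete the blob together with its snapshots; pass `versionId` in the options to target one version instead.
* await containerClient.deleteBlob("my-blob.txt", { deleteSnapshots: "include" });
* ```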
* @returns Block blob deletion response data. */ async deleteBlob(blobName, options = {}) { return tracingClient.withSpan("ContainerClient-deleteBlob", options, async (updatedOptions) => { let blobClient = this.getBlobClient(blobName); if (options.versionId) blobClient = blobClient.withVersion(options.versionId); return blobClient.delete(updatedOptions); }); } /** * listBlobFlatSegment returns a single segment of blobs starting from the * specified Marker. Use an empty Marker to start enumeration from the beginning. * After getting a segment, process it, and then call listBlobFlatSegment again * (passing the previously-returned Marker) to get the next segment. * @see https://learn.microsoft.com/rest/api/storageservices/list-blobs * * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. * @param options - Options to Container List Blob Flat Segment operation. */ async listBlobFlatSegment(marker$1, options = {}) { return tracingClient.withSpan("ContainerClient-listBlobFlatSegment", options, async (updatedOptions) => { const response = assertResponse(await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker: marker$1 }, options), { tracingOptions: updatedOptions.tracingOptions }))); const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => { const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) }); return blobItem; }) }) }); return wrappedResponse; }); } /** * listBlobHierarchySegment returns a single segment of blobs starting from * the specified Marker. Use an empty Marker to start enumeration from the * beginning. After getting a segment, process it, and then call listBlobHierarchySegment * again (passing the previously-returned Marker) to get the next segment. * @see https://learn.microsoft.com/rest/api/storageservices/list-blobs * * @param delimiter - The character or string used to define the virtual hierarchy * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. * @param options - Options to Container List Blob Hierarchy Segment operation.
*/ async listBlobHierarchySegment(delimiter$1, marker$1, options = {}) { return tracingClient.withSpan("ContainerClient-listBlobHierarchySegment", options, async (updatedOptions) => { var _a$2; const response = assertResponse(await this.containerContext.listBlobHierarchySegment(delimiter$1, Object.assign(Object.assign({ marker: marker$1 }, options), { tracingOptions: updatedOptions.tracingOptions }))); const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => { const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) }); return blobItem; }), blobPrefixes: (_a$2 = response.segment.blobPrefixes) === null || _a$2 === void 0 ? void 0 : _a$2.map((blobPrefixInternal) => { const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); return blobPrefix; }) }) }); return wrappedResponse; }); } /** * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse * * @param marker - A string value that identifies the portion of * the list of blobs to be returned with the next listing operation. The * operation returns the ContinuationToken value within the response body if the * listing operation did not return all blobs remaining to be listed * with the current page. The ContinuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. * @param options - Options to list blobs operation. */ listSegments(marker_1) { return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker$1, options = {}) { let listBlobsFlatSegmentResponse; if (!!marker$1 || marker$1 === void 0) do { listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker$1, options)); marker$1 = listBlobsFlatSegmentResponse.continuationToken; yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse)); } while (marker$1); }); } /** * Returns an AsyncIterableIterator of {@link BlobItem} objects * * @param options - Options to list blobs operation. */ listItems() { return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) { var _a$2, e_1, _b$1, _c$1; let marker$1; try { for (var _d$1 = true, _e = tslib.__asyncValues(this.listSegments(marker$1, options)), _f; _f = yield tslib.__await(_e.next()), _a$2 = _f.done, !_a$2; _d$1 = true) { _c$1 = _f.value; _d$1 = false; const listBlobsFlatSegmentResponse = _c$1; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (!_d$1 && !_a$2 && (_b$1 = _e.return)) yield tslib.__await(_b$1.call(_e)); } finally { if (e_1) throw e_1.error; } } }); } /** * Returns an async iterable iterator to list all the blobs * under the specified container. * * .byPage() returns an async iterable iterator to list the blobs in pages.
* * Example using `for await` syntax: * * ```js * // Get the containerClient before you run these snippets, * // Can be obtained from `blobServiceClient.getContainerClient("");` * let i = 1; * for await (const blob of containerClient.listBlobsFlat()) { * console.log(`Blob ${i++}: ${blob.name}`); * } * ``` * * Example using `iter.next()`: * * ```js * let i = 1; * let iter = containerClient.listBlobsFlat(); * let blobItem = await iter.next(); * while (!blobItem.done) { * console.log(`Blob ${i++}: ${blobItem.value.name}`); * blobItem = await iter.next(); * } * ``` * * Example using `byPage()`: * * ```js * // passing optional maxPageSize in the page settings * let i = 1; * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { * for (const blob of response.segment.blobItems) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } * ``` * * Example using paging with a marker: * * ```js * let i = 1; * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * * // Prints 2 blob names * for (const blob of response.segment.blobItems) { * console.log(`Blob ${i++}: ${blob.name}`); * } * * // Gets next marker * let marker = response.continuationToken; * * // Passing next marker as continuationToken * * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * * // Prints 10 blob names * for (const blob of response.segment.blobItems) { * console.log(`Blob ${i++}: ${blob.name}`); * } * ``` * * @param options - Options to list blobs. * @returns An asyncIterableIterator that supports paging. */ listBlobsFlat(options = {}) { const include$1 = []; if (options.includeCopy) include$1.push("copy"); if (options.includeDeleted) include$1.push("deleted"); if (options.includeMetadata) include$1.push("metadata"); if (options.includeSnapshots) include$1.push("snapshots"); if (options.includeVersions) include$1.push("versions"); if (options.includeUncommitedBlobs) include$1.push("uncommittedblobs"); if (options.includeTags) include$1.push("tags"); if (options.includeDeletedWithVersions) include$1.push("deletedwithversions"); if (options.includeImmutabilityPolicy) include$1.push("immutabilitypolicy"); if (options.includeLegalHold) include$1.push("legalhold"); if (options.prefix === "") options.prefix = void 0; const updatedOptions = Object.assign(Object.assign({}, options), include$1.length > 0 ? { include: include$1 } : {}); const iter = this.listItems(updatedOptions); return { next() { return iter.next(); }, [Symbol.asyncIterator]() { return this; }, byPage: (settings = {}) => { return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); } }; } /** * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse * * @param delimiter - The character or string used to define the virtual hierarchy * @param marker - A string value that identifies the portion of * the list of blobs to be returned with the next listing operation. The * operation returns the ContinuationToken value within the response body if the * listing operation did not return all blobs remaining to be listed * with the current page. The ContinuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. * @param options - Options to list blobs operation. 
*/ listHierarchySegments(delimiter_1, marker_1) { return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1(delimiter$1, marker$1, options = {}) { let listBlobsHierarchySegmentResponse; if (!!marker$1 || marker$1 === void 0) do { listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter$1, marker$1, options)); marker$1 = listBlobsHierarchySegmentResponse.continuationToken; yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse)); } while (marker$1); }); } /** * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. * * @param delimiter - The character or string used to define the virtual hierarchy * @param options - Options to list blobs operation. */ listItemsByHierarchy(delimiter_1) { return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1(delimiter$1, options = {}) { var _a$2, e_2, _b$1, _c$1; let marker$1; try { for (var _d$1 = true, _e = tslib.__asyncValues(this.listHierarchySegments(delimiter$1, marker$1, options)), _f; _f = yield tslib.__await(_e.next()), _a$2 = _f.done, !_a$2; _d$1 = true) { _c$1 = _f.value; _d$1 = false; const listBlobsHierarchySegmentResponse = _c$1; const segment = listBlobsHierarchySegmentResponse.segment; if (segment.blobPrefixes) for (const prefix$1 of segment.blobPrefixes) yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix$1)); for (const blob of segment.blobItems) yield yield tslib.__await(Object.assign({ kind: "blob" }, blob)); } } catch (e_2_1) { e_2 = { error: e_2_1 }; } finally { try { if (!_d$1 && !_a$2 && (_b$1 = _e.return)) yield tslib.__await(_b$1.call(_e)); } finally { if (e_2) throw e_2.error; } } }); } /** * Returns an async iterable iterator to list all the blobs by hierarchy * under the specified container. * * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages.
* * Example using `for await` syntax: * * ```js * for await (const item of containerClient.listBlobsByHierarchy("/")) { * if (item.kind === "prefix") { * console.log(`\tBlobPrefix: ${item.name}`); * } else { * console.log(`\tBlobItem: name - ${item.name}`); * } * } * ``` * * Example using `iter.next()`: * * ```js * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); * let entity = await iter.next(); * while (!entity.done) { * let item = entity.value; * if (item.kind === "prefix") { * console.log(`\tBlobPrefix: ${item.name}`); * } else { * console.log(`\tBlobItem: name - ${item.name}`); * } * entity = await iter.next(); * } * ``` * * Example using `byPage()`: * * ```js * console.log("Listing blobs by hierarchy by page"); * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { * const segment = response.segment; * if (segment.blobPrefixes) { * for (const prefix of segment.blobPrefixes) { * console.log(`\tBlobPrefix: ${prefix.name}`); * } * } * for (const blob of response.segment.blobItems) { * console.log(`\tBlobItem: name - ${blob.name}`); * } * } * ``` * * Example using paging with a max page size: * * ```js * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); * * let i = 1; * for await (const response of containerClient * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) * .byPage({ maxPageSize: 2 })) { * console.log(`Page ${i++}`); * const segment = response.segment; * * if (segment.blobPrefixes) { * for (const prefix of segment.blobPrefixes) { * console.log(`\tBlobPrefix: ${prefix.name}`); * } * } * * for (const blob of response.segment.blobItems) { * console.log(`\tBlobItem: name - ${blob.name}`); * } * } * ``` * * @param delimiter - The character or string used to define the virtual hierarchy * @param options - Options to list blobs operation. */ listBlobsByHierarchy(delimiter$1, options = {}) { if (delimiter$1 === "") throw new RangeError("delimiter should contain one or more characters"); const include$1 = []; if (options.includeCopy) include$1.push("copy"); if (options.includeDeleted) include$1.push("deleted"); if (options.includeMetadata) include$1.push("metadata"); if (options.includeSnapshots) include$1.push("snapshots"); if (options.includeVersions) include$1.push("versions"); if (options.includeUncommitedBlobs) include$1.push("uncommittedblobs"); if (options.includeTags) include$1.push("tags"); if (options.includeDeletedWithVersions) include$1.push("deletedwithversions"); if (options.includeImmutabilityPolicy) include$1.push("immutabilitypolicy"); if (options.includeLegalHold) include$1.push("legalhold"); if (options.prefix === "") options.prefix = void 0; const updatedOptions = Object.assign(Object.assign({}, options), include$1.length > 0 ? { include: include$1 } : {}); const iter = this.listItemsByHierarchy(delimiter$1, updatedOptions); return { async next() { return iter.next(); }, [Symbol.asyncIterator]() { return this; }, byPage: (settings = {}) => { return this.listHierarchySegments(delimiter$1, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); } }; } /** * The Filter Blobs operation enables callers to list blobs in the container whose tags * match a given search expression. * * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. * The given expression must evaluate to true for a blob to be returned in the results. 
* The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param marker - A string value that identifies the portion of * the list of blobs to be returned with the next listing operation. The * operation returns the continuationToken value within the response body if the * listing operation did not return all blobs remaining to be listed * with the current page. The continuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. */ async findBlobsByTagsSegment(tagFilterSqlExpression, marker$1, options = {}) { return tracingClient.withSpan("ContainerClient-findBlobsByTagsSegment", options, async (updatedOptions) => { const response = assertResponse(await this.containerContext.filterBlobs({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker: marker$1, maxPageSize: options.maxPageSize, tracingOptions: updatedOptions.tracingOptions })); const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { var _a$2; let tagValue = ""; if (((_a$2 = blob.tags) === null || _a$2 === void 0 ? void 0 : _a$2.blobTagSet.length) === 1) tagValue = blob.tags.blobTagSet[0].value; return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); }) }); return wrappedResponse; }); } /** * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. * * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. * The given expression must evaluate to true for a blob to be returned in the results. * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param marker - A string value that identifies the portion of * the list of blobs to be returned with the next listing operation. The * operation returns the continuationToken value within the response body if the * listing operation did not return all blobs remaining to be listed * with the current page. The continuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. */ findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker$1, options = {}) { let response; if (!!marker$1 || marker$1 === void 0) do { response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker$1, options)); response.blobs = response.blobs || []; marker$1 = response.continuationToken; yield yield tslib.__await(response); } while (marker$1); }); } /** * Returns an AsyncIterableIterator for blobs. * * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. * The given expression must evaluate to true for a blob to be returned in the results.
* The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to findBlobsByTagsItems. */ findBlobsByTagsItems(tagFilterSqlExpression_1) { return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { var _a$2, e_3, _b$1, _c$1; let marker$1; try { for (var _d$1 = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker$1, options)), _f; _f = yield tslib.__await(_e.next()), _a$2 = _f.done, !_a$2; _d$1 = true) { _c$1 = _f.value; _d$1 = false; const segment = _c$1; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); } } catch (e_3_1) { e_3 = { error: e_3_1 }; } finally { try { if (!_d$1 && !_a$2 && (_b$1 = _e.return)) yield tslib.__await(_b$1.call(_e)); } finally { if (e_3) throw e_3.error; } } }); } /** * Returns an async iterable iterator to find all blobs with the specified tag * under the specified container. * * .byPage() returns an async iterable iterator to list the blobs in pages. * * Example using `for await` syntax: * * ```js * let i = 1; * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { * console.log(`Blob ${i++}: ${blob.name}`); * } * ``` * * Example using `iter.next()`: * * ```js * let i = 1; * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); * let blobItem = await iter.next(); * while (!blobItem.done) { * console.log(`Blob ${i++}: ${blobItem.value.name}`); * blobItem = await iter.next(); * } * ``` * * Example using `byPage()`: * * ```js * // passing optional maxPageSize in the page settings * let i = 1; * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { * if (response.blobs) { * for (const blob of response.blobs) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } * } * ``` * * Example using paging with a marker: * * ```js * let i = 1; * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * * // Prints 2 blob names * if (response.blobs) { * for (const blob of response.blobs) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } * * // Gets next marker * let marker = response.continuationToken; * // Passing next marker as continuationToken * iterator = containerClient * .findBlobsByTags("tagkey='tagvalue'") * .byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * * // Prints blob names * if (response.blobs) { * for (const blob of response.blobs) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } * ``` * * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. * The given expression must evaluate to true for a blob to be returned in the results. * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to find blobs by tags.
*/ findBlobsByTags(tagFilterSqlExpression, options = {}) { const listSegmentOptions = Object.assign({}, options); const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); return { next() { return iter.next(); }, [Symbol.asyncIterator]() { return this; }, byPage: (settings = {}) => { return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); } }; } /** * The Get Account Information operation returns the sku name and account kind * for the specified account. * The Get Account Information operation is available on service versions beginning * with version 2018-03-28. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-account-information * * @param options - Options to the Service Get Account Info operation. * @returns Response data for the Service Get Account Info operation. */ async getAccountInfo(options = {}) { return tracingClient.withSpan("ContainerClient-getAccountInfo", options, async (updatedOptions) => { return assertResponse(await this.containerContext.getAccountInfo({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); }); } getContainerNameFromUrl() { let containerName; try { const parsedUrl = new URL(this.url); if (parsedUrl.hostname.split(".")[1] === "blob") containerName = parsedUrl.pathname.split("/")[1]; else if (isIpEndpointStyle(parsedUrl)) containerName = parsedUrl.pathname.split("/")[2]; else containerName = parsedUrl.pathname.split("/")[1]; containerName = decodeURIComponent(containerName); if (!containerName) throw new Error("Provided containerName is invalid."); return containerName; } catch (error) { throw new Error("Unable to extract containerName with provided information."); } } /** * Only available for ContainerClient constructed with a shared key credential. * * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties * and parameters passed in. The SAS is signed by the shared key credential of the client. * * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateSasUrl(options) { return new Promise((resolve) => { if (!(this.credential instanceof StorageSharedKeyCredential)) throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); resolve(appendToURLQuery(this.url, sas)); }); } /** * Only available for ContainerClient constructed with a shared key credential. * * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI * based on the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. * * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
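 *
 * For example, the related `generateSasUrl` method defined above can be used as follows (a minimal sketch; assumes `containerClient` was constructed with a `StorageSharedKeyCredential`, and the permissions and expiry shown are illustrative):
 *
 * ```js
 * const sasUrl = await containerClient.generateSasUrl({
 *   permissions: ContainerSASPermissions.parse("r"), // read-only access
 *   expiresOn: new Date(Date.now() + 3600 * 1000), // expires in one hour
 * });
 * console.log(sasUrl);
 * ```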
*/ generateSasStringToSign(options) { if (!(this.credential instanceof StorageSharedKeyCredential)) throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName }, options), this.credential).stringToSign; } /** * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties * and parameters passed in. The SAS is signed by the input user delegation key. * * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateUserDelegationSasUrl(options, userDelegationKey) { return new Promise((resolve) => { const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), userDelegationKey, this.accountName).toString(); resolve(appendToURLQuery(this.url, sas)); }); } /** * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI * based on the client properties and parameters passed in. The SAS is signed by the input user delegation key. * * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateUserDelegationSasStringToSign(options, userDelegationKey) { return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName }, options), userDelegationKey, this.accountName).stringToSign; } /** * Creates a BlobBatchClient object to conduct batch operations. * * @see https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch * * @returns A new BlobBatchClient object for this container. */ getBlobBatchClient() { return new BlobBatchClient(this.url, this.pipeline); } }; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the * values are set, this should be serialized with toString and set as the permissions field on an * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but * the order of the permissions is particular and this class guarantees correctness. */ var AccountSASPermissions = class AccountSASPermissions { constructor() { /** * Permission to read resources and list queues and tables granted. */ this.read = false; /** * Permission to write resources granted. */ this.write = false; /** * Permission to delete blobs and files granted. */ this.delete = false; /** * Permission to delete versions granted. */ this.deleteVersion = false; /** * Permission to list blob containers, blobs, shares, directories, and files granted. */ this.list = false; /** * Permission to add messages, table entities, and append to blobs granted. */ this.add = false; /** * Permission to create blobs and files granted. 
*/ this.create = false; /** * Permission to update messages and table entities granted. */ this.update = false; /** * Permission to get and delete messages granted. */ this.process = false; /** * Specifies Tag access granted. */ this.tag = false; /** * Permission to filter blobs. */ this.filter = false; /** * Permission to set immutability policy. */ this.setImmutabilityPolicy = false; /** * Specifies that Permanent Delete is permitted. */ this.permanentDelete = false; } /** * Parse initializes the AccountSASPermissions fields from a string. * * @param permissions - */ static parse(permissions) { const accountSASPermissions = new AccountSASPermissions(); for (const c of permissions) switch (c) { case "r": accountSASPermissions.read = true; break; case "w": accountSASPermissions.write = true; break; case "d": accountSASPermissions.delete = true; break; case "x": accountSASPermissions.deleteVersion = true; break; case "l": accountSASPermissions.list = true; break; case "a": accountSASPermissions.add = true; break; case "c": accountSASPermissions.create = true; break; case "u": accountSASPermissions.update = true; break; case "p": accountSASPermissions.process = true; break; case "t": accountSASPermissions.tag = true; break; case "f": accountSASPermissions.filter = true; break; case "i": accountSASPermissions.setImmutabilityPolicy = true; break; case "y": accountSASPermissions.permanentDelete = true; break; default: throw new RangeError(`Invalid permission character: ${c}`); } return accountSASPermissions; } /** * Creates a {@link AccountSASPermissions} from a raw object which contains the same keys as it * and boolean values for them. * * @param permissionLike - */ static from(permissionLike) { const accountSASPermissions = new AccountSASPermissions(); if (permissionLike.read) accountSASPermissions.read = true; if (permissionLike.write) accountSASPermissions.write = true; if (permissionLike.delete) accountSASPermissions.delete = true; if (permissionLike.deleteVersion) accountSASPermissions.deleteVersion = true; if (permissionLike.filter) accountSASPermissions.filter = true; if (permissionLike.tag) accountSASPermissions.tag = true; if (permissionLike.list) accountSASPermissions.list = true; if (permissionLike.add) accountSASPermissions.add = true; if (permissionLike.create) accountSASPermissions.create = true; if (permissionLike.update) accountSASPermissions.update = true; if (permissionLike.process) accountSASPermissions.process = true; if (permissionLike.setImmutabilityPolicy) accountSASPermissions.setImmutabilityPolicy = true; if (permissionLike.permanentDelete) accountSASPermissions.permanentDelete = true; return accountSASPermissions; } /** * Produces the SAS permissions string for an Azure Storage account. * Call this method to set AccountSASSignatureValues Permissions field. * * Using this method will guarantee the permissions are in * an order accepted by the service.
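 *
 * A minimal sketch of the parse/toString round-trip (the permission string shown is illustrative):
 *
 * ```js
 * const permissions = AccountSASPermissions.parse("rwdl");
 * console.log(permissions.read, permissions.write); // true true
 * console.log(permissions.toString()); // "rwdl", re-serialized in the order the service accepts
 * ```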
* * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas * */ toString() { const permissions = []; if (this.read) permissions.push("r"); if (this.write) permissions.push("w"); if (this.delete) permissions.push("d"); if (this.deleteVersion) permissions.push("x"); if (this.filter) permissions.push("f"); if (this.tag) permissions.push("t"); if (this.list) permissions.push("l"); if (this.add) permissions.push("a"); if (this.create) permissions.push("c"); if (this.update) permissions.push("u"); if (this.process) permissions.push("p"); if (this.setImmutabilityPolicy) permissions.push("i"); if (this.permanentDelete) permissions.push("y"); return permissions.join(""); } }; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the * values are set, this should be serialized with toString and set as the resources field on an * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but * the order of the resources is particular and this class guarantees correctness. */ var AccountSASResourceTypes = class AccountSASResourceTypes { constructor() { /** * Permission to access service level APIs granted. */ this.service = false; /** * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. */ this.container = false; /** * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. */ this.object = false; } /** * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an * Error if it encounters a character that does not correspond to a valid resource type. * * @param resourceTypes - */ static parse(resourceTypes) { const accountSASResourceTypes = new AccountSASResourceTypes(); for (const c of resourceTypes) switch (c) { case "s": accountSASResourceTypes.service = true; break; case "c": accountSASResourceTypes.container = true; break; case "o": accountSASResourceTypes.object = true; break; default: throw new RangeError(`Invalid resource type: ${c}`); } return accountSASResourceTypes; } /** * Converts the given resource types to a string. * * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas * */ toString() { const resourceTypes = []; if (this.service) resourceTypes.push("s"); if (this.container) resourceTypes.push("c"); if (this.object) resourceTypes.push("o"); return resourceTypes.join(""); } }; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value * to true means that any SAS which uses these permissions will grant access to that service. Once all the * values are set, this should be serialized with toString and set as the services field on an * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but * the order of the services is particular and this class guarantees correctness. */ var AccountSASServices = class AccountSASServices { constructor() { /** * Permission to access blob resources granted. */ this.blob = false; /** * Permission to access file resources granted. */ this.file = false; /** * Permission to access queue resources granted. 
*/ this.queue = false; /** * Permission to access table resources granted. */ this.table = false; } /** * Creates an {@link AccountSASServices} from the specified services string. This method will throw an * Error if it encounters a character that does not correspond to a valid service. * * @param services - */ static parse(services) { const accountSASServices = new AccountSASServices(); for (const c of services) switch (c) { case "b": accountSASServices.blob = true; break; case "f": accountSASServices.file = true; break; case "q": accountSASServices.queue = true; break; case "t": accountSASServices.table = true; break; default: throw new RangeError(`Invalid service character: ${c}`); } return accountSASServices; } /** * Converts the given services to a string. * */ toString() { const services = []; if (this.blob) services.push("b"); if (this.table) services.push("t"); if (this.queue) services.push("q"); if (this.file) services.push("f"); return services.join(""); } }; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual * REST request. * * @see https://learn.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas * * @param accountSASSignatureValues - * @param sharedKeyCredential - */ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { return generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential).sasQueryParameters; } function generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential) { const version$1 = accountSASSignatureValues.version ? accountSASSignatureValues.version : SERVICE_VERSION; if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.setImmutabilityPolicy && version$1 < "2020-08-04") throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.deleteVersion && version$1 < "2019-10-10") throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.permanentDelete && version$1 < "2019-10-10") throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.tag && version$1 < "2019-12-12") throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.filter && version$1 < "2019-12-12") throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); if (accountSASSignatureValues.encryptionScope && version$1 < "2020-12-06") throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); let stringToSign; if (version$1 >= "2020-12-06") stringToSign = [ sharedKeyCredential.accountName, parsedPermissions, parsedServices, parsedResourceTypes, accountSASSignatureValues.startsOn ? 
truncatedISO8061Date(accountSASSignatureValues.startsOn, false) : "", truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", version$1, accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", "" ].join("\n"); else stringToSign = [ sharedKeyCredential.accountName, parsedPermissions, parsedServices, parsedResourceTypes, accountSASSignatureValues.startsOn ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) : "", truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", version$1, "" ].join("\n"); const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return { sasQueryParameters: new SASQueryParameters(version$1, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, accountSASSignatureValues.encryptionScope), stringToSign }; } /** * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you * to manipulate blob containers. */ var BlobServiceClient = class BlobServiceClient extends StorageClient { /** * * Creates an instance of BlobServiceClient from connection string. * * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. * [ Note - Account connection string can only be used in NODE.JS runtime. ] * Account connection string example - * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` * SAS connection string example - * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` * @param options - Optional. Options to configure the HTTP pipeline. */ static fromConnectionString(connectionString, options) { options = options || {}; const extractedCreds = extractConnectionStringParts(connectionString); if (extractedCreds.kind === "AccountConnString") if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); if (!options.proxyOptions) options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); const pipeline = newPipeline(sharedKeyCredential, options); return new BlobServiceClient(extractedCreds.url, pipeline); } else throw new Error("Account connection string is only supported in Node.js environment"); else if (extractedCreds.kind === "SASConnString") { const pipeline = newPipeline(new AnonymousCredential(), options); return new BlobServiceClient(extractedCreds.url + "?" 
+ extractedCreds.accountSas, pipeline); } else throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } constructor(url$1, credentialOrPipeline, options) { let pipeline; if (isPipelineLike(credentialOrPipeline)) pipeline = credentialOrPipeline; else if (coreUtil.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential || credentialOrPipeline instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipeline)) pipeline = newPipeline(credentialOrPipeline, options); else pipeline = newPipeline(new AnonymousCredential(), options); super(url$1, pipeline); this.serviceContext = this.storageClientContext.service; } /** * Creates a {@link ContainerClient} object * * @param containerName - A container name * @returns A new ContainerClient object for the given container name. * * Example usage: * * ```js * const containerClient = blobServiceClient.getContainerClient(""); * ``` */ getContainerClient(containerName) { return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); } /** * Create a Blob container. @see https://learn.microsoft.com/en-us/rest/api/storageservices/create-container * * @param containerName - Name of the container to create. * @param options - Options to configure Container Create operation. * @returns Container creation response and the corresponding container client. */ async createContainer(containerName, options = {}) { return tracingClient.withSpan("BlobServiceClient-createContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(containerName); const containerCreateResponse = await containerClient.create(updatedOptions); return { containerClient, containerCreateResponse }; }); } /** * Deletes a Blob container. * * @param containerName - Name of the container to delete. * @param options - Options to configure Container Delete operation. * @returns Container deletion response. */ async deleteContainer(containerName, options = {}) { return tracingClient.withSpan("BlobServiceClient-deleteContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(containerName); return containerClient.delete(updatedOptions); }); } /** * Restore a previously deleted Blob container. * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. * * @param deletedContainerName - Name of the previously deleted container. * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. * @param options - Options to configure Container Restore operation. * @returns Container deletion response. */ async undeleteContainer(deletedContainerName$1, deletedContainerVersion$1, options = {}) { return tracingClient.withSpan("BlobServiceClient-undeleteContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName$1); const containerContext = containerClient["storageClientContext"].container; const containerUndeleteResponse = assertResponse(await containerContext.restore({ deletedContainerName: deletedContainerName$1, deletedContainerVersion: deletedContainerVersion$1, tracingOptions: updatedOptions.tracingOptions })); return { containerClient, containerUndeleteResponse }; }); } /** * Rename an existing Blob Container. * * @param sourceContainerName - The name of the source container. 
* @param destinationContainerName - The new name of the container. * @param options - Options to configure Container Rename operation. */ async renameContainer(sourceContainerName$1, destinationContainerName, options = {}) { return tracingClient.withSpan("BlobServiceClient-renameContainer", options, async (updatedOptions) => { var _a$2; const containerClient = this.getContainerClient(destinationContainerName); const containerContext = containerClient["storageClientContext"].container; const containerRenameResponse = assertResponse(await containerContext.rename(sourceContainerName$1, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a$2 = options.sourceCondition) === null || _a$2 === void 0 ? void 0 : _a$2.leaseId }))); return { containerClient, containerRenameResponse }; }); } /** * Gets the properties of a storage account’s Blob service, including properties * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties * * @param options - Options to the Service Get Properties operation. * @returns Response data for the Service Get Properties operation. */ async getProperties(options = {}) { return tracingClient.withSpan("BlobServiceClient-getProperties", options, async (updatedOptions) => { return assertResponse(await this.serviceContext.getProperties({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); }); } /** * Sets properties for a storage account’s Blob service endpoint, including properties * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties * * @param properties - * @param options - Options to the Service Set Properties operation. * @returns Response data for the Service Set Properties operation. */ async setProperties(properties, options = {}) { return tracingClient.withSpan("BlobServiceClient-setProperties", options, async (updatedOptions) => { return assertResponse(await this.serviceContext.setProperties(properties, { abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); }); } /** * Retrieves statistics related to replication for the Blob service. It is only * available on the secondary location endpoint when read-access geo-redundant * replication is enabled for the storage account. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats * * @param options - Options to the Service Get Statistics operation. * @returns Response data for the Service Get Statistics operation. */ async getStatistics(options = {}) { return tracingClient.withSpan("BlobServiceClient-getStatistics", options, async (updatedOptions) => { return assertResponse(await this.serviceContext.getStatistics({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); }); } /** * The Get Account Information operation returns the sku name and account kind * for the specified account. * The Get Account Information operation is available on service versions beginning * with version 2018-03-28. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-account-information * * @param options - Options to the Service Get Account Info operation. * @returns Response data for the Service Get Account Info operation. 
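 *
 * Example usage (a minimal sketch; assumes `blobServiceClient` is an authenticated BlobServiceClient):
 *
 * ```js
 * const accountInfo = await blobServiceClient.getAccountInfo();
 * console.log(`SKU: ${accountInfo.skuName}, kind: ${accountInfo.accountKind}`);
 * ```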
*/ async getAccountInfo(options = {}) { return tracingClient.withSpan("BlobServiceClient-getAccountInfo", options, async (updatedOptions) => { return assertResponse(await this.serviceContext.getAccountInfo({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); }); } /** * Returns a list of the containers under the specified account. * @see https://learn.microsoft.com/en-us/rest/api/storageservices/list-containers2 * * @param marker - A string value that identifies the portion of * the list of containers to be returned with the next listing operation. The * operation returns the continuationToken value within the response body if the * listing operation did not return all containers remaining to be listed * with the current page. The continuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. * @param options - Options to the Service List Container Segment operation. * @returns Response data for the Service List Container Segment operation. */ async listContainersSegment(marker$1, options = {}) { return tracingClient.withSpan("BlobServiceClient-listContainersSegment", options, async (updatedOptions) => { return assertResponse(await this.serviceContext.listContainersSegment(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker: marker$1 }, options), { include: typeof options.include === "string" ? [options.include] : options.include, tracingOptions: updatedOptions.tracingOptions }))); }); } /** * The Filter Blobs operation enables callers to list blobs across all containers whose tags * match a given search expression. Filter blobs searches across all containers within a * storage account but can be scoped within the expression to a single container. * * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. * The given expression must evaluate to true for a blob to be returned in the results. * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param marker - A string value that identifies the portion of * the list of blobs to be returned with the next listing operation. The * operation returns the continuationToken value within the response body if the * listing operation did not return all blobs remaining to be listed * with the current page. The continuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. */ async findBlobsByTagsSegment(tagFilterSqlExpression, marker$1, options = {}) { return tracingClient.withSpan("BlobServiceClient-findBlobsByTagsSegment", options, async (updatedOptions) => { const response = assertResponse(await this.serviceContext.filterBlobs({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker: marker$1, maxPageSize: options.maxPageSize, tracingOptions: updatedOptions.tracingOptions })); const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { var _a$2; let tagValue = ""; if (((_a$2 = blob.tags) === null || _a$2 === void 0 ?
void 0 : _a$2.blobTagSet.length) === 1) tagValue = blob.tags.blobTagSet[0].value; return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); }) }); return wrappedResponse; }); } /** * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. * * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. * The given expression must evaluate to true for a blob to be returned in the results. * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param marker - A string value that identifies the portion of * the list of blobs to be returned with the next listing operation. The * operation returns the continuationToken value within the response body if the * listing operation did not return all blobs remaining to be listed * with the current page. The continuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. */ findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker$1, options = {}) { let response; if (!!marker$1 || marker$1 === void 0) do { response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker$1, options)); response.blobs = response.blobs || []; marker$1 = response.continuationToken; yield yield tslib.__await(response); } while (marker$1); }); } /** * Returns an AsyncIterableIterator for blobs. * * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. * The given expression must evaluate to true for a blob to be returned in the results. * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to findBlobsByTagsItems. */ findBlobsByTagsItems(tagFilterSqlExpression_1) { return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { var _a$2, e_1, _b$1, _c$1; let marker$1; try { for (var _d$1 = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker$1, options)), _f; _f = yield tslib.__await(_e.next()), _a$2 = _f.done, !_a$2; _d$1 = true) { _c$1 = _f.value; _d$1 = false; const segment = _c$1; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (!_d$1 && !_a$2 && (_b$1 = _e.return)) yield tslib.__await(_b$1.call(_e)); } finally { if (e_1) throw e_1.error; } } }); } /** * Returns an async iterable iterator to find all blobs with the specified tag * under the specified account. * * .byPage() returns an async iterable iterator to list the blobs in pages.
* * Example using `for await` syntax: * * ```js * let i = 1; * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { * console.log(`Blob ${i++}: ${blob.name}`); * } * ``` * * Example using `iter.next()`: * * ```js * let i = 1; * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); * let blobItem = await iter.next(); * while (!blobItem.done) { * console.log(`Blob ${i++}: ${blobItem.value.name}`); * blobItem = await iter.next(); * } * ``` * * Example using `byPage()`: * * ```js * // passing optional maxPageSize in the page settings * let i = 1; * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { * if (response.blobs) { * for (const blob of response.blobs) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } * } * ``` * * Example using paging with a marker: * * ```js * let i = 1; * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * * // Prints 2 blob names * if (response.blobs) { * for (const blob of response.blobs) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } * * // Gets next marker * let marker = response.continuationToken; * // Passing next marker as continuationToken * iterator = blobServiceClient * .findBlobsByTags("tagkey='tagvalue'") * .byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * * // Prints blob names * if (response.blobs) { * for (const blob of response.blobs) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } * ``` * * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. * The given expression must evaluate to true for a blob to be returned in the results. * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to find blobs by tags. */ findBlobsByTags(tagFilterSqlExpression, options = {}) { const listSegmentOptions = Object.assign({}, options); const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); return { next() { return iter.next(); }, [Symbol.asyncIterator]() { return this; }, byPage: (settings = {}) => { return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); } }; } /** * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses * * @param marker - A string value that identifies the portion of * the list of containers to be returned with the next listing operation. The * operation returns the continuationToken value within the response body if the * listing operation did not return all containers remaining to be listed * with the current page. The continuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. * @param options - Options to list containers operation.
*/ listSegments(marker_1) { return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker$1, options = {}) { let listContainersSegmentResponse; if (!!marker$1 || marker$1 === void 0) do { listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker$1, options)); listContainersSegmentResponse.containerItems = listContainersSegmentResponse.containerItems || []; marker$1 = listContainersSegmentResponse.continuationToken; yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); } while (marker$1); }); } /** * Returns an AsyncIterableIterator for Container Items * * @param options - Options to list containers operation. */ listItems() { return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) { var _a$2, e_2, _b$1, _c$1; let marker$1; try { for (var _d$1 = true, _e = tslib.__asyncValues(this.listSegments(marker$1, options)), _f; _f = yield tslib.__await(_e.next()), _a$2 = _f.done, !_a$2; _d$1 = true) { _c$1 = _f.value; _d$1 = false; const segment = _c$1; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); } } catch (e_2_1) { e_2 = { error: e_2_1 }; } finally { try { if (!_d$1 && !_a$2 && (_b$1 = _e.return)) yield tslib.__await(_b$1.call(_e)); } finally { if (e_2) throw e_2.error; } } }); } /** * Returns an async iterable iterator to list all the containers * under the specified account. * * .byPage() returns an async iterable iterator to list the containers in pages. * * Example using `for await` syntax: * * ```js * let i = 1; * for await (const container of blobServiceClient.listContainers()) { * console.log(`Container ${i++}: ${container.name}`); * } * ``` * * Example using `iter.next()`: * * ```js * let i = 1; * const iter = blobServiceClient.listContainers(); * let containerItem = await iter.next(); * while (!containerItem.done) { * console.log(`Container ${i++}: ${containerItem.value.name}`); * containerItem = await iter.next(); * } * ``` * * Example using `byPage()`: * * ```js * // passing optional maxPageSize in the page settings * let i = 1; * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { * if (response.containerItems) { * for (const container of response.containerItems) { * console.log(`Container ${i++}: ${container.name}`); * } * } * } * ``` * * Example using paging with a marker: * * ```js * let i = 1; * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * * // Prints 2 container names * if (response.containerItems) { * for (const container of response.containerItems) { * console.log(`Container ${i++}: ${container.name}`); * } * } * * // Gets next marker * let marker = response.continuationToken; * // Passing next marker as continuationToken * iterator = blobServiceClient * .listContainers() * .byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * * // Prints 10 container names * if (response.containerItems) { * for (const container of response.containerItems) { * console.log(`Container ${i++}: ${container.name}`); * } * } * ``` * * @param options - Options to list containers. * @returns An asyncIterableIterator that supports paging. 
*/ listContainers(options = {}) { if (options.prefix === "") options.prefix = void 0; const include$1 = []; if (options.includeDeleted) include$1.push("deleted"); if (options.includeMetadata) include$1.push("metadata"); if (options.includeSystem) include$1.push("system"); const listSegmentOptions = Object.assign(Object.assign({}, options), include$1.length > 0 ? { include: include$1 } : {}); const iter = this.listItems(listSegmentOptions); return { next() { return iter.next(); }, [Symbol.asyncIterator]() { return this; }, byPage: (settings = {}) => { return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); } }; } /** * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). * * Retrieves a user delegation key for the Blob service. This is only a valid operation when using * bearer token authentication. * * @see https://learn.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key * * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time */ async getUserDelegationKey(startsOn, expiresOn$1, options = {}) { return tracingClient.withSpan("BlobServiceClient-getUserDelegationKey", options, async (updatedOptions) => { const response = assertResponse(await this.serviceContext.getUserDelegationKey({ startsOn: truncatedISO8061Date(startsOn, false), expiresOn: truncatedISO8061Date(expiresOn$1, false) }, { abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions })); const userDelegationKey = { signedObjectId: response.signedObjectId, signedTenantId: response.signedTenantId, signedStartsOn: new Date(response.signedStartsOn), signedExpiresOn: new Date(response.signedExpiresOn), signedService: response.signedService, signedVersion: response.signedVersion, value: response.value }; const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); return res; }); } /** * Creates a BlobBatchClient object to conduct batch operations. * * @see https://learn.microsoft.com/en-us/rest/api/storageservices/blob-batch * * @returns A new BlobBatchClient object for this service. */ getBlobBatchClient() { return new BlobBatchClient(this.url, this.pipeline); } /** * Only available for BlobServiceClient constructed with a shared key credential. * * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties * and parameters passed in. The SAS is signed by the shared key credential of the client. * * @see https://learn.microsoft.com/en-us/rest/api/storageservices/create-account-sas * * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Defaults to an hour later if not provided. * @param permissions - Specifies the list of permissions to be associated with the SAS. * @param resourceTypes - Specifies the resource types associated with the shared access signature. * @param options - Optional parameters. * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
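 *
 * Example usage (a minimal sketch; assumes `blobServiceClient` was constructed with a `StorageSharedKeyCredential`, and the expiry, permissions, and resource types shown are illustrative):
 *
 * ```js
 * const sasUrl = blobServiceClient.generateAccountSasUrl(
 *   new Date(Date.now() + 3600 * 1000), // expires in one hour
 *   AccountSASPermissions.parse("rl"), // read and list
 *   "sco" // service, container, and object resource types
 * );
 * ```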
*/ generateAccountSasUrl(expiresOn$1, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { if (!(this.credential instanceof StorageSharedKeyCredential)) throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); if (expiresOn$1 === void 0) { const now = new Date(); expiresOn$1 = new Date(now.getTime() + 3600 * 1e3); } const sas = generateAccountSASQueryParameters(Object.assign({ permissions, expiresOn: expiresOn$1, resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString(); return appendToURLQuery(this.url, sas); } /** * Only available for BlobServiceClient constructed with a shared key credential. * * Generates string to sign for a Blob account Shared Access Signature (SAS) URI based on * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. * * @see https://learn.microsoft.com/en-us/rest/api/storageservices/create-account-sas * * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Defaults to an hour later if not provided. * @param permissions - Specifies the list of permissions to be associated with the SAS. * @param resourceTypes - Specifies the resource types associated with the shared access signature. * @param options - Optional parameters. * @returns The string to sign for the account SAS URI. */ generateSasStringToSign(expiresOn$1, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { if (!(this.credential instanceof StorageSharedKeyCredential)) throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); if (expiresOn$1 === void 0) { const now = new Date(); expiresOn$1 = new Date(now.getTime() + 3600 * 1e3); } return generateAccountSASQueryParametersInternal(Object.assign({ permissions, expiresOn: expiresOn$1, resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).stringToSign; } }; /** Known values of {@link EncryptionAlgorithmType} that the service accepts.
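 *
 * `AES256` is currently the only known value, for example:
 *
 * ```js
 * const algorithm = KnownEncryptionAlgorithmType.AES256; // "AES256"
 * ```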
*/ exports.KnownEncryptionAlgorithmType = void 0; (function(KnownEncryptionAlgorithmType) { KnownEncryptionAlgorithmType["AES256"] = "AES256"; })(exports.KnownEncryptionAlgorithmType || (exports.KnownEncryptionAlgorithmType = {})); Object.defineProperty(exports, "RestError", { enumerable: true, get: function() { return coreRestPipeline.RestError; } }); exports.AccountSASPermissions = AccountSASPermissions; exports.AccountSASResourceTypes = AccountSASResourceTypes; exports.AccountSASServices = AccountSASServices; exports.AnonymousCredential = AnonymousCredential; exports.AnonymousCredentialPolicy = AnonymousCredentialPolicy; exports.AppendBlobClient = AppendBlobClient; exports.BaseRequestPolicy = BaseRequestPolicy; exports.BlobBatch = BlobBatch; exports.BlobBatchClient = BlobBatchClient; exports.BlobClient = BlobClient; exports.BlobLeaseClient = BlobLeaseClient; exports.BlobSASPermissions = BlobSASPermissions; exports.BlobServiceClient = BlobServiceClient; exports.BlockBlobClient = BlockBlobClient; exports.ContainerClient = ContainerClient; exports.ContainerSASPermissions = ContainerSASPermissions; exports.Credential = Credential; exports.CredentialPolicy = CredentialPolicy; exports.PageBlobClient = PageBlobClient; exports.Pipeline = Pipeline; exports.SASQueryParameters = SASQueryParameters; exports.StorageBrowserPolicy = StorageBrowserPolicy; exports.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; exports.StorageOAuthScopes = StorageOAuthScopes; exports.StorageRetryPolicy = StorageRetryPolicy; exports.StorageRetryPolicyFactory = StorageRetryPolicyFactory; exports.StorageSharedKeyCredential = StorageSharedKeyCredential; exports.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; exports.generateAccountSASQueryParameters = generateAccountSASQueryParameters; exports.generateBlobSASQueryParameters = generateBlobSASQueryParameters; exports.getBlobServiceAccountAudience = getBlobServiceAccountAudience; exports.isPipelineLike = isPipelineLike; exports.logger = logger; exports.newPipeline = newPipeline; } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/shared/errors.js var require_errors = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/shared/errors.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.UsageError = exports.NetworkError = exports.GHESNotSupportedError = exports.CacheNotFoundError = exports.InvalidResponseError = exports.FilesNotFoundError = void 0; var FilesNotFoundError = class extends Error { constructor(files = []) { let message = "No files were found to upload"; if (files.length > 0) message += `: ${files.join(", ")}`; super(message); this.files = files; this.name = "FilesNotFoundError"; } }; exports.FilesNotFoundError = FilesNotFoundError; var InvalidResponseError = class extends Error { constructor(message) { super(message); this.name = "InvalidResponseError"; } }; exports.InvalidResponseError = InvalidResponseError; var CacheNotFoundError = class extends Error { constructor(message = "Cache not found") { super(message); this.name = "CacheNotFoundError"; } }; exports.CacheNotFoundError = CacheNotFoundError; var GHESNotSupportedError = class extends Error { constructor(message = "@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.") { super(message); this.name = "GHESNotSupportedError"; } }; exports.GHESNotSupportedError = GHESNotSupportedError; var 
NetworkError = class extends Error { constructor(code) { const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`; super(message); this.code = code; this.name = "NetworkError"; } }; exports.NetworkError = NetworkError; NetworkError.isNetworkErrorCode = (code) => { if (!code) return false; return [ "ECONNRESET", "ENOTFOUND", "ETIMEDOUT", "ECONNREFUSED", "EHOSTUNREACH" ].includes(code); }; var UsageError = class extends Error { constructor() { const message = `Cache storage quota has been hit. Unable to upload any new cache entries. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`; super(message); this.name = "UsageError"; } }; exports.UsageError = UsageError; UsageError.isUsageErrorMessage = (msg) => { if (!msg) return false; return msg.includes("insufficient usage"); }; } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/uploadUtils.js var require_uploadUtils = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/uploadUtils.js"(exports) { var __createBinding$13 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ? !m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$13 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$13 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$13(result, mod, k); } __setModuleDefault$13(result, mod); return result; }; var __awaiter$14 = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", { value: true }); exports.uploadCacheArchiveSDK = exports.UploadProgress = void 0; const core$8 = __importStar$13(require_core()); const storage_blob_1$1 = require_dist$1(); const errors_1$1 = require_errors(); /** * Class for tracking the upload state and displaying stats. 
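 *
 * @example
 * A minimal sketch of driving the tracker by hand; the byte counts are
 * arbitrary illustration values (in real use, `onProgress()` feeds `setSentBytes`):
 * ```ts
 * const progress = new UploadProgress(4 * 1024 * 1024); // expected total bytes
 * progress.startDisplayTimer(1000);                     // log stats every second
 * progress.setSentBytes(4 * 1024 * 1024);               // now isDone() === true
 * progress.stopDisplayTimer();                          // prints the final 100% line
 * ```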
*/ var UploadProgress = class { constructor(contentLength$1) { this.contentLength = contentLength$1; this.sentBytes = 0; this.displayedComplete = false; this.startTime = Date.now(); } /** * Sets the number of bytes sent * * @param sentBytes the number of bytes sent */ setSentBytes(sentBytes) { this.sentBytes = sentBytes; } /** * Returns the total number of bytes transferred. */ getTransferredBytes() { return this.sentBytes; } /** * Returns true if the upload is complete. */ isDone() { return this.getTransferredBytes() === this.contentLength; } /** * Prints the current upload stats. Once the upload completes, this will print one * last line and then stop. */ display() { if (this.displayedComplete) return; const transferredBytes = this.sentBytes; const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); core$8.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MB/sec`); if (this.isDone()) this.displayedComplete = true; } /** * Returns a function used to handle TransferProgressEvents. */ onProgress() { return (progress) => { this.setSentBytes(progress.loadedBytes); }; } /** * Starts the timer that displays the stats. * * @param delayInMs the delay between each write */ startDisplayTimer(delayInMs = 1e3) { const displayCallback = () => { this.display(); if (!this.isDone()) this.timeoutHandle = setTimeout(displayCallback, delayInMs); }; this.timeoutHandle = setTimeout(displayCallback, delayInMs); } /** * Stops the timer that displays the stats. As this typically indicates the upload * is complete, this will display one last line, unless the last line has already * been written. */ stopDisplayTimer() { if (this.timeoutHandle) { clearTimeout(this.timeoutHandle); this.timeoutHandle = void 0; } this.display(); } }; exports.UploadProgress = UploadProgress; /** * Uploads a cache archive directly to Azure Blob Storage using the Azure SDK. * This function will display progress information to the console. Concurrency of the * upload is determined by the calling functions. * * @param signedUploadURL the SAS-signed URL to upload the cache archive to * @param archivePath the local path of the cache archive * @param options the upload options * @returns the blob upload response */ function uploadCacheArchiveSDK(signedUploadURL, archivePath, options) { var _a$2; return __awaiter$14(this, void 0, void 0, function* () { const blobClient = new storage_blob_1$1.BlobClient(signedUploadURL); const blockBlobClient = blobClient.getBlockBlobClient(); const uploadProgress = new UploadProgress((_a$2 = options === null || options === void 0 ? void 0 : options.archiveSizeBytes) !== null && _a$2 !== void 0 ? _a$2 : 0); const uploadOptions = { blockSize: options === null || options === void 0 ? void 0 : options.uploadChunkSize, concurrency: options === null || options === void 0 ?
void 0 : options.uploadConcurrency, maxSingleShotSize: 128 * 1024 * 1024, onProgress: uploadProgress.onProgress() }; try { uploadProgress.startDisplayTimer(); core$8.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); if (response._response.status >= 400) throw new errors_1$1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); return response; } catch (error) { core$8.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`); throw error; } finally { uploadProgress.stopDisplayTimer(); } }); } exports.uploadCacheArchiveSDK = uploadCacheArchiveSDK; } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/requestUtils.js var require_requestUtils = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/requestUtils.js"(exports) { var __createBinding$12 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ? !m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$12 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$12 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$12(result, mod, k); } __setModuleDefault$12(result, mod); return result; }; var __awaiter$13 = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", { value: true }); exports.retryHttpClientResponse = exports.retryTypedResponse = exports.retry = exports.isRetryableStatusCode = exports.isServerErrorStatusCode = exports.isSuccessStatusCode = void 0; const core$7 = __importStar$12(require_core()); const http_client_1$3 = require_lib(); const constants_1$3 = require_constants$3(); function isSuccessStatusCode(statusCode) { if (!statusCode) return false; return statusCode >= 200 && statusCode < 300; } exports.isSuccessStatusCode = isSuccessStatusCode; function isServerErrorStatusCode(statusCode) { if (!statusCode) return true; return statusCode >= 500; } exports.isServerErrorStatusCode = isServerErrorStatusCode; function isRetryableStatusCode(statusCode) { if (!statusCode) return false; const retryableStatusCodes = [ http_client_1$3.HttpCodes.BadGateway, http_client_1$3.HttpCodes.ServiceUnavailable, http_client_1$3.HttpCodes.GatewayTimeout ]; return retryableStatusCodes.includes(statusCode); } exports.isRetryableStatusCode = isRetryableStatusCode; function sleep(milliseconds) { return __awaiter$13(this, void 0, void 0, function* () { return new Promise((resolve) => setTimeout(resolve, milliseconds)); }); } function retry(name, method, getStatusCode, maxAttempts = constants_1$3.DefaultRetryAttempts, delay$4 = constants_1$3.DefaultRetryDelay, onError = void 0) { return __awaiter$13(this, void 0, void 0, function* () { let errorMessage = ""; let attempt = 1; while (attempt <= maxAttempts) { let response = void 0; let statusCode = void 0; let isRetryable = false; try { response = yield method(); } catch (error) { if (onError) response = onError(error); isRetryable = true; errorMessage = error.message; } if (response) { statusCode = getStatusCode(response); if (!isServerErrorStatusCode(statusCode)) return response; } if (statusCode) { isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } core$7.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { core$7.debug(`${name} - Error is not retryable`); break; } yield sleep(delay$4); attempt++; } throw Error(`${name} failed: ${errorMessage}`); }); } exports.retry = retry; function retryTypedResponse(name, method, maxAttempts = constants_1$3.DefaultRetryAttempts, delay$4 = constants_1$3.DefaultRetryDelay) { return __awaiter$13(this, void 0, void 0, function* () { return yield retry( name, method, (response) => response.statusCode, maxAttempts, delay$4, // If the error object contains the statusCode property, extract it and return // a TypedResponse so it can be processed by the retry logic.
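// For example (illustrative values only), an HttpClientError with statusCode 503
// becomes { statusCode: 503, result: null, headers: {}, error }, so the retry loop
// above can treat the failure like any other retryable server response instead of
// giving up on the thrown error immediately.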
(error) => { if (error instanceof http_client_1$3.HttpClientError) return { statusCode: error.statusCode, result: null, headers: {}, error }; else return void 0; } ); }); } exports.retryTypedResponse = retryTypedResponse; function retryHttpClientResponse(name, method, maxAttempts = constants_1$3.DefaultRetryAttempts, delay$4 = constants_1$3.DefaultRetryDelay) { return __awaiter$13(this, void 0, void 0, function* () { return yield retry(name, method, (response) => response.message.statusCode, maxAttempts, delay$4); }); } exports.retryHttpClientResponse = retryHttpClientResponse; } }); //#endregion //#region node_modules/.deno/@azure+abort-controller@1.1.0/node_modules/@azure/abort-controller/dist/index.js var require_dist = __commonJS({ "node_modules/.deno/@azure+abort-controller@1.1.0/node_modules/@azure/abort-controller/dist/index.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); const listenersMap = new WeakMap(); const abortedMap = new WeakMap(); /** * An aborter instance implements the AbortSignal interface and can abort HTTP requests. * * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation * cannot or will not ever be cancelled. * * @example * Abort without timeout * ```ts * await doAsyncWork(AbortSignal.none); * ``` */ var AbortSignal$1 = class AbortSignal$1 { constructor() { /** * onabort event listener. */ this.onabort = null; listenersMap.set(this, []); abortedMap.set(this, false); } /** * Whether the signal has been aborted. * * @readonly */ get aborted() { if (!abortedMap.has(this)) throw new TypeError("Expected `this` to be an instance of AbortSignal."); return abortedMap.get(this); } /** * Creates a new AbortSignal instance that will never be aborted. * * @readonly */ static get none() { return new AbortSignal$1(); } /** * Adds a new "abort" event listener; only the "abort" event is supported. * * @param _type - Only the "abort" event is supported * @param listener - The listener to be added */ addEventListener(_type, listener) { if (!listenersMap.has(this)) throw new TypeError("Expected `this` to be an instance of AbortSignal."); const listeners = listenersMap.get(this); listeners.push(listener); } /** * Removes an "abort" event listener; only the "abort" event is supported. * * @param _type - Only the "abort" event is supported * @param listener - The listener to be removed */ removeEventListener(_type, listener) { if (!listenersMap.has(this)) throw new TypeError("Expected `this` to be an instance of AbortSignal."); const listeners = listenersMap.get(this); const index = listeners.indexOf(listener); if (index > -1) listeners.splice(index, 1); } /** * Dispatches a synthetic event to the AbortSignal. */ dispatchEvent(_event) { throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); } }; /** * Helper to trigger an abort event immediately: the onabort callback and all abort event listeners will be invoked. * It will also try to trigger the abort event for all linked AbortSignal nodes. * * - If there is a timeout, the timer will be cancelled. * - If aborted is true, nothing will happen.
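 * - Listeners receive a synthetic `{ type: "abort" }` event; a minimal sketch of
 *   the observable effect with this module's shimmed classes:
 *
 * ```ts
 * const controller = new AbortController();
 * controller.signal.addEventListener("abort", (event) => {
 *   console.log(event.type); // "abort"
 * });
 * controller.abort();
 * ```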
* * @internal */ function abortSignal(signal) { if (signal.aborted) return; if (signal.onabort) signal.onabort.call(signal); const listeners = listenersMap.get(signal); if (listeners) listeners.slice().forEach((listener) => { listener.call(signal, { type: "abort" }); }); abortedMap.set(signal, true); } /** * This error is thrown when an asynchronous operation has been aborted. * Check for this error by testing that the `name` property of the * error matches `"AbortError"`. * * @example * ```ts * const controller = new AbortController(); * controller.abort(); * try { * doAsyncWork(controller.signal) * } catch (e) { * if (e.name === 'AbortError') { * // handle abort error here. * } * } * ``` */ var AbortError = class extends Error { constructor(message) { super(message); this.name = "AbortError"; } }; /** * An AbortController provides an AbortSignal and the associated controls to signal * that an asynchronous operation should be aborted. * * @example * Abort an operation when another event fires * ```ts * const controller = new AbortController(); * const signal = controller.signal; * doAsyncWork(signal); * button.addEventListener('click', () => controller.abort()); * ``` * * @example * Share an aborter across multiple operations for up to 30s * ```ts * // Upload the same data to 2 different data centers at the same time; * // abort the other operation when either of them finishes * const controller = new AbortController(AbortController.timeout(30 * 1000)); * doAsyncWork(controller.signal).then(() => controller.abort()); * doAsyncWork(controller.signal).then(() => controller.abort()); * ``` * * @example * Cascaded aborting * ```ts * // All operations can't take more than 30 seconds * const timeoutSignal = AbortController.timeout(30 * 1000); * * // The following 2 operations also can't take more than 25 seconds each * await doAsyncWork(new AbortController([timeoutSignal, AbortController.timeout(25 * 1000)]).signal); * await doAsyncWork(new AbortController([timeoutSignal, AbortController.timeout(25 * 1000)]).signal); * ``` */ var AbortController$1 = class { constructor(parentSignals) { this._signal = new AbortSignal$1(); if (!parentSignals) return; if (!Array.isArray(parentSignals)) parentSignals = arguments; for (const parentSignal of parentSignals) if (parentSignal.aborted) this.abort(); else parentSignal.addEventListener("abort", () => { this.abort(); }); } /** * The AbortSignal associated with this controller that will signal aborted * when the abort method is called on this controller. * * @readonly */ get signal() { return this._signal; } /** * Signals any operations that were passed this controller's associated abort signal * to cancel any remaining work and throw an `AbortError`. */ abort() { abortSignal(this._signal); } /** * Creates a new AbortSignal instance that will abort after the provided ms. * @param ms - Elapsed time in milliseconds to trigger an abort. */ static timeout(ms) { const signal = new AbortSignal$1(); const timer = setTimeout(abortSignal, ms, signal); if (typeof timer.unref === "function") timer.unref(); return signal; } }; exports.AbortController = AbortController$1; exports.AbortError = AbortError; exports.AbortSignal = AbortSignal$1; } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/downloadUtils.js var require_downloadUtils = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/downloadUtils.js"(exports) { var __createBinding$11 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ?
!m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$11 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$11 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$11(result, mod, k); } __setModuleDefault$11(result, mod); return result; }; var __awaiter$12 = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", { value: true }); exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; const core$6 = __importStar$11(require_core()); const http_client_1$2 = require_lib(); const storage_blob_1 = require_dist$1(); const buffer = __importStar$11(__require("buffer")); const fs$3 = __importStar$11(__require("fs")); const stream$1 = __importStar$11(__require("stream")); const util$1 = __importStar$11(__require("util")); const utils$3 = __importStar$11(require_cacheUtils()); const constants_1$2 = require_constants$3(); const requestUtils_1$1 = require_requestUtils(); const abort_controller_1 = require_dist(); /** * Pipes the body of an HTTP response to a stream * * @param response the HTTP response * @param output the writable stream */ function pipeResponseToStream(response, output) { return __awaiter$12(this, void 0, void 0, function* () { const pipeline = util$1.promisify(stream$1.pipeline); yield pipeline(response.message, output); }); } /** * Class for tracking the download state and displaying stats. */ var DownloadProgress = class { constructor(contentLength$1) { this.contentLength = contentLength$1; this.segmentIndex = 0; this.segmentSize = 0; this.segmentOffset = 0; this.receivedBytes = 0; this.displayedComplete = false; this.startTime = Date.now(); } /** * Progress to the next segment. Only call this method when the previous segment * is complete. * * @param segmentSize the length of the next segment */ nextSegment(segmentSize) { this.segmentOffset = this.segmentOffset + this.segmentSize; this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; core$6.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. * * @param receivedBytes the number of bytes received */ setReceivedBytes(receivedBytes) { this.receivedBytes = receivedBytes; } /** * Returns the total number of bytes transferred.
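 *
 * @example
 * A minimal sketch of how segment offset and received bytes combine
 * (the sizes are arbitrary illustration values):
 * ```ts
 * const progress = new DownloadProgress(8 * 1024 * 1024);
 * progress.nextSegment(4 * 1024 * 1024); // first segment starts at offset 0
 * progress.setReceivedBytes(1024);
 * progress.getTransferredBytes();        // 1024 (segmentOffset 0 + receivedBytes 1024)
 * ```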
*/ getTransferredBytes() { return this.segmentOffset + this.receivedBytes; } /** * Returns true if the download is complete. */ isDone() { return this.getTransferredBytes() === this.contentLength; } /** * Prints the current download stats. Once the download completes, this will print one * last line and then stop. */ display() { if (this.displayedComplete) return; const transferredBytes = this.segmentOffset + this.receivedBytes; const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); core$6.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MB/sec`); if (this.isDone()) this.displayedComplete = true; } /** * Returns a function used to handle TransferProgressEvents. */ onProgress() { return (progress) => { this.setReceivedBytes(progress.loadedBytes); }; } /** * Starts the timer that displays the stats. * * @param delayInMs the delay between each write */ startDisplayTimer(delayInMs = 1e3) { const displayCallback = () => { this.display(); if (!this.isDone()) this.timeoutHandle = setTimeout(displayCallback, delayInMs); }; this.timeoutHandle = setTimeout(displayCallback, delayInMs); } /** * Stops the timer that displays the stats. As this typically indicates the download * is complete, this will display one last line, unless the last line has already * been written. */ stopDisplayTimer() { if (this.timeoutHandle) { clearTimeout(this.timeoutHandle); this.timeoutHandle = void 0; } this.display(); } }; exports.DownloadProgress = DownloadProgress; /** * Download the cache using the Actions toolkit http-client * * @param archiveLocation the URL for the cache * @param archivePath the local path where the cache is saved */ function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter$12(this, void 0, void 0, function* () { const writeStream = fs$3.createWriteStream(archivePath); const httpClient = new http_client_1$2.HttpClient("actions/cache"); const downloadResponse = yield (0, requestUtils_1$1.retryHttpClientResponse)("downloadCache", () => __awaiter$12(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); })); downloadResponse.message.socket.setTimeout(constants_1$2.SocketTimeout, () => { downloadResponse.message.destroy(); core$6.debug(`Aborting download, socket timed out after ${constants_1$2.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); const contentLengthHeader = downloadResponse.message.headers["content-length"]; if (contentLengthHeader) { const expectedLength = parseInt(contentLengthHeader); const actualLength = utils$3.getArchiveFileSizeInBytes(archivePath); if (actualLength !== expectedLength) throw new Error(`Incomplete download.
Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } else core$6.debug("Unable to validate download, no Content-Length header"); }); } exports.downloadCacheHttpClient = downloadCacheHttpClient; /** * Download the cache using the Actions toolkit http-client concurrently * * @param archiveLocation the URL for the cache * @param archivePath the local path where the cache is saved */ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { var _a$2; return __awaiter$12(this, void 0, void 0, function* () { const archiveDescriptor = yield fs$3.promises.open(archivePath, "w"); const httpClient = new http_client_1$2.HttpClient("actions/cache", void 0, { socketTimeout: options.timeoutInMs, keepAlive: true }); try { const res = yield (0, requestUtils_1$1.retryHttpClientResponse)("downloadCacheMetadata", () => __awaiter$12(this, void 0, void 0, function* () { return yield httpClient.request("HEAD", archiveLocation, null, {}); })); const lengthHeader = res.message.headers["content-length"]; if (lengthHeader === void 0 || lengthHeader === null) throw new Error("Content-Length not found on blob response"); const length = parseInt(lengthHeader); if (Number.isNaN(length)) throw new Error(`Could not interpret Content-Length: ${length}`); const downloads = []; const blockSize = 4 * 1024 * 1024; for (let offset = 0; offset < length; offset += blockSize) { const count = Math.min(blockSize, length - offset); downloads.push({ offset, promiseGetter: () => __awaiter$12(this, void 0, void 0, function* () { return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count); }) }); } downloads.reverse(); let actives = 0; let bytesDownloaded = 0; const progress = new DownloadProgress(length); progress.startDisplayTimer(); const progressFn = progress.onProgress(); const activeDownloads = []; let nextDownload; const waitAndWrite = () => __awaiter$12(this, void 0, void 0, function* () { const segment = yield Promise.race(Object.values(activeDownloads)); yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); actives--; delete activeDownloads[segment.offset]; bytesDownloaded += segment.count; progressFn({ loadedBytes: bytesDownloaded }); }); while (nextDownload = downloads.pop()) { activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); actives++; if (actives >= ((_a$2 = options.downloadConcurrency) !== null && _a$2 !== void 0 ? 
_a$2 : 10)) yield waitAndWrite(); } while (actives > 0) yield waitAndWrite(); } finally { httpClient.dispose(); yield archiveDescriptor.close(); } }); } exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { return __awaiter$12(this, void 0, void 0, function* () { const retries = 5; let failures = 0; while (true) try { const timeout = 3e4; const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); if (typeof result === "string") throw new Error("downloadSegmentRetry failed due to timeout"); return result; } catch (err) { if (failures >= retries) throw err; failures++; } }); } function downloadSegment(httpClient, archiveLocation, offset, count) { return __awaiter$12(this, void 0, void 0, function* () { const partRes = yield (0, requestUtils_1$1.retryHttpClientResponse)("downloadCachePart", () => __awaiter$12(this, void 0, void 0, function* () { return yield httpClient.get(archiveLocation, { Range: `bytes=${offset}-${offset + count - 1}` }); })); if (!partRes.readBodyBuffer) throw new Error("Expected HttpClientResponse to implement readBodyBuffer"); return { offset, count, buffer: yield partRes.readBodyBuffer() }; }); } /** * Download the cache using the Azure Storage SDK. Only call this method if the * URL points to an Azure Storage endpoint. * * @param archiveLocation the URL for the cache * @param archivePath the local path where the cache is saved * @param options the download options with the defaults set */ function downloadCacheStorageSDK(archiveLocation, archivePath, options) { var _a$2; return __awaiter$12(this, void 0, void 0, function* () { const client = new storage_blob_1.BlockBlobClient(archiveLocation, void 0, { retryOptions: { tryTimeoutInMs: options.timeoutInMs } }); const properties = yield client.getProperties(); const contentLength$1 = (_a$2 = properties.contentLength) !== null && _a$2 !== void 0 ? 
_a$2 : -1; if (contentLength$1 < 0) { core$6.debug("Unable to determine content length, downloading file with http-client..."); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength$1); const fd = fs$3.openSync(archivePath, "w"); try { downloadProgress.startDisplayTimer(); const controller = new abort_controller_1.AbortController(); const abortSignal$1 = controller.signal; while (!downloadProgress.isDone()) { const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; const segmentSize = Math.min(maxSegmentSize, contentLength$1 - segmentStart); downloadProgress.nextSegment(segmentSize); const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 36e5, client.downloadToBuffer(segmentStart, segmentSize, { abortSignal: abortSignal$1, concurrency: options.downloadConcurrency, onProgress: downloadProgress.onProgress() })); if (result === "timeout") { controller.abort(); throw new Error("Aborting cache download as the download time exceeded the timeout."); } else if (Buffer.isBuffer(result)) fs$3.writeFileSync(fd, result); } } finally { downloadProgress.stopDisplayTimer(); fs$3.closeSync(fd); } } }); } exports.downloadCacheStorageSDK = downloadCacheStorageSDK; const promiseWithTimeout = (timeoutMs, promise) => __awaiter$12(void 0, void 0, void 0, function* () { let timeoutHandle; const timeoutPromise = new Promise((resolve) => { timeoutHandle = setTimeout(() => resolve("timeout"), timeoutMs); }); return Promise.race([promise, timeoutPromise]).then((result) => { clearTimeout(timeoutHandle); return result; }); }); } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/options.js var require_options = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/options.js"(exports) { var __createBinding$10 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ? !m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$10 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$10 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$10(result, mod, k); } __setModuleDefault$10(result, mod); return result; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.getDownloadOptions = exports.getUploadOptions = void 0; const core$5 = __importStar$10(require_core()); /** * Returns a copy of the upload options with defaults filled in. 
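 *
 * Defaults are `useAzureSdk: false`, `uploadConcurrency: 4`, and a 32 MiB
 * `uploadChunkSize`; the `CACHE_UPLOAD_CONCURRENCY` and `CACHE_UPLOAD_CHUNK_SIZE`
 * environment variables override the copied values, capped at 32 and 128 MiB
 * respectively.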
* * @param copy the original upload options */ function getUploadOptions(copy$1) { const result = { useAzureSdk: false, uploadConcurrency: 4, uploadChunkSize: 32 * 1024 * 1024 }; if (copy$1) { if (typeof copy$1.useAzureSdk === "boolean") result.useAzureSdk = copy$1.useAzureSdk; if (typeof copy$1.uploadConcurrency === "number") result.uploadConcurrency = copy$1.uploadConcurrency; if (typeof copy$1.uploadChunkSize === "number") result.uploadChunkSize = copy$1.uploadChunkSize; } /** * Add env var overrides */ result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; core$5.debug(`Use Azure SDK: ${result.useAzureSdk}`); core$5.debug(`Upload concurrency: ${result.uploadConcurrency}`); core$5.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports.getUploadOptions = getUploadOptions; /** * Returns a copy of the download options with defaults filled in. * * @param copy the original download options */ function getDownloadOptions(copy$1) { const result = { useAzureSdk: false, concurrentBlobDownloads: true, downloadConcurrency: 8, timeoutInMs: 3e4, segmentTimeoutInMs: 6e5, lookupOnly: false }; if (copy$1) { if (typeof copy$1.useAzureSdk === "boolean") result.useAzureSdk = copy$1.useAzureSdk; if (typeof copy$1.concurrentBlobDownloads === "boolean") result.concurrentBlobDownloads = copy$1.concurrentBlobDownloads; if (typeof copy$1.downloadConcurrency === "number") result.downloadConcurrency = copy$1.downloadConcurrency; if (typeof copy$1.timeoutInMs === "number") result.timeoutInMs = copy$1.timeoutInMs; if (typeof copy$1.segmentTimeoutInMs === "number") result.segmentTimeoutInMs = copy$1.segmentTimeoutInMs; if (typeof copy$1.lookupOnly === "boolean") result.lookupOnly = copy$1.lookupOnly; } const segmentDownloadTimeoutMins = process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]; if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; core$5.debug(`Use Azure SDK: ${result.useAzureSdk}`); core$5.debug(`Download concurrency: ${result.downloadConcurrency}`); core$5.debug(`Request timeout (ms): ${result.timeoutInMs}`); core$5.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); core$5.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); core$5.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports.getDownloadOptions = getDownloadOptions; } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/config.js var require_config = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/config.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.getCacheServiceURL = exports.getCacheServiceVersion = exports.isGhes = void 0; function isGhes() { const ghUrl = new URL(process.env["GITHUB_SERVER_URL"] || "https://github.com"); const hostname = ghUrl.hostname.trimEnd().toUpperCase(); const isGitHubHost = hostname === "GITHUB.COM"; const isGheHost = hostname.endsWith(".GHE.COM"); const isLocalHost = hostname.endsWith(".LOCALHOST"); return !isGitHubHost 
&& !isGheHost && !isLocalHost; } exports.isGhes = isGhes; function getCacheServiceVersion() { if (isGhes()) return "v1"; return process.env["ACTIONS_CACHE_SERVICE_V2"] ? "v2" : "v1"; } exports.getCacheServiceVersion = getCacheServiceVersion; function getCacheServiceURL() { const version$1 = getCacheServiceVersion(); switch (version$1) { case "v1": return process.env["ACTIONS_CACHE_URL"] || process.env["ACTIONS_RESULTS_URL"] || ""; case "v2": return process.env["ACTIONS_RESULTS_URL"] || ""; default: throw new Error(`Unsupported cache service version: ${version$1}`); } } exports.getCacheServiceURL = getCacheServiceURL; } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/package.json var require_package = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/package.json"(exports, module) { module.exports = { "name": "@actions/cache", "version": "4.0.3", "preview": true, "description": "Actions cache lib", "keywords": [ "github", "actions", "cache" ], "homepage": "https://github.com/actions/toolkit/tree/main/packages/cache", "license": "MIT", "main": "lib/cache.js", "types": "lib/cache.d.ts", "directories": { "lib": "lib", "test": "__tests__" }, "files": ["lib", "!.DS_Store"], "publishConfig": { "access": "public" }, "repository": { "type": "git", "url": "git+https://github.com/actions/toolkit.git", "directory": "packages/cache" }, "scripts": { "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", "test": "echo \"Error: run tests from root\" && exit 1", "tsc": "tsc" }, "bugs": { "url": "https://github.com/actions/toolkit/issues" }, "dependencies": { "@actions/core": "^1.11.1", "@actions/exec": "^1.0.1", "@actions/glob": "^0.1.0", "@actions/http-client": "^2.1.1", "@actions/io": "^1.0.1", "@azure/abort-controller": "^1.1.0", "@azure/ms-rest-js": "^2.6.0", "@azure/storage-blob": "^12.13.0", "@protobuf-ts/plugin": "^2.9.4", "semver": "^6.3.1" }, "devDependencies": { "@types/node": "^22.13.9", "@types/semver": "^6.0.0", "typescript": "^5.2.2" } }; } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/shared/user-agent.js var require_user_agent = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/shared/user-agent.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.getUserAgentString = void 0; const packageJson = require_package(); /** * Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package */ function getUserAgentString() { return `@actions/cache-${packageJson.version}`; } exports.getUserAgentString = getUserAgentString; } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/cacheHttpClient.js var require_cacheHttpClient = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/cacheHttpClient.js"(exports) { var __createBinding$9 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ? 
!m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$9 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$9 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$9(result, mod, k); } __setModuleDefault$9(result, mod); return result; }; var __awaiter$11 = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", { value: true }); exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = void 0; const core$4 = __importStar$9(require_core()); const http_client_1$1 = require_lib(); const auth_1$1 = require_auth(); const fs$2 = __importStar$9(__require("fs")); const url_1 = __require("url"); const utils$2 = __importStar$9(require_cacheUtils()); const uploadUtils_1 = require_uploadUtils(); const downloadUtils_1 = require_downloadUtils(); const options_1 = require_options(); const requestUtils_1 = require_requestUtils(); const config_1$2 = require_config(); const user_agent_1$1 = require_user_agent(); function getCacheApiUrl(resource) { const baseUrl = (0, config_1$2.getCacheServiceURL)(); if (!baseUrl) throw new Error("Cache Service Url not found, unable to restore cache."); const url$1 = `${baseUrl}_apis/artifactcache/${resource}`; core$4.debug(`Resource Url: ${url$1}`); return url$1; } function createAcceptHeader(type, apiVersion) { return `${type};api-version=${apiVersion}`; } function getRequestOptions() { const requestOptions = { headers: { Accept: createAcceptHeader("application/json", "6.0-preview.1") } }; return requestOptions; } function createHttpClient() { const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; const bearerCredentialHandler = new auth_1$1.BearerCredentialHandler(token); return new http_client_1$1.HttpClient((0, user_agent_1$1.getUserAgentString)(), [bearerCredentialHandler], getRequestOptions()); } function getCacheEntry(keys, paths, options) { return __awaiter$11(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const version$1 = utils$2.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? 
void 0 : options.enableCrossOsArchive); const resource = `cache?keys=${encodeURIComponent(keys.join(","))}&version=${version$1}`; const response = yield (0, requestUtils_1.retryTypedResponse)("getCacheEntry", () => __awaiter$11(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 204) { if (core$4.isDebug()) yield printCachesListForDiagnostics(keys[0], httpClient, version$1); return null; } if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) throw new Error(`Cache service responded with ${response.statusCode}`); const cacheResult = response.result; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; if (!cacheDownloadUrl) throw new Error("Cache not found."); core$4.setSecret(cacheDownloadUrl); core$4.debug(`Cache Result:`); core$4.debug(JSON.stringify(cacheResult)); return cacheResult; }); } exports.getCacheEntry = getCacheEntry; function printCachesListForDiagnostics(key, httpClient, version$1) { return __awaiter$11(this, void 0, void 0, function* () { const resource = `caches?key=${encodeURIComponent(key)}`; const response = yield (0, requestUtils_1.retryTypedResponse)("listCache", () => __awaiter$11(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 200) { const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { core$4.debug(`No matching cache found for cache key '${key}', version '${version$1}' and scope '${process.env["GITHUB_REF"]}'. There exist one or more cache(s) with a similar key, but they have a different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) core$4.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ?
void 0 : cacheEntry.creationTime}`); } } }); } function downloadCache(archiveLocation, archivePath, options) { return __awaiter$11(this, void 0, void 0, function* () { const archiveUrl = new url_1.URL(archiveLocation); const downloadOptions = (0, options_1.getDownloadOptions)(options); if (archiveUrl.hostname.endsWith(".blob.core.windows.net")) if (downloadOptions.useAzureSdk) yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); else if (downloadOptions.concurrentBlobDownloads) yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); else yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); else yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); }); } exports.downloadCache = downloadCache; function reserveCache(key, paths, options) { return __awaiter$11(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const version$1 = utils$2.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); const reserveCacheRequest = { key, version: version$1, cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize }; const response = yield (0, requestUtils_1.retryTypedResponse)("reserveCache", () => __awaiter$11(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest); })); return response; }); } exports.reserveCache = reserveCache; function getContentRange(start, end) { return `bytes ${start}-${end}/*`; } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter$11(this, void 0, void 0, function* () { core$4.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) }; const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter$11(this, void 0, void 0, function* () { return httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders); })); if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); }); } function uploadFile(httpClient, cacheId, archivePath, options) { return __awaiter$11(this, void 0, void 0, function* () { const fileSize = utils$2.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); const fd = fs$2.openSync(archivePath, "r"); const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils$2.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); const maxChunkSize = utils$2.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; core$4.debug("Awaiting all uploads"); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter$11(this, void 0, void 0, function* () { while (offset < fileSize) { const chunkSize = Math.min(fileSize - offset, maxChunkSize); const start = offset; const end = offset + chunkSize - 1; offset += maxChunkSize; yield uploadChunk(httpClient, resourceUrl, () => 
fs$2.createReadStream(archivePath, { fd, start, end, autoClose: false }).on("error", (error) => { throw new Error(`Cache upload failed because file read failed with ${error.message}`); }), start, end); } }))); } finally { fs$2.closeSync(fd); } return; }); } function commitCache(httpClient, cacheId, filesize) { return __awaiter$11(this, void 0, void 0, function* () { const commitCacheRequest = { size: filesize }; return yield (0, requestUtils_1.retryTypedResponse)("commitCache", () => __awaiter$11(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); })); }); } function saveCache$2(cacheId, archivePath, signedUploadURL, options) { return __awaiter$11(this, void 0, void 0, function* () { const uploadOptions = (0, options_1.getUploadOptions)(options); if (uploadOptions.useAzureSdk) { if (!signedUploadURL) throw new Error("Azure Storage SDK can only be used when a signed URL is provided."); yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); core$4.debug("Upload cache"); yield uploadFile(httpClient, cacheId, archivePath, options); core$4.debug("Committing cache"); const cacheSize = utils$2.getArchiveFileSizeInBytes(archivePath); core$4.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); core$4.info("Cache saved successfully"); } }); } exports.saveCache = saveCache$2; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/json-typings.js var require_json_typings = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/json-typings.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.isJsonObject = exports.typeofJsonValue = void 0; /** * Get the type of a JSON value. * Distinguishes between array, null and object. */ function typeofJsonValue(value) { let t = typeof value; if (t == "object") { if (Array.isArray(value)) return "array"; if (value === null) return "null"; } return t; } exports.typeofJsonValue = typeofJsonValue; /** * Is this a JSON object (instead of an array or null)? */ function isJsonObject(value) { return value !== null && typeof value == "object" && !Array.isArray(value); } exports.isJsonObject = isJsonObject; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/base64.js var require_base64 = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/base64.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.base64encode = exports.base64decode = void 0; let encTable = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split(""); let decTable = []; for (let i = 0; i < encTable.length; i++) decTable[encTable[i].charCodeAt(0)] = i; decTable["-".charCodeAt(0)] = encTable.indexOf("+"); decTable["_".charCodeAt(0)] = encTable.indexOf("/"); /** * Decodes a base64 string to a byte array.
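 *
 * @example
 * A minimal round-trip sketch:
 * ```ts
 * const bytes = base64decode("aGVsbG8="); // Uint8Array [104, 101, 108, 108, 111], i.e. "hello"
 * base64encode(bytes);                    // "aGVsbG8="
 * ```
 *
 * Decoding details: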
* * - ignores white-space, including line breaks and tabs * - allows inner padding (can decode concatenated base64 strings) * - does not require padding * - understands base64url encoding: * "-" instead of "+", * "_" instead of "/", * no padding */ function base64decode(base64Str) { let es = base64Str.length * 3 / 4; if (base64Str[base64Str.length - 2] == "=") es -= 2; else if (base64Str[base64Str.length - 1] == "=") es -= 1; let bytes = new Uint8Array(es), bytePos = 0, groupPos = 0, b, p = 0; for (let i = 0; i < base64Str.length; i++) { b = decTable[base64Str.charCodeAt(i)]; if (b === void 0) switch (base64Str[i]) { case "=": groupPos = 0; case "\n": case "\r": case "\t": case " ": continue; default: throw Error(`invalid base64 string.`); } switch (groupPos) { case 0: p = b; groupPos = 1; break; case 1: bytes[bytePos++] = p << 2 | (b & 48) >> 4; p = b; groupPos = 2; break; case 2: bytes[bytePos++] = (p & 15) << 4 | (b & 60) >> 2; p = b; groupPos = 3; break; case 3: bytes[bytePos++] = (p & 3) << 6 | b; groupPos = 0; break; } } if (groupPos == 1) throw Error(`invalid base64 string.`); return bytes.subarray(0, bytePos); } exports.base64decode = base64decode; /** * Encodes a byte array to a base64 string. * Adds padding at the end. * Does not insert newlines. */ function base64encode(bytes) { let base64$1 = "", groupPos = 0, b, p = 0; for (let i = 0; i < bytes.length; i++) { b = bytes[i]; switch (groupPos) { case 0: base64$1 += encTable[b >> 2]; p = (b & 3) << 4; groupPos = 1; break; case 1: base64$1 += encTable[p | b >> 4]; p = (b & 15) << 2; groupPos = 2; break; case 2: base64$1 += encTable[p | b >> 6]; base64$1 += encTable[b & 63]; groupPos = 0; break; } } if (groupPos) { base64$1 += encTable[p]; base64$1 += "="; if (groupPos == 1) base64$1 += "="; } return base64$1; } exports.base64encode = base64encode; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/protobufjs-utf8.js var require_protobufjs_utf8 = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/protobufjs-utf8.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.utf8read = void 0; const fromCharCodes = (chunk) => String.fromCharCode.apply(String, chunk); /** * @deprecated This function will no longer be exported with the next major * release, since protobuf-ts has switched to the TextDecoder API. If you need this * function, please migrate to @protobufjs/utf8. For context, see * https://github.com/timostamm/protobuf-ts/issues/184 * * Reads UTF8 bytes as a string.
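 *
 * @example
 * A minimal sketch; `TextDecoder` yields the same result and is the
 * recommended replacement:
 * ```ts
 * const bytes = new Uint8Array([104, 105]); // UTF-8 bytes for "hi"
 * utf8read(bytes);                          // "hi"
 * new TextDecoder().decode(bytes);          // "hi"
 * ```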
* * See [protobufjs / utf8](https://github.com/protobufjs/protobuf.js/blob/9893e35b854621cce64af4bf6be2cff4fb892796/lib/utf8/index.js#L40) * * Copyright (c) 2016, Daniel Wirtz */ function utf8read(bytes) { if (bytes.length < 1) return ""; let pos = 0, parts = [], chunk = [], i = 0, t; let len = bytes.length; while (pos < len) { t = bytes[pos++]; if (t < 128) chunk[i++] = t; else if (t > 191 && t < 224) chunk[i++] = (t & 31) << 6 | bytes[pos++] & 63; else if (t > 239 && t < 365) { t = ((t & 7) << 18 | (bytes[pos++] & 63) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63) - 65536; chunk[i++] = 55296 + (t >> 10); chunk[i++] = 56320 + (t & 1023); } else chunk[i++] = (t & 15) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63; if (i > 8191) { parts.push(fromCharCodes(chunk)); i = 0; } } if (parts.length) { if (i) parts.push(fromCharCodes(chunk.slice(0, i))); return parts.join(""); } return fromCharCodes(chunk.slice(0, i)); } exports.utf8read = utf8read; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/binary-format-contract.js var require_binary_format_contract = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/binary-format-contract.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.WireType = exports.mergeBinaryOptions = exports.UnknownFieldHandler = void 0; /** * This handler implements the default behaviour for unknown fields. * When reading data, unknown fields are stored on the message, in a * symbol property. * When writing data, the symbol property is queried and unknown fields * are serialized into the output again. */ var UnknownFieldHandler; (function(UnknownFieldHandler$1) { /** * The symbol used to store unknown fields for a message. * The property must conform to `UnknownFieldContainer`. */ UnknownFieldHandler$1.symbol = Symbol.for("protobuf-ts/unknown"); /** * Store an unknown field during binary read directly on the message. * This method is compatible with `BinaryReadOptions.readUnknownField`. */ UnknownFieldHandler$1.onRead = (typeName, message, fieldNo, wireType, data) => { let container = is(message) ? message[UnknownFieldHandler$1.symbol] : message[UnknownFieldHandler$1.symbol] = []; container.push({ no: fieldNo, wireType, data }); }; /** * Write unknown fields stored for the message to the writer. * This method is compatible with `BinaryWriteOptions.writeUnknownFields`. */ UnknownFieldHandler$1.onWrite = (typeName, message, writer) => { for (let { no, wireType, data } of UnknownFieldHandler$1.list(message)) writer.tag(no, wireType).raw(data); }; /** * List unknown fields stored for the message. * Note that there may be multiple fields with the same number. */ UnknownFieldHandler$1.list = (message, fieldNo) => { if (is(message)) { let all = message[UnknownFieldHandler$1.symbol]; return fieldNo ? all.filter((uf) => uf.no == fieldNo) : all; } return []; }; /** * Returns the last unknown field by field number. */ UnknownFieldHandler$1.last = (message, fieldNo) => UnknownFieldHandler$1.list(message, fieldNo).slice(-1)[0]; const is = (message) => message && Array.isArray(message[UnknownFieldHandler$1.symbol]); })(UnknownFieldHandler = exports.UnknownFieldHandler || (exports.UnknownFieldHandler = {})); /** * Merges binary write or read options. Later values override earlier values.
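 *
 * A sketch of the override behaviour (a shallow `Object.assign`-style merge):
 *
 * ```ts
 * mergeBinaryOptions({ writeUnknownFields: true }, { writeUnknownFields: false });
 * // => { writeUnknownFields: false }
 * ```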
*/ function mergeBinaryOptions(a, b) { return Object.assign(Object.assign({}, a), b); } exports.mergeBinaryOptions = mergeBinaryOptions; /** * Protobuf binary format wire types. * * A wire type provides just enough information to find the length of the * following value. * * See https://developers.google.com/protocol-buffers/docs/encoding#structure */ var WireType; (function(WireType$1) { /** * Used for int32, int64, uint32, uint64, sint32, sint64, bool, enum */ WireType$1[WireType$1["Varint"] = 0] = "Varint"; /** * Used for fixed64, sfixed64, double. * Always 8 bytes with little-endian byte order. */ WireType$1[WireType$1["Bit64"] = 1] = "Bit64"; /** * Used for string, bytes, embedded messages, packed repeated fields * * Only repeated numeric types (types which use the varint, 32-bit, * or 64-bit wire types) can be packed. In proto3, such fields are * packed by default. */ WireType$1[WireType$1["LengthDelimited"] = 2] = "LengthDelimited"; /** * Used for groups * @deprecated */ WireType$1[WireType$1["StartGroup"] = 3] = "StartGroup"; /** * Used for groups * @deprecated */ WireType$1[WireType$1["EndGroup"] = 4] = "EndGroup"; /** * Used for fixed32, sfixed32, float. * Always 4 bytes with little-endian byte order. */ WireType$1[WireType$1["Bit32"] = 5] = "Bit32"; })(WireType = exports.WireType || (exports.WireType = {})); } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/goog-varint.js var require_goog_varint = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/goog-varint.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.varint32read = exports.varint32write = exports.int64toString = exports.int64fromString = exports.varint64write = exports.varint64read = void 0; /** * Read a 64 bit varint as two JS numbers. * * Returns tuple: * [0]: low bits * [1]: high bits * * Copyright 2008 Google Inc. All rights reserved. * * See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/buffer_decoder.js#L175 */ function varint64read() { let lowBits = 0; let highBits = 0; for (let shift = 0; shift < 28; shift += 7) { let b = this.buf[this.pos++]; lowBits |= (b & 127) << shift; if ((b & 128) == 0) { this.assertBounds(); return [lowBits, highBits]; } } let middleByte = this.buf[this.pos++]; lowBits |= (middleByte & 15) << 28; highBits = (middleByte & 112) >> 4; if ((middleByte & 128) == 0) { this.assertBounds(); return [lowBits, highBits]; } for (let shift = 3; shift <= 31; shift += 7) { let b = this.buf[this.pos++]; highBits |= (b & 127) << shift; if ((b & 128) == 0) { this.assertBounds(); return [lowBits, highBits]; } } throw new Error("invalid varint"); } exports.varint64read = varint64read; /** * Write a 64 bit varint, given as two JS numbers, to the given bytes array. * * Copyright 2008 Google Inc. All rights reserved. * * See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/writer.js#L344 */ function varint64write(lo, hi, bytes) { for (let i = 0; i < 28; i = i + 7) { const shift = lo >>> i; const hasNext = !(shift >>> 7 == 0 && hi == 0); const byte = (hasNext ? shift | 128 : shift) & 255; bytes.push(byte); if (!hasNext) return; } const splitBits = lo >>> 28 & 15 | (hi & 7) << 4; const hasMoreBits = !(hi >> 3 == 0); bytes.push((hasMoreBits ?
splitBits | 128 : splitBits) & 255); if (!hasMoreBits) return; for (let i = 3; i < 31; i = i + 7) { const shift = hi >>> i; const hasNext = !(shift >>> 7 == 0); const byte = (hasNext ? shift | 128 : shift) & 255; bytes.push(byte); if (!hasNext) return; } bytes.push(hi >>> 31 & 1); } exports.varint64write = varint64write; const TWO_PWR_32_DBL$1 = 65536 * 65536; /** * Parse decimal string of 64 bit integer value as two JS numbers. * * Returns tuple: * [0]: minus sign? * [1]: low bits * [2]: high bits * * Copyright 2008 Google Inc. */ function int64fromString(dec) { let minus = dec[0] == "-"; if (minus) dec = dec.slice(1); const base = 1e6; let lowBits = 0; let highBits = 0; function add1e6digit(begin, end) { const digit1e6 = Number(dec.slice(begin, end)); highBits *= base; lowBits = lowBits * base + digit1e6; if (lowBits >= TWO_PWR_32_DBL$1) { highBits = highBits + (lowBits / TWO_PWR_32_DBL$1 | 0); lowBits = lowBits % TWO_PWR_32_DBL$1; } } add1e6digit(-24, -18); add1e6digit(-18, -12); add1e6digit(-12, -6); add1e6digit(-6); return [ minus, lowBits, highBits ]; } exports.int64fromString = int64fromString; /** * Format 64 bit integer value (as two JS numbers) to decimal string. * * Copyright 2008 Google Inc. */ function int64toString(bitsLow, bitsHigh) { if (bitsHigh >>> 0 <= 2097151) return "" + (TWO_PWR_32_DBL$1 * bitsHigh + (bitsLow >>> 0)); let low = bitsLow & 16777215; let mid = (bitsLow >>> 24 | bitsHigh << 8) >>> 0 & 16777215; let high = bitsHigh >> 16 & 65535; let digitA = low + mid * 6777216 + high * 6710656; let digitB = mid + high * 8147497; let digitC = high * 2; let base = 1e7; if (digitA >= base) { digitB += Math.floor(digitA / base); digitA %= base; } if (digitB >= base) { digitC += Math.floor(digitB / base); digitB %= base; } function decimalFrom1e7(digit1e7, needLeadingZeros) { let partial = digit1e7 ? String(digit1e7) : ""; if (needLeadingZeros) return "0000000".slice(partial.length) + partial; return partial; } return decimalFrom1e7( digitC, /*needLeadingZeros=*/ 0 ) + decimalFrom1e7( digitB, /*needLeadingZeros=*/ digitC ) + decimalFrom1e7( digitA, /*needLeadingZeros=*/ 1 ); } exports.int64toString = int64toString; /** * Write a 32 bit varint, signed or unsigned. Same as `varint64write(0, value, bytes)` * * Copyright 2008 Google Inc. All rights reserved. * * See https://github.com/protocolbuffers/protobuf/blob/1b18833f4f2a2f681f4e4a25cdf3b0a43115ec26/js/binary/encoder.js#L144 */ function varint32write(value, bytes) { if (value >= 0) { while (value > 127) { bytes.push(value & 127 | 128); value = value >>> 7; } bytes.push(value); } else { for (let i = 0; i < 9; i++) { bytes.push(value & 127 | 128); value = value >> 7; } bytes.push(1); } } exports.varint32write = varint32write; /** * Read an unsigned 32 bit varint. 
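 *
 * For illustration, the classic example from the protobuf encoding docs:
 * the wire bytes 0x96 0x01 decode to 150. The function reads from
 * `this.buf`/`this.pos`, so a minimal stand-in object for the reader
 * is assumed here:
 *
 * ```ts
 * const reader = { buf: new Uint8Array([0x96, 0x01]), pos: 0, assertBounds() {} };
 * varint32read.call(reader); // 150
 * ```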
* * See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/buffer_decoder.js#L220 */ function varint32read() { let b = this.buf[this.pos++]; let result = b & 127; if ((b & 128) == 0) { this.assertBounds(); return result; } b = this.buf[this.pos++]; result |= (b & 127) << 7; if ((b & 128) == 0) { this.assertBounds(); return result; } b = this.buf[this.pos++]; result |= (b & 127) << 14; if ((b & 128) == 0) { this.assertBounds(); return result; } b = this.buf[this.pos++]; result |= (b & 127) << 21; if ((b & 128) == 0) { this.assertBounds(); return result; } b = this.buf[this.pos++]; result |= (b & 15) << 28; for (let readBytes = 5; (b & 128) !== 0 && readBytes < 10; readBytes++) b = this.buf[this.pos++]; if ((b & 128) != 0) throw new Error("invalid varint"); this.assertBounds(); return result >>> 0; } exports.varint32read = varint32read; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/pb-long.js var require_pb_long = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/pb-long.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.PbLong = exports.PbULong = exports.detectBi = void 0; const goog_varint_1$2 = require_goog_varint(); let BI; function detectBi() { const dv = new DataView(new ArrayBuffer(8)); const ok = globalThis.BigInt !== void 0 && typeof dv.getBigInt64 === "function" && typeof dv.getBigUint64 === "function" && typeof dv.setBigInt64 === "function" && typeof dv.setBigUint64 === "function"; BI = ok ? { MIN: BigInt("-9223372036854775808"), MAX: BigInt("9223372036854775807"), UMIN: BigInt("0"), UMAX: BigInt("18446744073709551615"), C: BigInt, V: dv } : void 0; } exports.detectBi = detectBi; detectBi(); function assertBi(bi) { if (!bi) throw new Error("BigInt unavailable, see https://github.com/timostamm/protobuf-ts/blob/v1.0.8/MANUAL.md#bigint-support"); } const RE_DECIMAL_STR = /^-?[0-9]+$/; const TWO_PWR_32_DBL = 4294967296; const HALF_2_PWR_32 = 2147483648; var SharedPbLong = class { /** * Create a new instance with the given bits. */ constructor(lo, hi) { this.lo = lo | 0; this.hi = hi | 0; } /** * Is this instance equal to 0? */ isZero() { return this.lo == 0 && this.hi == 0; } /** * Convert to a native number. */ toNumber() { let result = this.hi * TWO_PWR_32_DBL + (this.lo >>> 0); if (!Number.isSafeInteger(result)) throw new Error("cannot convert to safe number"); return result; } }; /** * 64-bit unsigned integer as two 32-bit values. * Converts between `string`, `number` and `bigint` representations. */ var PbULong = class PbULong extends SharedPbLong { /** * Create instance from a `string`, `number` or `bigint`. 
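 *
 * Illustrative conversions (assuming a BigInt-capable runtime):
 *
 * ```ts
 * PbULong.from("18446744073709551615").toBigInt(); // 18446744073709551615n
 * PbULong.from(42).toString(); // "42"
 * ```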
*/ static from(value) { if (BI) switch (typeof value) { case "string": if (value == "0") return this.ZERO; if (value == "") throw new Error("string is no integer"); value = BI.C(value); case "number": if (value === 0) return this.ZERO; value = BI.C(value); case "bigint": if (!value) return this.ZERO; if (value < BI.UMIN) throw new Error("signed value for ulong"); if (value > BI.UMAX) throw new Error("ulong too large"); BI.V.setBigUint64(0, value, true); return new PbULong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); } else switch (typeof value) { case "string": if (value == "0") return this.ZERO; value = value.trim(); if (!RE_DECIMAL_STR.test(value)) throw new Error("string is no integer"); let [minus, lo, hi] = goog_varint_1$2.int64fromString(value); if (minus) throw new Error("signed value for ulong"); return new PbULong(lo, hi); case "number": if (value == 0) return this.ZERO; if (!Number.isSafeInteger(value)) throw new Error("number is no integer"); if (value < 0) throw new Error("signed value for ulong"); return new PbULong(value, value / TWO_PWR_32_DBL); } throw new Error("unknown value " + typeof value); } /** * Convert to decimal string. */ toString() { return BI ? this.toBigInt().toString() : goog_varint_1$2.int64toString(this.lo, this.hi); } /** * Convert to native bigint. */ toBigInt() { assertBi(BI); BI.V.setInt32(0, this.lo, true); BI.V.setInt32(4, this.hi, true); return BI.V.getBigUint64(0, true); } }; exports.PbULong = PbULong; /** * ulong 0 singleton. */ PbULong.ZERO = new PbULong(0, 0); /** * 64-bit signed integer as two 32-bit values. * Converts between `string`, `number` and `bigint` representations. */ var PbLong = class PbLong extends SharedPbLong { /** * Create instance from a `string`, `number` or `bigint`. */ static from(value) { if (BI) switch (typeof value) { case "string": if (value == "0") return this.ZERO; if (value == "") throw new Error("string is no integer"); value = BI.C(value); case "number": if (value === 0) return this.ZERO; value = BI.C(value); case "bigint": if (!value) return this.ZERO; if (value < BI.MIN) throw new Error("signed long too small"); if (value > BI.MAX) throw new Error("signed long too large"); BI.V.setBigInt64(0, value, true); return new PbLong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); } else switch (typeof value) { case "string": if (value == "0") return this.ZERO; value = value.trim(); if (!RE_DECIMAL_STR.test(value)) throw new Error("string is no integer"); let [minus, lo, hi] = goog_varint_1$2.int64fromString(value); if (minus) { if (hi > HALF_2_PWR_32 || hi == HALF_2_PWR_32 && lo != 0) throw new Error("signed long too small"); } else if (hi >= HALF_2_PWR_32) throw new Error("signed long too large"); let pbl = new PbLong(lo, hi); return minus ? pbl.negate() : pbl; case "number": if (value == 0) return this.ZERO; if (!Number.isSafeInteger(value)) throw new Error("number is no integer"); return value > 0 ? new PbLong(value, value / TWO_PWR_32_DBL) : new PbLong(-value, -value / TWO_PWR_32_DBL).negate(); } throw new Error("unknown value " + typeof value); } /** * Do we have a minus sign? */ isNegative() { return (this.hi & HALF_2_PWR_32) !== 0; } /** * Negate two's complement. * Invert all the bits and add one to the result. */ negate() { let hi = ~this.hi, lo = this.lo; if (lo) lo = ~lo + 1; else hi += 1; return new PbLong(lo, hi); } /** * Convert to decimal string. 
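 *
 * Sketch: negative values are detected via the sign bit and printed
 * through `negate()` when BigInt is unavailable, so both code paths agree:
 *
 * ```ts
 * PbLong.from(-1).toString(); // "-1" (stored as lo = -1, hi = -1)
 * ```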
*/ toString() { if (BI) return this.toBigInt().toString(); if (this.isNegative()) { let n = this.negate(); return "-" + goog_varint_1$2.int64toString(n.lo, n.hi); } return goog_varint_1$2.int64toString(this.lo, this.hi); } /** * Convert to native bigint. */ toBigInt() { assertBi(BI); BI.V.setInt32(0, this.lo, true); BI.V.setInt32(4, this.hi, true); return BI.V.getBigInt64(0, true); } }; exports.PbLong = PbLong; /** * long 0 singleton. */ PbLong.ZERO = new PbLong(0, 0); } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/binary-reader.js var require_binary_reader = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/binary-reader.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.BinaryReader = exports.binaryReadOptions = void 0; const binary_format_contract_1$3 = require_binary_format_contract(); const pb_long_1$6 = require_pb_long(); const goog_varint_1$1 = require_goog_varint(); const defaultsRead$1 = { readUnknownField: true, readerFactory: (bytes) => new BinaryReader(bytes) }; /** * Make options for reading binary data from partial options. */ function binaryReadOptions(options) { return options ? Object.assign(Object.assign({}, defaultsRead$1), options) : defaultsRead$1; } exports.binaryReadOptions = binaryReadOptions; var BinaryReader = class { constructor(buf, textDecoder) { this.varint64 = goog_varint_1$1.varint64read; /** * Read a `uint32` field, an unsigned 32 bit varint. */ this.uint32 = goog_varint_1$1.varint32read; this.buf = buf; this.len = buf.length; this.pos = 0; this.view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); this.textDecoder = textDecoder !== null && textDecoder !== void 0 ? textDecoder : new TextDecoder("utf-8", { fatal: true, ignoreBOM: true }); } /** * Reads a tag - field number and wire type. */ tag() { let tag = this.uint32(), fieldNo = tag >>> 3, wireType = tag & 7; if (fieldNo <= 0 || wireType < 0 || wireType > 5) throw new Error("illegal tag: field no " + fieldNo + " wire type " + wireType); return [fieldNo, wireType]; } /** * Skip one element on the wire and return the skipped data. * Supports WireType.StartGroup since v2.0.0-alpha.23. */ skip(wireType) { let start = this.pos; switch (wireType) { case binary_format_contract_1$3.WireType.Varint: while (this.buf[this.pos++] & 128); break; case binary_format_contract_1$3.WireType.Bit64: this.pos += 4; case binary_format_contract_1$3.WireType.Bit32: this.pos += 4; break; case binary_format_contract_1$3.WireType.LengthDelimited: let len = this.uint32(); this.pos += len; break; case binary_format_contract_1$3.WireType.StartGroup: let t; while ((t = this.tag()[1]) !== binary_format_contract_1$3.WireType.EndGroup) this.skip(t); break; default: throw new Error("cant skip wire type " + wireType); } this.assertBounds(); return this.buf.subarray(start, this.pos); } /** * Throws error if position in byte array is out of range. */ assertBounds() { if (this.pos > this.len) throw new RangeError("premature EOF"); } /** * Read a `int32` field, a signed 32 bit varint. */ int32() { return this.uint32() | 0; } /** * Read a `sint32` field, a signed, zigzag-encoded 32-bit varint. */ sint32() { let zze = this.uint32(); return zze >>> 1 ^ -(zze & 1); } /** * Read a `int64` field, a signed 64-bit varint. */ int64() { return new pb_long_1$6.PbLong(...this.varint64()); } /** * Read a `uint64` field, an unsigned 64-bit varint.
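 *
 * Reader sketch: the wire bytes 0x08 0x2a are field number 1,
 * wire type varint, value 42:
 *
 * ```ts
 * const r = new BinaryReader(new Uint8Array([0x08, 0x2a]));
 * r.tag(); // [1, WireType.Varint]
 * r.uint64().toNumber(); // 42
 * ```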
*/ uint64() { return new pb_long_1$6.PbULong(...this.varint64()); } /** * Read a `sint64` field, a signed, zig-zag-encoded 64-bit varint. */ sint64() { let [lo, hi] = this.varint64(); let s$1 = -(lo & 1); lo = (lo >>> 1 | (hi & 1) << 31) ^ s$1; hi = hi >>> 1 ^ s$1; return new pb_long_1$6.PbLong(lo, hi); } /** * Read a `bool` field, a varint. */ bool() { let [lo, hi] = this.varint64(); return lo !== 0 || hi !== 0; } /** * Read a `fixed32` field, an unsigned, fixed-length 32-bit integer. */ fixed32() { return this.view.getUint32((this.pos += 4) - 4, true); } /** * Read a `sfixed32` field, a signed, fixed-length 32-bit integer. */ sfixed32() { return this.view.getInt32((this.pos += 4) - 4, true); } /** * Read a `fixed64` field, an unsigned, fixed-length 64 bit integer. */ fixed64() { return new pb_long_1$6.PbULong(this.sfixed32(), this.sfixed32()); } /** * Read a `sfixed64` field, a signed, fixed-length 64-bit integer. */ sfixed64() { return new pb_long_1$6.PbLong(this.sfixed32(), this.sfixed32()); } /** * Read a `float` field, 32-bit floating point number. */ float() { return this.view.getFloat32((this.pos += 4) - 4, true); } /** * Read a `double` field, a 64-bit floating point number. */ double() { return this.view.getFloat64((this.pos += 8) - 8, true); } /** * Read a `bytes` field, length-delimited arbitrary data. */ bytes() { let len = this.uint32(); let start = this.pos; this.pos += len; this.assertBounds(); return this.buf.subarray(start, start + len); } /** * Read a `string` field, length-delimited data converted to UTF-8 text. */ string() { return this.textDecoder.decode(this.bytes()); } }; exports.BinaryReader = BinaryReader; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/assert.js var require_assert = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/assert.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.assertFloat32 = exports.assertUInt32 = exports.assertInt32 = exports.assertNever = exports.assert = void 0; /** * assert that condition is true or throw error (with message) */ function assert(condition, msg) { if (!condition) throw new Error(msg); } exports.assert = assert; /** * assert that value cannot exist = type `never`. throw runtime error if it does. */ function assertNever(value, msg) { throw new Error(msg !== null && msg !== void 0 ?
msg : "Unexpected object: " + value); } exports.assertNever = assertNever; const FLOAT32_MAX = 34028234663852886e22, FLOAT32_MIN = -34028234663852886e22, UINT32_MAX = 4294967295, INT32_MAX = 2147483647, INT32_MIN = -2147483648; function assertInt32(arg) { if (typeof arg !== "number") throw new Error("invalid int 32: " + typeof arg); if (!Number.isInteger(arg) || arg > INT32_MAX || arg < INT32_MIN) throw new Error("invalid int 32: " + arg); } exports.assertInt32 = assertInt32; function assertUInt32(arg) { if (typeof arg !== "number") throw new Error("invalid uint 32: " + typeof arg); if (!Number.isInteger(arg) || arg > UINT32_MAX || arg < 0) throw new Error("invalid uint 32: " + arg); } exports.assertUInt32 = assertUInt32; function assertFloat32(arg) { if (typeof arg !== "number") throw new Error("invalid float 32: " + typeof arg); if (!Number.isFinite(arg)) return; if (arg > FLOAT32_MAX || arg < FLOAT32_MIN) throw new Error("invalid float 32: " + arg); } exports.assertFloat32 = assertFloat32; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/binary-writer.js var require_binary_writer = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/binary-writer.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.BinaryWriter = exports.binaryWriteOptions = void 0; const pb_long_1$5 = require_pb_long(); const goog_varint_1 = require_goog_varint(); const assert_1$7 = require_assert(); const defaultsWrite$1 = { writeUnknownFields: true, writerFactory: () => new BinaryWriter() }; /** * Make options for writing binary data from partial options. */ function binaryWriteOptions(options) { return options ? Object.assign(Object.assign({}, defaultsWrite$1), options) : defaultsWrite$1; } exports.binaryWriteOptions = binaryWriteOptions; var BinaryWriter = class { constructor(textEncoder) { /** * Previous fork states. */ this.stack = []; this.textEncoder = textEncoder !== null && textEncoder !== void 0 ? textEncoder : new TextEncoder(); this.chunks = []; this.buf = []; } /** * Return all bytes written and reset this writer. */ finish() { this.chunks.push(new Uint8Array(this.buf)); let len = 0; for (let i = 0; i < this.chunks.length; i++) len += this.chunks[i].length; let bytes = new Uint8Array(len); let offset = 0; for (let i = 0; i < this.chunks.length; i++) { bytes.set(this.chunks[i], offset); offset += this.chunks[i].length; } this.chunks = []; return bytes; } /** * Start a new fork for length-delimited data like a message * or a packed repeated field. * * Must be joined later with `join()`. */ fork() { this.stack.push({ chunks: this.chunks, buf: this.buf }); this.chunks = []; this.buf = []; return this; } /** * Join the last fork. Write its length and bytes, then * return to the previous state. */ join() { let chunk = this.finish(); let prev = this.stack.pop(); if (!prev) throw new Error("invalid state, fork stack empty"); this.chunks = prev.chunks; this.buf = prev.buf; this.uint32(chunk.byteLength); return this.raw(chunk); } /** * Writes a tag (field number and wire type). * * Equivalent to `uint32( (fieldNo << 3 | type) >>> 0 )`. * * Generated code should compute the tag ahead of time and call `uint32()`. */ tag(fieldNo, type) { return this.uint32((fieldNo << 3 | type) >>> 0); } /** * Write a chunk of raw bytes.
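 *
 * For the writer as a whole, a minimal sketch: field number 1, wire type
 * varint, value 42 serializes to 0x08 0x2a (the mirror image of the
 * reader example above):
 *
 * ```ts
 * const w = new BinaryWriter();
 * w.tag(1, WireType.Varint).uint32(42);
 * w.finish(); // Uint8Array [0x08, 0x2a]
 * ```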
*/ raw(chunk) { if (this.buf.length) { this.chunks.push(new Uint8Array(this.buf)); this.buf = []; } this.chunks.push(chunk); return this; } /** * Write a `uint32` value, an unsigned 32 bit varint. */ uint32(value) { assert_1$7.assertUInt32(value); while (value > 127) { this.buf.push(value & 127 | 128); value = value >>> 7; } this.buf.push(value); return this; } /** * Write a `int32` value, a signed 32 bit varint. */ int32(value) { assert_1$7.assertInt32(value); goog_varint_1.varint32write(value, this.buf); return this; } /** * Write a `bool` value, a varint. */ bool(value) { this.buf.push(value ? 1 : 0); return this; } /** * Write a `bytes` value, length-delimited arbitrary data. */ bytes(value) { this.uint32(value.byteLength); return this.raw(value); } /** * Write a `string` value, length-delimited data converted to UTF-8 text. */ string(value) { let chunk = this.textEncoder.encode(value); this.uint32(chunk.byteLength); return this.raw(chunk); } /** * Write a `float` value, 32-bit floating point number. */ float(value) { assert_1$7.assertFloat32(value); let chunk = new Uint8Array(4); new DataView(chunk.buffer).setFloat32(0, value, true); return this.raw(chunk); } /** * Write a `double` value, a 64-bit floating point number. */ double(value) { let chunk = new Uint8Array(8); new DataView(chunk.buffer).setFloat64(0, value, true); return this.raw(chunk); } /** * Write a `fixed32` value, an unsigned, fixed-length 32-bit integer. */ fixed32(value) { assert_1$7.assertUInt32(value); let chunk = new Uint8Array(4); new DataView(chunk.buffer).setUint32(0, value, true); return this.raw(chunk); } /** * Write a `sfixed32` value, a signed, fixed-length 32-bit integer. */ sfixed32(value) { assert_1$7.assertInt32(value); let chunk = new Uint8Array(4); new DataView(chunk.buffer).setInt32(0, value, true); return this.raw(chunk); } /** * Write a `sint32` value, a signed, zigzag-encoded 32-bit varint. */ sint32(value) { assert_1$7.assertInt32(value); value = (value << 1 ^ value >> 31) >>> 0; goog_varint_1.varint32write(value, this.buf); return this; } /** * Write a `sfixed64` value, a signed, fixed-length 64-bit integer. */ sfixed64(value) { let chunk = new Uint8Array(8); let view = new DataView(chunk.buffer); let long = pb_long_1$5.PbLong.from(value); view.setInt32(0, long.lo, true); view.setInt32(4, long.hi, true); return this.raw(chunk); } /** * Write a `fixed64` value, an unsigned, fixed-length 64 bit integer. */ fixed64(value) { let chunk = new Uint8Array(8); let view = new DataView(chunk.buffer); let long = pb_long_1$5.PbULong.from(value); view.setInt32(0, long.lo, true); view.setInt32(4, long.hi, true); return this.raw(chunk); } /** * Write a `int64` value, a signed 64-bit varint. */ int64(value) { let long = pb_long_1$5.PbLong.from(value); goog_varint_1.varint64write(long.lo, long.hi, this.buf); return this; } /** * Write a `sint64` value, a signed, zig-zag-encoded 64-bit varint. */ sint64(value) { let long = pb_long_1$5.PbLong.from(value), sign = long.hi >> 31, lo = long.lo << 1 ^ sign, hi = (long.hi << 1 | long.lo >>> 31) ^ sign; goog_varint_1.varint64write(lo, hi, this.buf); return this; } /** * Write a `uint64` value, an unsigned 64-bit varint.
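 *
 * Sketch: the 64-bit write methods accept `number`, `string` or `bigint`
 * via `PbULong.from`:
 *
 * ```ts
 * new BinaryWriter().uint64(42).finish(); // Uint8Array [0x2a]
 * ```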
*/ uint64(value) { let long = pb_long_1$5.PbULong.from(value); goog_varint_1.varint64write(long.lo, long.hi, this.buf); return this; } }; exports.BinaryWriter = BinaryWriter; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/json-format-contract.js var require_json_format_contract = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/json-format-contract.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.mergeJsonOptions = exports.jsonWriteOptions = exports.jsonReadOptions = void 0; const defaultsWrite = { emitDefaultValues: false, enumAsInteger: false, useProtoFieldName: false, prettySpaces: 0 }, defaultsRead = { ignoreUnknownFields: false }; /** * Make options for reading JSON data from partial options. */ function jsonReadOptions(options) { return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; } exports.jsonReadOptions = jsonReadOptions; /** * Make options for writing JSON data from partial options. */ function jsonWriteOptions(options) { return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; } exports.jsonWriteOptions = jsonWriteOptions; /** * Merges JSON write or read options. Later values override earlier values. Type registries are merged. */ function mergeJsonOptions(a, b) { var _a$2, _b$1; let c = Object.assign(Object.assign({}, a), b); c.typeRegistry = [...(_a$2 = a === null || a === void 0 ? void 0 : a.typeRegistry) !== null && _a$2 !== void 0 ? _a$2 : [], ...(_b$1 = b === null || b === void 0 ? void 0 : b.typeRegistry) !== null && _b$1 !== void 0 ? _b$1 : []]; return c; } exports.mergeJsonOptions = mergeJsonOptions; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/message-type-contract.js var require_message_type_contract = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/message-type-contract.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.MESSAGE_TYPE = void 0; /** * The symbol used as a key on message objects to store the message type. * * Note that this is an experimental feature - it is here to stay, but * implementation details may change without notice. */ exports.MESSAGE_TYPE = Symbol.for("protobuf-ts/message-type"); } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/lower-camel-case.js var require_lower_camel_case = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/lower-camel-case.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.lowerCamelCase = void 0; /** * Converts snake_case to lowerCamelCase. 
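 *
 * Illustrative conversions:
 *
 * ```ts
 * lowerCamelCase("foo_bar"); // "fooBar"
 * lowerCamelCase("foo2bar"); // "foo2Bar" (a digit also capitalizes the next letter)
 * ```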
* * Should behave like protoc: * https://github.com/protocolbuffers/protobuf/blob/e8ae137c96444ea313485ed1118c5e43b2099cf1/src/google/protobuf/compiler/java/java_helpers.cc#L118 */ function lowerCamelCase(snakeCase) { let capNext = false; const sb = []; for (let i = 0; i < snakeCase.length; i++) { let next = snakeCase.charAt(i); if (next == "_") capNext = true; else if (/\d/.test(next)) { sb.push(next); capNext = true; } else if (capNext) { sb.push(next.toUpperCase()); capNext = false; } else if (i == 0) sb.push(next.toLowerCase()); else sb.push(next); } return sb.join(""); } exports.lowerCamelCase = lowerCamelCase; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-info.js var require_reflection_info$1 = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-info.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.readMessageOption = exports.readFieldOption = exports.readFieldOptions = exports.normalizeFieldInfo = exports.RepeatType = exports.LongType = exports.ScalarType = void 0; const lower_camel_case_1$1 = require_lower_camel_case(); /** * Scalar value types. This is a subset of field types declared by protobuf * enum google.protobuf.FieldDescriptorProto.Type The types GROUP and MESSAGE * are omitted, but the numerical values are identical. */ var ScalarType; (function(ScalarType$1) { ScalarType$1[ScalarType$1["DOUBLE"] = 1] = "DOUBLE"; ScalarType$1[ScalarType$1["FLOAT"] = 2] = "FLOAT"; ScalarType$1[ScalarType$1["INT64"] = 3] = "INT64"; ScalarType$1[ScalarType$1["UINT64"] = 4] = "UINT64"; ScalarType$1[ScalarType$1["INT32"] = 5] = "INT32"; ScalarType$1[ScalarType$1["FIXED64"] = 6] = "FIXED64"; ScalarType$1[ScalarType$1["FIXED32"] = 7] = "FIXED32"; ScalarType$1[ScalarType$1["BOOL"] = 8] = "BOOL"; ScalarType$1[ScalarType$1["STRING"] = 9] = "STRING"; ScalarType$1[ScalarType$1["BYTES"] = 12] = "BYTES"; ScalarType$1[ScalarType$1["UINT32"] = 13] = "UINT32"; ScalarType$1[ScalarType$1["SFIXED32"] = 15] = "SFIXED32"; ScalarType$1[ScalarType$1["SFIXED64"] = 16] = "SFIXED64"; ScalarType$1[ScalarType$1["SINT32"] = 17] = "SINT32"; ScalarType$1[ScalarType$1["SINT64"] = 18] = "SINT64"; })(ScalarType = exports.ScalarType || (exports.ScalarType = {})); /** * JavaScript representation of 64 bit integral types. Equivalent to the * field option "jstype". * * By default, protobuf-ts represents 64 bit types as `bigint`. * * You can change the default behaviour by enabling the plugin parameter * `long_type_string`, which will represent 64 bit types as `string`. * * Alternatively, you can change the behaviour for individual fields * with the field option "jstype": * * ```protobuf * uint64 my_field = 1 [jstype = JS_STRING]; * uint64 other_field = 2 [jstype = JS_NUMBER]; * ``` */ var LongType; (function(LongType$1) { /** * Use JavaScript `bigint`. * * Field option `[jstype = JS_NORMAL]`. */ LongType$1[LongType$1["BIGINT"] = 0] = "BIGINT"; /** * Use JavaScript `string`. * * Field option `[jstype = JS_STRING]`. */ LongType$1[LongType$1["STRING"] = 1] = "STRING"; /** * Use JavaScript `number`. * * Large values will lose precision. * * Field option `[jstype = JS_NUMBER]`. */ LongType$1[LongType$1["NUMBER"] = 2] = "NUMBER"; })(LongType = exports.LongType || (exports.LongType = {})); /** * Protobuf 2.1.0 introduced packed repeated fields. * Setting the field option `[packed = true]` enables packing.
* * In proto3, all repeated fields are packed by default. * Setting the field option `[packed = false]` disables packing. * * Packed repeated fields are encoded with a single tag, * then a length-delimiter, then the element values. * * Unpacked repeated fields are encoded with a tag and * value for each element. * * `bytes` and `string` cannot be packed. */ var RepeatType; (function(RepeatType$1) { /** * The field is not repeated. */ RepeatType$1[RepeatType$1["NO"] = 0] = "NO"; /** * The field is repeated and should be packed. * Invalid for `bytes` and `string`, they cannot be packed. */ RepeatType$1[RepeatType$1["PACKED"] = 1] = "PACKED"; /** * The field is repeated but should not be packed. * The only valid repeat type for repeated `bytes` and `string`. */ RepeatType$1[RepeatType$1["UNPACKED"] = 2] = "UNPACKED"; })(RepeatType = exports.RepeatType || (exports.RepeatType = {})); /** * Turns PartialFieldInfo into FieldInfo. */ function normalizeFieldInfo(field) { var _a$2, _b$1, _c$1, _d$1; field.localName = (_a$2 = field.localName) !== null && _a$2 !== void 0 ? _a$2 : lower_camel_case_1$1.lowerCamelCase(field.name); field.jsonName = (_b$1 = field.jsonName) !== null && _b$1 !== void 0 ? _b$1 : lower_camel_case_1$1.lowerCamelCase(field.name); field.repeat = (_c$1 = field.repeat) !== null && _c$1 !== void 0 ? _c$1 : RepeatType.NO; field.opt = (_d$1 = field.opt) !== null && _d$1 !== void 0 ? _d$1 : field.repeat ? false : field.oneof ? false : field.kind == "message"; return field; } exports.normalizeFieldInfo = normalizeFieldInfo; /** * Read custom field options from a generated message type. * * @deprecated use readFieldOption() */ function readFieldOptions(messageType, fieldName, extensionName, extensionType) { var _a$2; const options = (_a$2 = messageType.fields.find((m$1, i) => m$1.localName == fieldName || i == fieldName)) === null || _a$2 === void 0 ? void 0 : _a$2.options; return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : void 0; } exports.readFieldOptions = readFieldOptions; function readFieldOption(messageType, fieldName, extensionName, extensionType) { var _a$2; const options = (_a$2 = messageType.fields.find((m$1, i) => m$1.localName == fieldName || i == fieldName)) === null || _a$2 === void 0 ? void 0 : _a$2.options; if (!options) return void 0; const optionVal = options[extensionName]; if (optionVal === void 0) return optionVal; return extensionType ? extensionType.fromJson(optionVal) : optionVal; } exports.readFieldOption = readFieldOption; function readMessageOption(messageType, extensionName, extensionType) { const options = messageType.options; const optionVal = options[extensionName]; if (optionVal === void 0) return optionVal; return extensionType ? extensionType.fromJson(optionVal) : optionVal; } exports.readMessageOption = readMessageOption; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/oneof.js var require_oneof = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/oneof.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.getSelectedOneofValue = exports.clearOneofValue = exports.setUnknownOneofValue = exports.setOneofValue = exports.getOneofValue = exports.isOneofGroup = void 0; /** * Is the given value a valid oneof group? * * We represent protobuf `oneof` as algebraic data types (ADT) in generated * code. 
But when working with messages of unknown type, the ADT does not * help us. * * This type guard checks if the given object adheres to the ADT rules, which * are as follows: * * 1) Must be an object. * * 2) Must have a "oneofKind" discriminator property. * * 3) If "oneofKind" is `undefined`, no member field is selected. The object * must not have any other properties. * * 4) If "oneofKind" is a `string`, the member field with this name is * selected. * * 5) If a member field is selected, the object must have a second property * with this name. The property must not be `undefined`. * * 6) No extra properties are allowed. The object has either one property * (no selection) or two properties (selection). * */ function isOneofGroup(any) { if (typeof any != "object" || any === null || !any.hasOwnProperty("oneofKind")) return false; switch (typeof any.oneofKind) { case "string": if (any[any.oneofKind] === void 0) return false; return Object.keys(any).length == 2; case "undefined": return Object.keys(any).length == 1; default: return false; } } exports.isOneofGroup = isOneofGroup; /** * Returns the value of the given field in a oneof group. */ function getOneofValue(oneof, kind) { return oneof[kind]; } exports.getOneofValue = getOneofValue; function setOneofValue(oneof, kind, value) { if (oneof.oneofKind !== void 0) delete oneof[oneof.oneofKind]; oneof.oneofKind = kind; if (value !== void 0) oneof[kind] = value; } exports.setOneofValue = setOneofValue; function setUnknownOneofValue(oneof, kind, value) { if (oneof.oneofKind !== void 0) delete oneof[oneof.oneofKind]; oneof.oneofKind = kind; if (value !== void 0 && kind !== void 0) oneof[kind] = value; } exports.setUnknownOneofValue = setUnknownOneofValue; /** * Removes the selected field in a oneof group. * * Note that the recommended way to modify a oneof group is to set * a new object: * * ```ts * message.result = { oneofKind: undefined }; * ``` */ function clearOneofValue(oneof) { if (oneof.oneofKind !== void 0) delete oneof[oneof.oneofKind]; oneof.oneofKind = void 0; } exports.clearOneofValue = clearOneofValue; /** * Returns the selected value of the given oneof group. * * Note that the recommended way to access a oneof group is to check * the "oneofKind" property and let TypeScript narrow down the union * type for you: * * ```ts * if (message.result.oneofKind === "error") { * message.result.error; // string * } * ``` * * In the rare case you just need the value, and do not care about * which protobuf field is selected, you can use this function * for convenience. */ function getSelectedOneofValue(oneof) { if (oneof.oneofKind === void 0) return void 0; return oneof[oneof.oneofKind]; } exports.getSelectedOneofValue = getSelectedOneofValue; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-type-check.js var require_reflection_type_check = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-type-check.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.ReflectionTypeCheck = void 0; const reflection_info_1$11 = require_reflection_info$1(); const oneof_1$1 = require_oneof(); var ReflectionTypeCheck = class { constructor(info) { var _a$2; this.fields = (_a$2 = info.fields) !== null && _a$2 !== void 0 ?
_a$2 : []; } prepare() { if (this.data) return; const req$1 = [], known = [], oneofs = []; for (let field of this.fields) if (field.oneof) { if (!oneofs.includes(field.oneof)) { oneofs.push(field.oneof); req$1.push(field.oneof); known.push(field.oneof); } } else { known.push(field.localName); switch (field.kind) { case "scalar": case "enum": if (!field.opt || field.repeat) req$1.push(field.localName); break; case "message": if (field.repeat) req$1.push(field.localName); break; case "map": req$1.push(field.localName); break; } } this.data = { req: req$1, known, oneofs: Object.values(oneofs) }; } /** * Is the argument a valid message as specified by the * reflection information? * * Checks all field types recursively. The `depth` * specifies how deep into the structure the check will be. * * With a depth of 0, only the presence of fields * is checked. * * With a depth of 1 or more, the field types are checked. * * With a depth of 2 or more, the members of map, repeated * and message fields are checked. * * Message fields will be checked recursively with depth - 1. * * The number of map entries / repeated values being checked * is < depth. */ is(message, depth, allowExcessProperties = false) { if (depth < 0) return true; if (message === null || message === void 0 || typeof message != "object") return false; this.prepare(); let keys = Object.keys(message), data = this.data; if (keys.length < data.req.length || data.req.some((n) => !keys.includes(n))) return false; if (!allowExcessProperties) { if (keys.some((k) => !data.known.includes(k))) return false; } if (depth < 1) return true; for (const name of data.oneofs) { const group = message[name]; if (!oneof_1$1.isOneofGroup(group)) return false; if (group.oneofKind === void 0) continue; const field = this.fields.find((f) => f.localName === group.oneofKind); if (!field) return false; if (!this.field(group[group.oneofKind], field, allowExcessProperties, depth)) return false; } for (const field of this.fields) { if (field.oneof !== void 0) continue; if (!this.field(message[field.localName], field, allowExcessProperties, depth)) return false; } return true; } field(arg, field, allowExcessProperties, depth) { let repeated = field.repeat; switch (field.kind) { case "scalar": if (arg === void 0) return field.opt; if (repeated) return this.scalars(arg, field.T, depth, field.L); return this.scalar(arg, field.T, field.L); case "enum": if (arg === void 0) return field.opt; if (repeated) return this.scalars(arg, reflection_info_1$11.ScalarType.INT32, depth); return this.scalar(arg, reflection_info_1$11.ScalarType.INT32); case "message": if (arg === void 0) return true; if (repeated) return this.messages(arg, field.T(), allowExcessProperties, depth); return this.message(arg, field.T(), allowExcessProperties, depth); case "map": if (typeof arg != "object" || arg === null) return false; if (depth < 2) return true; if (!this.mapKeys(arg, field.K, depth)) return false; switch (field.V.kind) { case "scalar": return this.scalars(Object.values(arg), field.V.T, depth, field.V.L); case "enum": return this.scalars(Object.values(arg), reflection_info_1$11.ScalarType.INT32, depth); case "message": return this.messages(Object.values(arg), field.V.T(), allowExcessProperties, depth); } break; } return true; } message(arg, type, allowExcessProperties, depth) { if (allowExcessProperties) return type.isAssignable(arg, depth); return type.is(arg, depth); } messages(arg, type, allowExcessProperties, depth) { if (!Array.isArray(arg)) return false; if (depth < 2) return true; if 
(allowExcessProperties) { for (let i = 0; i < arg.length && i < depth; i++) if (!type.isAssignable(arg[i], depth - 1)) return false; } else for (let i = 0; i < arg.length && i < depth; i++) if (!type.is(arg[i], depth - 1)) return false; return true; } scalar(arg, type, longType) { let argType = typeof arg; switch (type) { case reflection_info_1$11.ScalarType.UINT64: case reflection_info_1$11.ScalarType.FIXED64: case reflection_info_1$11.ScalarType.INT64: case reflection_info_1$11.ScalarType.SFIXED64: case reflection_info_1$11.ScalarType.SINT64: switch (longType) { case reflection_info_1$11.LongType.BIGINT: return argType == "bigint"; case reflection_info_1$11.LongType.NUMBER: return argType == "number" && !isNaN(arg); default: return argType == "string"; } case reflection_info_1$11.ScalarType.BOOL: return argType == "boolean"; case reflection_info_1$11.ScalarType.STRING: return argType == "string"; case reflection_info_1$11.ScalarType.BYTES: return arg instanceof Uint8Array; case reflection_info_1$11.ScalarType.DOUBLE: case reflection_info_1$11.ScalarType.FLOAT: return argType == "number" && !isNaN(arg); default: return argType == "number" && Number.isInteger(arg); } } scalars(arg, type, depth, longType) { if (!Array.isArray(arg)) return false; if (depth < 2) return true; if (Array.isArray(arg)) { for (let i = 0; i < arg.length && i < depth; i++) if (!this.scalar(arg[i], type, longType)) return false; } return true; } mapKeys(map, type, depth) { let keys = Object.keys(map); switch (type) { case reflection_info_1$11.ScalarType.INT32: case reflection_info_1$11.ScalarType.FIXED32: case reflection_info_1$11.ScalarType.SFIXED32: case reflection_info_1$11.ScalarType.SINT32: case reflection_info_1$11.ScalarType.UINT32: return this.scalars(keys.slice(0, depth).map((k) => parseInt(k)), type, depth); case reflection_info_1$11.ScalarType.BOOL: return this.scalars(keys.slice(0, depth).map((k) => k == "true" ? true : k == "false" ? false : k), type, depth); default: return this.scalars(keys, type, depth, reflection_info_1$11.LongType.STRING); } } }; exports.ReflectionTypeCheck = ReflectionTypeCheck; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-long-convert.js var require_reflection_long_convert = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-long-convert.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.reflectionLongConvert = void 0; const reflection_info_1$10 = require_reflection_info$1(); /** * Utility method to convert a PbLong or PbULong to a JavaScript * representation during runtime. * * Works with generated field information, `undefined` is equivalent * to `STRING`.
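 *
 * A sketch of the three representations:
 *
 * ```ts
 * reflectionLongConvert(PbLong.from(42), LongType.BIGINT); // 42n
 * reflectionLongConvert(PbLong.from(42), LongType.NUMBER); // 42
 * reflectionLongConvert(PbLong.from(42), undefined); // "42"
 * ```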
*/ function reflectionLongConvert(long, type) { switch (type) { case reflection_info_1$10.LongType.BIGINT: return long.toBigInt(); case reflection_info_1$10.LongType.NUMBER: return long.toNumber(); default: return long.toString(); } } exports.reflectionLongConvert = reflectionLongConvert; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-reader.js var require_reflection_json_reader = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-reader.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.ReflectionJsonReader = void 0; const json_typings_1$2 = require_json_typings(); const base64_1$2 = require_base64(); const reflection_info_1$9 = require_reflection_info$1(); const pb_long_1$4 = require_pb_long(); const assert_1$6 = require_assert(); const reflection_long_convert_1$2 = require_reflection_long_convert(); /** * Reads proto3 messages in canonical JSON format using reflection information. * * https://developers.google.com/protocol-buffers/docs/proto3#json */ var ReflectionJsonReader = class { constructor(info) { this.info = info; } prepare() { var _a$2; if (this.fMap === void 0) { this.fMap = {}; const fieldsInput = (_a$2 = this.info.fields) !== null && _a$2 !== void 0 ? _a$2 : []; for (const field of fieldsInput) { this.fMap[field.name] = field; this.fMap[field.jsonName] = field; this.fMap[field.localName] = field; } } } assert(condition, fieldName, jsonValue) { if (!condition) { let what = json_typings_1$2.typeofJsonValue(jsonValue); if (what == "number" || what == "boolean") what = jsonValue.toString(); throw new Error(`Cannot parse JSON ${what} for ${this.info.typeName}#${fieldName}`); } } /** * Reads a message from canonical JSON format into the target message. * * Repeated fields are appended. Map entries are added, overwriting * existing keys. * * If a message field is already present, it will be merged with the * new data. */ read(input, message, options) { this.prepare(); const oneofsHandled = []; for (const [jsonKey, jsonValue] of Object.entries(input)) { const field = this.fMap[jsonKey]; if (!field) { if (!options.ignoreUnknownFields) throw new Error(`Found unknown field while reading ${this.info.typeName} from JSON format. 
JSON key: ${jsonKey}`); continue; } const localName = field.localName; let target; if (field.oneof) { if (jsonValue === null && (field.kind !== "enum" || field.T()[0] !== "google.protobuf.NullValue")) continue; if (oneofsHandled.includes(field.oneof)) throw new Error(`Multiple members of the oneof group "${field.oneof}" of ${this.info.typeName} are present in JSON.`); oneofsHandled.push(field.oneof); target = message[field.oneof] = { oneofKind: localName }; } else target = message; if (field.kind == "map") { if (jsonValue === null) continue; this.assert(json_typings_1$2.isJsonObject(jsonValue), field.name, jsonValue); const fieldObj = target[localName]; for (const [jsonObjKey, jsonObjValue] of Object.entries(jsonValue)) { this.assert(jsonObjValue !== null, field.name + " map value", null); let val; switch (field.V.kind) { case "message": val = field.V.T().internalJsonRead(jsonObjValue, options); break; case "enum": val = this.enum(field.V.T(), jsonObjValue, field.name, options.ignoreUnknownFields); if (val === false) continue; break; case "scalar": val = this.scalar(jsonObjValue, field.V.T, field.V.L, field.name); break; } this.assert(val !== void 0, field.name + " map value", jsonObjValue); let key = jsonObjKey; if (field.K == reflection_info_1$9.ScalarType.BOOL) key = key == "true" ? true : key == "false" ? false : key; key = this.scalar(key, field.K, reflection_info_1$9.LongType.STRING, field.name).toString(); fieldObj[key] = val; } } else if (field.repeat) { if (jsonValue === null) continue; this.assert(Array.isArray(jsonValue), field.name, jsonValue); const fieldArr = target[localName]; for (const jsonItem of jsonValue) { this.assert(jsonItem !== null, field.name, null); let val; switch (field.kind) { case "message": val = field.T().internalJsonRead(jsonItem, options); break; case "enum": val = this.enum(field.T(), jsonItem, field.name, options.ignoreUnknownFields); if (val === false) continue; break; case "scalar": val = this.scalar(jsonItem, field.T, field.L, field.name); break; } this.assert(val !== void 0, field.name, jsonValue); fieldArr.push(val); } } else switch (field.kind) { case "message": if (jsonValue === null && field.T().typeName != "google.protobuf.Value") { this.assert(field.oneof === void 0, field.name + " (oneof member)", null); continue; } target[localName] = field.T().internalJsonRead(jsonValue, options, target[localName]); break; case "enum": if (jsonValue === null) continue; let val = this.enum(field.T(), jsonValue, field.name, options.ignoreUnknownFields); if (val === false) continue; target[localName] = val; break; case "scalar": if (jsonValue === null) continue; target[localName] = this.scalar(jsonValue, field.T, field.L, field.name); break; } } } /** * Returns `false` for unrecognized string representations. * * google.protobuf.NullValue accepts only JSON `null` (or the old `"NULL_VALUE"`). 
*/ enum(type, json$1, fieldName, ignoreUnknownFields) { if (type[0] == "google.protobuf.NullValue") assert_1$6.assert(json$1 === null || json$1 === "NULL_VALUE", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type[0]} only accepts null.`); if (json$1 === null) return 0; switch (typeof json$1) { case "number": assert_1$6.assert(Number.isInteger(json$1), `Unable to parse field ${this.info.typeName}#${fieldName}, enum can only be integral number, got ${json$1}.`); return json$1; case "string": let localEnumName = json$1; if (type[2] && json$1.substring(0, type[2].length) === type[2]) localEnumName = json$1.substring(type[2].length); let enumNumber = type[1][localEnumName]; if (typeof enumNumber === "undefined" && ignoreUnknownFields) return false; assert_1$6.assert(typeof enumNumber == "number", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type[0]} has no value for "${json$1}".`); return enumNumber; } assert_1$6.assert(false, `Unable to parse field ${this.info.typeName}#${fieldName}, cannot parse enum value from ${typeof json$1}.`); } scalar(json$1, type, longType, fieldName) { let e; try { switch (type) { case reflection_info_1$9.ScalarType.DOUBLE: case reflection_info_1$9.ScalarType.FLOAT: if (json$1 === null) return 0; if (json$1 === "NaN") return Number.NaN; if (json$1 === "Infinity") return Number.POSITIVE_INFINITY; if (json$1 === "-Infinity") return Number.NEGATIVE_INFINITY; if (json$1 === "") { e = "empty string"; break; } if (typeof json$1 == "string" && json$1.trim().length !== json$1.length) { e = "extra whitespace"; break; } if (typeof json$1 != "string" && typeof json$1 != "number") break; let float = Number(json$1); if (Number.isNaN(float)) { e = "not a number"; break; } if (!Number.isFinite(float)) { e = "too large or small"; break; } if (type == reflection_info_1$9.ScalarType.FLOAT) assert_1$6.assertFloat32(float); return float; case reflection_info_1$9.ScalarType.INT32: case reflection_info_1$9.ScalarType.FIXED32: case reflection_info_1$9.ScalarType.SFIXED32: case reflection_info_1$9.ScalarType.SINT32: case reflection_info_1$9.ScalarType.UINT32: if (json$1 === null) return 0; let int32; if (typeof json$1 == "number") int32 = json$1; else if (json$1 === "") e = "empty string"; else if (typeof json$1 == "string") if (json$1.trim().length !== json$1.length) e = "extra whitespace"; else int32 = Number(json$1); if (int32 === void 0) break; if (type == reflection_info_1$9.ScalarType.UINT32) assert_1$6.assertUInt32(int32); else assert_1$6.assertInt32(int32); return int32; case reflection_info_1$9.ScalarType.INT64: case reflection_info_1$9.ScalarType.SFIXED64: case reflection_info_1$9.ScalarType.SINT64: if (json$1 === null) return reflection_long_convert_1$2.reflectionLongConvert(pb_long_1$4.PbLong.ZERO, longType); if (typeof json$1 != "number" && typeof json$1 != "string") break; return reflection_long_convert_1$2.reflectionLongConvert(pb_long_1$4.PbLong.from(json$1), longType); case reflection_info_1$9.ScalarType.FIXED64: case reflection_info_1$9.ScalarType.UINT64: if (json$1 === null) return reflection_long_convert_1$2.reflectionLongConvert(pb_long_1$4.PbULong.ZERO, longType); if (typeof json$1 != "number" && typeof json$1 != "string") break; return reflection_long_convert_1$2.reflectionLongConvert(pb_long_1$4.PbULong.from(json$1), longType); case reflection_info_1$9.ScalarType.BOOL: if (json$1 === null) return false; if (typeof json$1 !== "boolean") break; return json$1; case reflection_info_1$9.ScalarType.STRING: if (json$1 === null)
return ""; if (typeof json$1 !== "string") { e = "extra whitespace"; break; } try { encodeURIComponent(json$1); } catch (e$1) { e$1 = "invalid UTF8"; break; } return json$1; case reflection_info_1$9.ScalarType.BYTES: if (json$1 === null || json$1 === "") return new Uint8Array(0); if (typeof json$1 !== "string") break; return base64_1$2.base64decode(json$1); } } catch (error) { e = error.message; } this.assert(false, fieldName + (e ? " - " + e : ""), json$1); } }; exports.ReflectionJsonReader = ReflectionJsonReader; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-writer.js var require_reflection_json_writer = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-writer.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.ReflectionJsonWriter = void 0; const base64_1$1 = require_base64(); const pb_long_1$3 = require_pb_long(); const reflection_info_1$8 = require_reflection_info$1(); const assert_1$5 = require_assert(); /** * Writes proto3 messages in canonical JSON format using reflection * information. * * https://developers.google.com/protocol-buffers/docs/proto3#json */ var ReflectionJsonWriter = class { constructor(info) { var _a$2; this.fields = (_a$2 = info.fields) !== null && _a$2 !== void 0 ? _a$2 : []; } /** * Converts the message to a JSON object, based on the field descriptors. */ write(message, options) { const json$1 = {}, source = message; for (const field of this.fields) { if (!field.oneof) { let jsonValue$1 = this.field(field, source[field.localName], options); if (jsonValue$1 !== void 0) json$1[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue$1; continue; } const group = source[field.oneof]; if (group.oneofKind !== field.localName) continue; const opt = field.kind == "scalar" || field.kind == "enum" ? Object.assign(Object.assign({}, options), { emitDefaultValues: true }) : options; let jsonValue = this.field(field, group[field.localName], opt); assert_1$5.assert(jsonValue !== void 0); json$1[options.useProtoFieldName ? 
field.name : field.jsonName] = jsonValue; } return json$1; } field(field, value, options) { let jsonValue = void 0; if (field.kind == "map") { assert_1$5.assert(typeof value == "object" && value !== null); const jsonObj = {}; switch (field.V.kind) { case "scalar": for (const [entryKey, entryValue] of Object.entries(value)) { const val = this.scalar(field.V.T, entryValue, field.name, false, true); assert_1$5.assert(val !== void 0); jsonObj[entryKey.toString()] = val; } break; case "message": const messageType = field.V.T(); for (const [entryKey, entryValue] of Object.entries(value)) { const val = this.message(messageType, entryValue, field.name, options); assert_1$5.assert(val !== void 0); jsonObj[entryKey.toString()] = val; } break; case "enum": const enumInfo = field.V.T(); for (const [entryKey, entryValue] of Object.entries(value)) { assert_1$5.assert(entryValue === void 0 || typeof entryValue == "number"); const val = this.enum(enumInfo, entryValue, field.name, false, true, options.enumAsInteger); assert_1$5.assert(val !== void 0); jsonObj[entryKey.toString()] = val; } break; } if (options.emitDefaultValues || Object.keys(jsonObj).length > 0) jsonValue = jsonObj; } else if (field.repeat) { assert_1$5.assert(Array.isArray(value)); const jsonArr = []; switch (field.kind) { case "scalar": for (let i = 0; i < value.length; i++) { const val = this.scalar(field.T, value[i], field.name, field.opt, true); assert_1$5.assert(val !== void 0); jsonArr.push(val); } break; case "enum": const enumInfo = field.T(); for (let i = 0; i < value.length; i++) { assert_1$5.assert(value[i] === void 0 || typeof value[i] == "number"); const val = this.enum(enumInfo, value[i], field.name, field.opt, true, options.enumAsInteger); assert_1$5.assert(val !== void 0); jsonArr.push(val); } break; case "message": const messageType = field.T(); for (let i = 0; i < value.length; i++) { const val = this.message(messageType, value[i], field.name, options); assert_1$5.assert(val !== void 0); jsonArr.push(val); } break; } if (options.emitDefaultValues || jsonArr.length > 0) jsonValue = jsonArr; } else switch (field.kind) { case "scalar": jsonValue = this.scalar(field.T, value, field.name, field.opt, options.emitDefaultValues); break; case "enum": jsonValue = this.enum(field.T(), value, field.name, field.opt, options.emitDefaultValues, options.enumAsInteger); break; case "message": jsonValue = this.message(field.T(), value, field.name, options); break; } return jsonValue; } /** * Returns `null` as the default for google.protobuf.NullValue. */ enum(type, value, fieldName, optional, emitDefaultValues, enumAsInteger) { if (type[0] == "google.protobuf.NullValue") return !emitDefaultValues && !optional ? void 0 : null; if (value === void 0) { assert_1$5.assert(optional); return void 0; } if (value === 0 && !emitDefaultValues && !optional) return void 0; assert_1$5.assert(typeof value == "number"); assert_1$5.assert(Number.isInteger(value)); if (enumAsInteger || !type[1].hasOwnProperty(value)) return value; if (type[2]) return type[2] + type[1][value]; return type[1][value]; } message(type, value, fieldName, options) { if (value === void 0) return options.emitDefaultValues ?
null : void 0; return type.internalJsonWrite(value, options); } scalar(type, value, fieldName, optional, emitDefaultValues) { if (value === void 0) { assert_1$5.assert(optional); return void 0; } const ed = emitDefaultValues || optional; switch (type) { case reflection_info_1$8.ScalarType.INT32: case reflection_info_1$8.ScalarType.SFIXED32: case reflection_info_1$8.ScalarType.SINT32: if (value === 0) return ed ? 0 : void 0; assert_1$5.assertInt32(value); return value; case reflection_info_1$8.ScalarType.FIXED32: case reflection_info_1$8.ScalarType.UINT32: if (value === 0) return ed ? 0 : void 0; assert_1$5.assertUInt32(value); return value; case reflection_info_1$8.ScalarType.FLOAT: assert_1$5.assertFloat32(value); /* deliberate fall-through: FLOAT shares the DOUBLE logic below */ case reflection_info_1$8.ScalarType.DOUBLE: if (value === 0) return ed ? 0 : void 0; assert_1$5.assert(typeof value == "number"); if (Number.isNaN(value)) return "NaN"; if (value === Number.POSITIVE_INFINITY) return "Infinity"; if (value === Number.NEGATIVE_INFINITY) return "-Infinity"; return value; case reflection_info_1$8.ScalarType.STRING: if (value === "") return ed ? "" : void 0; assert_1$5.assert(typeof value == "string"); return value; case reflection_info_1$8.ScalarType.BOOL: if (value === false) return ed ? false : void 0; assert_1$5.assert(typeof value == "boolean"); return value; case reflection_info_1$8.ScalarType.UINT64: case reflection_info_1$8.ScalarType.FIXED64: assert_1$5.assert(typeof value == "number" || typeof value == "string" || typeof value == "bigint"); let ulong = pb_long_1$3.PbULong.from(value); if (ulong.isZero() && !ed) return void 0; return ulong.toString(); case reflection_info_1$8.ScalarType.INT64: case reflection_info_1$8.ScalarType.SFIXED64: case reflection_info_1$8.ScalarType.SINT64: assert_1$5.assert(typeof value == "number" || typeof value == "string" || typeof value == "bigint"); let long = pb_long_1$3.PbLong.from(value); if (long.isZero() && !ed) return void 0; return long.toString(); case reflection_info_1$8.ScalarType.BYTES: assert_1$5.assert(value instanceof Uint8Array); if (!value.byteLength) return ed ? "" : void 0; return base64_1$1.base64encode(value); } } }; exports.ReflectionJsonWriter = ReflectionJsonWriter; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-scalar-default.js var require_reflection_scalar_default = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-scalar-default.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.reflectionScalarDefault = void 0; const reflection_info_1$7 = require_reflection_info$1(); const reflection_long_convert_1$1 = require_reflection_long_convert(); const pb_long_1$2 = require_pb_long(); /** * Creates the default value for a scalar type.
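*
* A hedged sketch of the defaults it yields (ScalarType/LongType are the
* enums from this runtime; the INT64 result assumes the LongType.STRING
* default):
*
*   reflectionScalarDefault(ScalarType.BOOL);   // false
*   reflectionScalarDefault(ScalarType.STRING); // ""
*   reflectionScalarDefault(ScalarType.BYTES);  // new Uint8Array(0)
*   reflectionScalarDefault(ScalarType.INT64);  // "0"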
*/ function reflectionScalarDefault(type, longType = reflection_info_1$7.LongType.STRING) { switch (type) { case reflection_info_1$7.ScalarType.BOOL: return false; case reflection_info_1$7.ScalarType.UINT64: case reflection_info_1$7.ScalarType.FIXED64: return reflection_long_convert_1$1.reflectionLongConvert(pb_long_1$2.PbULong.ZERO, longType); case reflection_info_1$7.ScalarType.INT64: case reflection_info_1$7.ScalarType.SFIXED64: case reflection_info_1$7.ScalarType.SINT64: return reflection_long_convert_1$1.reflectionLongConvert(pb_long_1$2.PbLong.ZERO, longType); case reflection_info_1$7.ScalarType.DOUBLE: case reflection_info_1$7.ScalarType.FLOAT: return 0; case reflection_info_1$7.ScalarType.BYTES: return new Uint8Array(0); case reflection_info_1$7.ScalarType.STRING: return ""; default: return 0; } } exports.reflectionScalarDefault = reflectionScalarDefault; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-reader.js var require_reflection_binary_reader = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-reader.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.ReflectionBinaryReader = void 0; const binary_format_contract_1$2 = require_binary_format_contract(); const reflection_info_1$6 = require_reflection_info$1(); const reflection_long_convert_1 = require_reflection_long_convert(); const reflection_scalar_default_1$2 = require_reflection_scalar_default(); /** * Reads proto3 messages in binary format using reflection information. * * https://developers.google.com/protocol-buffers/docs/encoding */ var ReflectionBinaryReader = class { constructor(info) { this.info = info; } prepare() { var _a$2; if (!this.fieldNoToField) { const fieldsInput = (_a$2 = this.info.fields) !== null && _a$2 !== void 0 ? _a$2 : []; this.fieldNoToField = new Map(fieldsInput.map((field) => [field.no, field])); } } /** * Reads a message from binary format into the target message. * * Repeated fields are appended. Map entries are added, overwriting * existing keys. * * If a message field is already present, it will be merged with the * new data. */ read(reader, message, options, length) { this.prepare(); const end = length === void 0 ? reader.len : reader.pos + length; while (reader.pos < end) { const [fieldNo, wireType] = reader.tag(), field = this.fieldNoToField.get(fieldNo); if (!field) { let u = options.readUnknownField; if (u == "throw") throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.info.typeName}`); let d$1 = reader.skip(wireType); if (u !== false) (u === true ? binary_format_contract_1$2.UnknownFieldHandler.onRead : u)(this.info.typeName, message, fieldNo, wireType, d$1); continue; } let target = message, repeated = field.repeat, localName = field.localName; if (field.oneof) { target = target[field.oneof]; if (target.oneofKind !== localName) target = message[field.oneof] = { oneofKind: localName }; } switch (field.kind) { case "scalar": case "enum": let T = field.kind == "enum" ? reflection_info_1$6.ScalarType.INT32 : field.T; let L = field.kind == "scalar" ? 
field.L : void 0; if (repeated) { let arr = target[localName]; if (wireType == binary_format_contract_1$2.WireType.LengthDelimited && T != reflection_info_1$6.ScalarType.STRING && T != reflection_info_1$6.ScalarType.BYTES) { let e = reader.uint32() + reader.pos; while (reader.pos < e) arr.push(this.scalar(reader, T, L)); } else arr.push(this.scalar(reader, T, L)); } else target[localName] = this.scalar(reader, T, L); break; case "message": if (repeated) { let arr = target[localName]; let msg = field.T().internalBinaryRead(reader, reader.uint32(), options); arr.push(msg); } else target[localName] = field.T().internalBinaryRead(reader, reader.uint32(), options, target[localName]); break; case "map": let [mapKey, mapVal] = this.mapEntry(field, reader, options); target[localName][mapKey] = mapVal; break; } } } /** * Read a map field, expecting key field = 1, value field = 2 */ mapEntry(field, reader, options) { let length = reader.uint32(); let end = reader.pos + length; let key = void 0; let val = void 0; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case 1: if (field.K == reflection_info_1$6.ScalarType.BOOL) key = reader.bool().toString(); else key = this.scalar(reader, field.K, reflection_info_1$6.LongType.STRING); break; case 2: switch (field.V.kind) { case "scalar": val = this.scalar(reader, field.V.T, field.V.L); break; case "enum": val = reader.int32(); break; case "message": val = field.V.T().internalBinaryRead(reader, reader.uint32(), options); break; } break; default: throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) in map entry for ${this.info.typeName}#${field.name}`); } } if (key === void 0) { let keyRaw = reflection_scalar_default_1$2.reflectionScalarDefault(field.K); key = field.K == reflection_info_1$6.ScalarType.BOOL ? 
keyRaw.toString() : keyRaw; } if (val === void 0) switch (field.V.kind) { case "scalar": val = reflection_scalar_default_1$2.reflectionScalarDefault(field.V.T, field.V.L); break; case "enum": val = 0; break; case "message": val = field.V.T().create(); break; } return [key, val]; } scalar(reader, type, longType) { switch (type) { case reflection_info_1$6.ScalarType.INT32: return reader.int32(); case reflection_info_1$6.ScalarType.STRING: return reader.string(); case reflection_info_1$6.ScalarType.BOOL: return reader.bool(); case reflection_info_1$6.ScalarType.DOUBLE: return reader.double(); case reflection_info_1$6.ScalarType.FLOAT: return reader.float(); case reflection_info_1$6.ScalarType.INT64: return reflection_long_convert_1.reflectionLongConvert(reader.int64(), longType); case reflection_info_1$6.ScalarType.UINT64: return reflection_long_convert_1.reflectionLongConvert(reader.uint64(), longType); case reflection_info_1$6.ScalarType.FIXED64: return reflection_long_convert_1.reflectionLongConvert(reader.fixed64(), longType); case reflection_info_1$6.ScalarType.FIXED32: return reader.fixed32(); case reflection_info_1$6.ScalarType.BYTES: return reader.bytes(); case reflection_info_1$6.ScalarType.UINT32: return reader.uint32(); case reflection_info_1$6.ScalarType.SFIXED32: return reader.sfixed32(); case reflection_info_1$6.ScalarType.SFIXED64: return reflection_long_convert_1.reflectionLongConvert(reader.sfixed64(), longType); case reflection_info_1$6.ScalarType.SINT32: return reader.sint32(); case reflection_info_1$6.ScalarType.SINT64: return reflection_long_convert_1.reflectionLongConvert(reader.sint64(), longType); } } }; exports.ReflectionBinaryReader = ReflectionBinaryReader; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-writer.js var require_reflection_binary_writer = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-writer.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.ReflectionBinaryWriter = void 0; const binary_format_contract_1$1 = require_binary_format_contract(); const reflection_info_1$5 = require_reflection_info$1(); const assert_1$4 = require_assert(); const pb_long_1$1 = require_pb_long(); /** * Writes proto3 messages in binary format using reflection information. * * https://developers.google.com/protocol-buffers/docs/encoding */ var ReflectionBinaryWriter = class { constructor(info) { this.info = info; } prepare() { if (!this.fields) { const fieldsInput = this.info.fields ? this.info.fields.concat() : []; this.fields = fieldsInput.sort((a, b) => a.no - b.no); } } /** * Writes the message to binary format. */ write(message, writer, options) { this.prepare(); for (const field of this.fields) { let value, emitDefault, repeated = field.repeat, localName = field.localName; if (field.oneof) { const group = message[field.oneof]; if (group.oneofKind !== localName) continue; value = group[localName]; emitDefault = true; } else { value = message[localName]; emitDefault = false; } switch (field.kind) { case "scalar": case "enum": let T = field.kind == "enum" ? 
reflection_info_1$5.ScalarType.INT32 : field.T; if (repeated) { assert_1$4.assert(Array.isArray(value)); if (repeated == reflection_info_1$5.RepeatType.PACKED) this.packed(writer, T, field.no, value); else for (const item of value) this.scalar(writer, T, field.no, item, true); } else if (value === void 0) assert_1$4.assert(field.opt); else this.scalar(writer, T, field.no, value, emitDefault || field.opt); break; case "message": if (repeated) { assert_1$4.assert(Array.isArray(value)); for (const item of value) this.message(writer, options, field.T(), field.no, item); } else this.message(writer, options, field.T(), field.no, value); break; case "map": assert_1$4.assert(typeof value == "object" && value !== null); for (const [key, val] of Object.entries(value)) this.mapEntry(writer, options, field, key, val); break; } } let u = options.writeUnknownFields; if (u !== false) (u === true ? binary_format_contract_1$1.UnknownFieldHandler.onWrite : u)(this.info.typeName, message, writer); } mapEntry(writer, options, field, key, value) { writer.tag(field.no, binary_format_contract_1$1.WireType.LengthDelimited); writer.fork(); let keyValue = key; switch (field.K) { case reflection_info_1$5.ScalarType.INT32: case reflection_info_1$5.ScalarType.FIXED32: case reflection_info_1$5.ScalarType.UINT32: case reflection_info_1$5.ScalarType.SFIXED32: case reflection_info_1$5.ScalarType.SINT32: keyValue = Number.parseInt(key); break; case reflection_info_1$5.ScalarType.BOOL: assert_1$4.assert(key == "true" || key == "false"); keyValue = key == "true"; break; } this.scalar(writer, field.K, 1, keyValue, true); switch (field.V.kind) { case "scalar": this.scalar(writer, field.V.T, 2, value, true); break; case "enum": this.scalar(writer, reflection_info_1$5.ScalarType.INT32, 2, value, true); break; case "message": this.message(writer, options, field.V.T(), 2, value); break; } writer.join(); } message(writer, options, handler, fieldNo, value) { if (value === void 0) return; handler.internalBinaryWrite(value, writer.tag(fieldNo, binary_format_contract_1$1.WireType.LengthDelimited).fork(), options); writer.join(); } /** * Write a single scalar value. */ scalar(writer, type, fieldNo, value, emitDefault) { let [wireType, method, isDefault] = this.scalarInfo(type, value); if (!isDefault || emitDefault) { writer.tag(fieldNo, wireType); writer[method](value); } } /** * Write an array of scalar values in packed format. */ packed(writer, type, fieldNo, value) { if (!value.length) return; assert_1$4.assert(type !== reflection_info_1$5.ScalarType.BYTES && type !== reflection_info_1$5.ScalarType.STRING); writer.tag(fieldNo, binary_format_contract_1$1.WireType.LengthDelimited); writer.fork(); let [, method] = this.scalarInfo(type); for (let i = 0; i < value.length; i++) writer[method](value[i]); writer.join(); } /** * Get information for writing a scalar value. * * Returns tuple: * [0]: appropriate WireType * [1]: name of the appropriate method of IBinaryWriter * [2]: whether the given value is a default value * * If argument `value` is omitted, [2] is always true.
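*
* A hedged sketch of typical results:
*
*   scalarInfo(ScalarType.FIXED32, 0);  // [WireType.Bit32, "fixed32", true]
*   scalarInfo(ScalarType.STRING, "x"); // [WireType.LengthDelimited, "string", false]
*   scalarInfo(ScalarType.BOOL, true);  // [WireType.Varint, "bool", false]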
*/ scalarInfo(type, value) { let t = binary_format_contract_1$1.WireType.Varint; let m$1; let i = value === void 0; let d$1 = value === 0; switch (type) { case reflection_info_1$5.ScalarType.INT32: m$1 = "int32"; break; case reflection_info_1$5.ScalarType.STRING: d$1 = i || !value.length; t = binary_format_contract_1$1.WireType.LengthDelimited; m$1 = "string"; break; case reflection_info_1$5.ScalarType.BOOL: d$1 = value === false; m$1 = "bool"; break; case reflection_info_1$5.ScalarType.UINT32: m$1 = "uint32"; break; case reflection_info_1$5.ScalarType.DOUBLE: t = binary_format_contract_1$1.WireType.Bit64; m$1 = "double"; break; case reflection_info_1$5.ScalarType.FLOAT: t = binary_format_contract_1$1.WireType.Bit32; m$1 = "float"; break; case reflection_info_1$5.ScalarType.INT64: d$1 = i || pb_long_1$1.PbLong.from(value).isZero(); m$1 = "int64"; break; case reflection_info_1$5.ScalarType.UINT64: d$1 = i || pb_long_1$1.PbULong.from(value).isZero(); m$1 = "uint64"; break; case reflection_info_1$5.ScalarType.FIXED64: d$1 = i || pb_long_1$1.PbULong.from(value).isZero(); t = binary_format_contract_1$1.WireType.Bit64; m$1 = "fixed64"; break; case reflection_info_1$5.ScalarType.BYTES: d$1 = i || !value.byteLength; t = binary_format_contract_1$1.WireType.LengthDelimited; m$1 = "bytes"; break; case reflection_info_1$5.ScalarType.FIXED32: t = binary_format_contract_1$1.WireType.Bit32; m$1 = "fixed32"; break; case reflection_info_1$5.ScalarType.SFIXED32: t = binary_format_contract_1$1.WireType.Bit32; m$1 = "sfixed32"; break; case reflection_info_1$5.ScalarType.SFIXED64: d$1 = i || pb_long_1$1.PbLong.from(value).isZero(); t = binary_format_contract_1$1.WireType.Bit64; m$1 = "sfixed64"; break; case reflection_info_1$5.ScalarType.SINT32: m$1 = "sint32"; break; case reflection_info_1$5.ScalarType.SINT64: d$1 = i || pb_long_1$1.PbLong.from(value).isZero(); m$1 = "sint64"; break; } return [ t, m$1, i || d$1 ]; } }; exports.ReflectionBinaryWriter = ReflectionBinaryWriter; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-create.js var require_reflection_create = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-create.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.reflectionCreate = void 0; const reflection_scalar_default_1$1 = require_reflection_scalar_default(); const message_type_contract_1$3 = require_message_type_contract(); /** * Creates an instance of the generic message, using the field * information. */ function reflectionCreate(type) { /** * This ternary can be removed in the next major version. * The `Object.create()` code path utilizes a new `messagePrototype` * property on the `IMessageType` which has this same `MESSAGE_TYPE` * non-enumerable property on it. Doing it this way means that we only * pay the cost of `Object.defineProperty()` once per `IMessageType` * class instead of once per "instance". The falsy code path is only provided * for backwards compatibility in cases where the runtime library is * updated without also updating the generated code. */ const msg = type.messagePrototype ?
Object.create(type.messagePrototype) : Object.defineProperty({}, message_type_contract_1$3.MESSAGE_TYPE, { value: type }); for (let field of type.fields) { let name = field.localName; if (field.opt) continue; if (field.oneof) msg[field.oneof] = { oneofKind: void 0 }; else if (field.repeat) msg[name] = []; else switch (field.kind) { case "scalar": msg[name] = reflection_scalar_default_1$1.reflectionScalarDefault(field.T, field.L); break; case "enum": msg[name] = 0; break; case "map": msg[name] = {}; break; } } return msg; } exports.reflectionCreate = reflectionCreate; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-merge-partial.js var require_reflection_merge_partial = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-merge-partial.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.reflectionMergePartial = void 0; /** * Copy partial data into the target message. * * If a singular scalar or enum field is present in the source, it * replaces the field in the target. * * If a singular message field is present in the source, it is merged * with the target field by calling mergePartial() of the responsible * message type. * * If a repeated field is present in the source, its values replace * all values in the target array, removing extraneous values. * Repeated message fields are copied, not merged. * * If a map field is present in the source, entries are added to the * target map, replacing entries with the same key. Entries that only * exist in the target remain. Entries with message values are copied, * not merged. * * Note that this function differs from protobuf merge semantics, * which appends repeated fields. */ function reflectionMergePartial(info, target, source) { let fieldValue, input = source, output; for (let field of info.fields) { let name = field.localName; if (field.oneof) { const group = input[field.oneof]; if ((group === null || group === void 0 ? 
void 0 : group.oneofKind) == void 0) continue; fieldValue = group[name]; output = target[field.oneof]; output.oneofKind = group.oneofKind; if (fieldValue == void 0) { delete output[name]; continue; } } else { fieldValue = input[name]; output = target; if (fieldValue == void 0) continue; } if (field.repeat) output[name].length = fieldValue.length; switch (field.kind) { case "scalar": case "enum": if (field.repeat) for (let i = 0; i < fieldValue.length; i++) output[name][i] = fieldValue[i]; else output[name] = fieldValue; break; case "message": let T = field.T(); if (field.repeat) for (let i = 0; i < fieldValue.length; i++) output[name][i] = T.create(fieldValue[i]); else if (output[name] === void 0) output[name] = T.create(fieldValue); else T.mergePartial(output[name], fieldValue); break; case "map": switch (field.V.kind) { case "scalar": case "enum": Object.assign(output[name], fieldValue); break; case "message": let T$1 = field.V.T(); for (let k of Object.keys(fieldValue)) output[name][k] = T$1.create(fieldValue[k]); break; } break; } } } exports.reflectionMergePartial = reflectionMergePartial; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-equals.js var require_reflection_equals = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-equals.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.reflectionEquals = void 0; const reflection_info_1$4 = require_reflection_info$1(); /** * Determines whether two messages of the same type have the same field values. * Checks for deep equality, traversing repeated fields, oneof groups, maps * and messages recursively. * Will also return true if both messages are `undefined`. */ function reflectionEquals(info, a, b) { if (a === b) return true; if (!a || !b) return false; for (let field of info.fields) { let localName = field.localName; let val_a = field.oneof ? a[field.oneof][localName] : a[localName]; let val_b = field.oneof ? b[field.oneof][localName] : b[localName]; switch (field.kind) { case "enum": case "scalar": let t = field.kind == "enum" ? reflection_info_1$4.ScalarType.INT32 : field.T; if (!(field.repeat ? repeatedPrimitiveEq(t, val_a, val_b) : primitiveEq(t, val_a, val_b))) return false; break; case "map": if (!(field.V.kind == "message" ? repeatedMsgEq(field.V.T(), objectValues(val_a), objectValues(val_b)) : repeatedPrimitiveEq(field.V.kind == "enum" ? reflection_info_1$4.ScalarType.INT32 : field.V.T, objectValues(val_a), objectValues(val_b)))) return false; break; case "message": let T = field.T(); if (!(field.repeat ?
repeatedMsgEq(T, val_a, val_b) : T.equals(val_a, val_b))) return false; break; } } return true; } exports.reflectionEquals = reflectionEquals; const objectValues = Object.values; function primitiveEq(type, a, b) { if (a === b) return true; if (type !== reflection_info_1$4.ScalarType.BYTES) return false; let ba = a; let bb = b; if (ba.length !== bb.length) return false; for (let i = 0; i < ba.length; i++) if (ba[i] != bb[i]) return false; return true; } function repeatedPrimitiveEq(type, a, b) { if (a.length !== b.length) return false; for (let i = 0; i < a.length; i++) if (!primitiveEq(type, a[i], b[i])) return false; return true; } function repeatedMsgEq(type, a, b) { if (a.length !== b.length) return false; for (let i = 0; i < a.length; i++) if (!type.equals(a[i], b[i])) return false; return true; } } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/message-type.js var require_message_type = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/message-type.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.MessageType = void 0; const message_type_contract_1$2 = require_message_type_contract(); const reflection_info_1$3 = require_reflection_info$1(); const reflection_type_check_1$1 = require_reflection_type_check(); const reflection_json_reader_1$1 = require_reflection_json_reader(); const reflection_json_writer_1$1 = require_reflection_json_writer(); const reflection_binary_reader_1$1 = require_reflection_binary_reader(); const reflection_binary_writer_1$1 = require_reflection_binary_writer(); const reflection_create_1$1 = require_reflection_create(); const reflection_merge_partial_1$1 = require_reflection_merge_partial(); const json_typings_1$1 = require_json_typings(); const json_format_contract_1$1 = require_json_format_contract(); const reflection_equals_1$1 = require_reflection_equals(); const binary_writer_1$1 = require_binary_writer(); const binary_reader_1$1 = require_binary_reader(); const baseDescriptors = Object.getOwnPropertyDescriptors(Object.getPrototypeOf({})); /** * This standard message type provides reflection-based * operations to work with a message. */ var MessageType = class { constructor(name, fields, options) { this.defaultCheckDepth = 16; this.typeName = name; this.fields = fields.map(reflection_info_1$3.normalizeFieldInfo); this.options = options !== null && options !== void 0 ? options : {}; this.messagePrototype = Object.create(null, Object.assign(Object.assign({}, baseDescriptors), { [message_type_contract_1$2.MESSAGE_TYPE]: { value: this } })); this.refTypeCheck = new reflection_type_check_1$1.ReflectionTypeCheck(this); this.refJsonReader = new reflection_json_reader_1$1.ReflectionJsonReader(this); this.refJsonWriter = new reflection_json_writer_1$1.ReflectionJsonWriter(this); this.refBinReader = new reflection_binary_reader_1$1.ReflectionBinaryReader(this); this.refBinWriter = new reflection_binary_writer_1$1.ReflectionBinaryWriter(this); } create(value) { let message = reflection_create_1$1.reflectionCreate(this); if (value !== void 0) reflection_merge_partial_1$1.reflectionMergePartial(this, message, value); return message; } /** * Clone the message. * * Unknown fields are discarded. 
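*
* A usage sketch (`MyMessage` is a hypothetical generated message type):
*
*   const copy = MyMessage.clone(original);
*   // deep copy: MyMessage.equals(copy, original) is true, yet copy !== original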
*/ clone(message) { let copy$1 = this.create(); reflection_merge_partial_1$1.reflectionMergePartial(this, copy$1, message); return copy$1; } /** * Determines whether two messages of the same type have the same field values. * Checks for deep equality, traversing repeated fields, oneof groups, maps * and messages recursively. * Will also return true if both messages are `undefined`. */ equals(a, b) { return reflection_equals_1$1.reflectionEquals(this, a, b); } /** * Is the given value assignable to our message type, * and does it contain no [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? */ is(arg, depth = this.defaultCheckDepth) { return this.refTypeCheck.is(arg, depth, false); } /** * Is the given value assignable to our message type, * regardless of [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? */ isAssignable(arg, depth = this.defaultCheckDepth) { return this.refTypeCheck.is(arg, depth, true); } /** * Copy partial data into the target message. */ mergePartial(target, source) { reflection_merge_partial_1$1.reflectionMergePartial(this, target, source); } /** * Create a new message from binary format. */ fromBinary(data, options) { let opt = binary_reader_1$1.binaryReadOptions(options); return this.internalBinaryRead(opt.readerFactory(data), data.byteLength, opt); } /** * Read a new message from a JSON value. */ fromJson(json$1, options) { return this.internalJsonRead(json$1, json_format_contract_1$1.jsonReadOptions(options)); } /** * Read a new message from a JSON string. * This is equivalent to `T.fromJson(JSON.parse(json))`. */ fromJsonString(json$1, options) { let value = JSON.parse(json$1); return this.fromJson(value, options); } /** * Write the message to canonical JSON value. */ toJson(message, options) { return this.internalJsonWrite(message, json_format_contract_1$1.jsonWriteOptions(options)); } /** * Convert the message to canonical JSON string. * This is equivalent to `JSON.stringify(T.toJson(t))`. */ toJsonString(message, options) { var _a$2; let value = this.toJson(message, options); return JSON.stringify(value, null, (_a$2 = options === null || options === void 0 ? void 0 : options.prettySpaces) !== null && _a$2 !== void 0 ? _a$2 : 0); } /** * Write the message to binary format. */ toBinary(message, options) { let opt = binary_writer_1$1.binaryWriteOptions(options); return this.internalBinaryWrite(message, opt.writerFactory(), opt).finish(); } /** * This is an internal method. If you just want to read a message from * JSON, use `fromJson()` or `fromJsonString()`. * * Reads JSON value and merges the fields into the target * according to protobuf rules. If the target is omitted, * a new instance is created first. */ internalJsonRead(json$1, options, target) { if (json$1 !== null && typeof json$1 == "object" && !Array.isArray(json$1)) { let message = target !== null && target !== void 0 ? target : this.create(); this.refJsonReader.read(json$1, message, options); return message; } throw new Error(`Unable to parse message ${this.typeName} from JSON ${json_typings_1$1.typeofJsonValue(json$1)}.`); } /** * This is an internal method. If you just want to write a message * to JSON, use `toJson()` or `toJsonString()`. * * Writes JSON value and returns it. */ internalJsonWrite(message, options) { return this.refJsonWriter.write(message, options); } /** * This is an internal method. If you just want to write a message * in binary format, use `toBinary()`.
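*
* A round-trip sketch through the public API instead (`MyMessage` is a
* hypothetical generated type):
*
*   const bytes = MyMessage.toBinary(msg);     // Uint8Array
*   const again = MyMessage.fromBinary(bytes); // parsed back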
* * Serializes the message in binary format and appends it to the given * writer. Returns passed writer. */ internalBinaryWrite(message, writer, options) { this.refBinWriter.write(message, writer, options); return writer; } /** * This is an internal method. If you just want to read a message from * binary data, use `fromBinary()`. * * Reads data from binary format and merges the fields into * the target according to protobuf rules. If the target is * omitted, a new instance is created first. */ internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(); this.refBinReader.read(reader, message, options, length); return message; } }; exports.MessageType = MessageType; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-contains-message-type.js var require_reflection_contains_message_type = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/reflection-contains-message-type.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.containsMessageType = void 0; const message_type_contract_1$1 = require_message_type_contract(); /** * Check if the provided object is a proto message. * * Note that this is an experimental feature - it is here to stay, but * implementation details may change without notice. */ function containsMessageType(msg) { return msg[message_type_contract_1$1.MESSAGE_TYPE] != null; } exports.containsMessageType = containsMessageType; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/enum-object.js var require_enum_object = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/enum-object.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.listEnumNumbers = exports.listEnumNames = exports.listEnumValues = exports.isEnumObject = void 0; /** * Is this a lookup object generated by Typescript, for a Typescript enum * generated by protobuf-ts? * * - No `const enum` (enum must not be inlined, we need reverse mapping). * - No string enum (we need int32 for protobuf). * - Must have a value for 0 (otherwise, we would need to support custom default values). */ function isEnumObject(arg) { if (typeof arg != "object" || arg === null) return false; if (!arg.hasOwnProperty(0)) return false; for (let k of Object.keys(arg)) { let num = parseInt(k); if (!Number.isNaN(num)) { let nam = arg[num]; if (nam === void 0) return false; if (arg[nam] !== num) return false; } else { let num$1 = arg[k]; if (num$1 === void 0) return false; if (typeof num$1 !== "number") return false; if (arg[num$1] === void 0) return false; } } return true; } exports.isEnumObject = isEnumObject; /** * Lists all values of a Typescript enum, as an array of objects with a "name" * property and a "number" property. * * Note that it is possible that a number appears more than once, because it is * possible to have aliases in an enum. * * Throws if the enum does not adhere to the rules of enums generated by * protobuf-ts. See `isEnumObject()`. 
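*
* A sketch with a plain numeric TypeScript enum:
*
*   enum Color { RED = 0, GREEN = 1 }
*   listEnumValues(Color);
*   // [{ name: "RED", number: 0 }, { name: "GREEN", number: 1 }]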
*/ function listEnumValues(enumObject) { if (!isEnumObject(enumObject)) throw new Error("not a typescript enum object"); let values = []; for (let [name, number] of Object.entries(enumObject)) if (typeof number == "number") values.push({ name, number }); return values; } exports.listEnumValues = listEnumValues; /** * Lists the names of a Typescript enum. * * Throws if the enum does not adhere to the rules of enums generated by * protobuf-ts. See `isEnumObject()`. */ function listEnumNames(enumObject) { return listEnumValues(enumObject).map((val) => val.name); } exports.listEnumNames = listEnumNames; /** * Lists the numbers of a Typescript enum. * * Throws if the enum does not adhere to the rules of enums generated by * protobuf-ts. See `isEnumObject()`. */ function listEnumNumbers(enumObject) { return listEnumValues(enumObject).map((val) => val.number).filter((num, index, arr) => arr.indexOf(num) == index); } exports.listEnumNumbers = listEnumNumbers; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/index.js var require_commonjs$1 = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime@2.10.0/node_modules/@protobuf-ts/runtime/build/commonjs/index.js"(exports) { // webpack verbose output hints that this should be useful Object.defineProperty(exports, "__esModule", { value: true }); var json_typings_1 = require_json_typings(); Object.defineProperty(exports, "typeofJsonValue", { enumerable: true, get: function() { return json_typings_1.typeofJsonValue; } }); Object.defineProperty(exports, "isJsonObject", { enumerable: true, get: function() { return json_typings_1.isJsonObject; } }); var base64_1 = require_base64(); Object.defineProperty(exports, "base64decode", { enumerable: true, get: function() { return base64_1.base64decode; } }); Object.defineProperty(exports, "base64encode", { enumerable: true, get: function() { return base64_1.base64encode; } }); var protobufjs_utf8_1 = require_protobufjs_utf8(); Object.defineProperty(exports, "utf8read", { enumerable: true, get: function() { return protobufjs_utf8_1.utf8read; } }); var binary_format_contract_1 = require_binary_format_contract(); Object.defineProperty(exports, "WireType", { enumerable: true, get: function() { return binary_format_contract_1.WireType; } }); Object.defineProperty(exports, "mergeBinaryOptions", { enumerable: true, get: function() { return binary_format_contract_1.mergeBinaryOptions; } }); Object.defineProperty(exports, "UnknownFieldHandler", { enumerable: true, get: function() { return binary_format_contract_1.UnknownFieldHandler; } }); var binary_reader_1 = require_binary_reader(); Object.defineProperty(exports, "BinaryReader", { enumerable: true, get: function() { return binary_reader_1.BinaryReader; } }); Object.defineProperty(exports, "binaryReadOptions", { enumerable: true, get: function() { return binary_reader_1.binaryReadOptions; } }); var binary_writer_1 = require_binary_writer(); Object.defineProperty(exports, "BinaryWriter", { enumerable: true, get: function() { return binary_writer_1.BinaryWriter; } }); Object.defineProperty(exports, "binaryWriteOptions", { enumerable: true, get: function() { return binary_writer_1.binaryWriteOptions; } }); var pb_long_1 = require_pb_long(); Object.defineProperty(exports, "PbLong", { enumerable: true, get: function() { return pb_long_1.PbLong; } }); Object.defineProperty(exports, "PbULong", { enumerable: true, get: function() { return pb_long_1.PbULong; } }); var json_format_contract_1 = 
require_json_format_contract(); Object.defineProperty(exports, "jsonReadOptions", { enumerable: true, get: function() { return json_format_contract_1.jsonReadOptions; } }); Object.defineProperty(exports, "jsonWriteOptions", { enumerable: true, get: function() { return json_format_contract_1.jsonWriteOptions; } }); Object.defineProperty(exports, "mergeJsonOptions", { enumerable: true, get: function() { return json_format_contract_1.mergeJsonOptions; } }); var message_type_contract_1 = require_message_type_contract(); Object.defineProperty(exports, "MESSAGE_TYPE", { enumerable: true, get: function() { return message_type_contract_1.MESSAGE_TYPE; } }); var message_type_1 = require_message_type(); Object.defineProperty(exports, "MessageType", { enumerable: true, get: function() { return message_type_1.MessageType; } }); var reflection_info_1$2 = require_reflection_info$1(); Object.defineProperty(exports, "ScalarType", { enumerable: true, get: function() { return reflection_info_1$2.ScalarType; } }); Object.defineProperty(exports, "LongType", { enumerable: true, get: function() { return reflection_info_1$2.LongType; } }); Object.defineProperty(exports, "RepeatType", { enumerable: true, get: function() { return reflection_info_1$2.RepeatType; } }); Object.defineProperty(exports, "normalizeFieldInfo", { enumerable: true, get: function() { return reflection_info_1$2.normalizeFieldInfo; } }); Object.defineProperty(exports, "readFieldOptions", { enumerable: true, get: function() { return reflection_info_1$2.readFieldOptions; } }); Object.defineProperty(exports, "readFieldOption", { enumerable: true, get: function() { return reflection_info_1$2.readFieldOption; } }); Object.defineProperty(exports, "readMessageOption", { enumerable: true, get: function() { return reflection_info_1$2.readMessageOption; } }); var reflection_type_check_1 = require_reflection_type_check(); Object.defineProperty(exports, "ReflectionTypeCheck", { enumerable: true, get: function() { return reflection_type_check_1.ReflectionTypeCheck; } }); var reflection_create_1 = require_reflection_create(); Object.defineProperty(exports, "reflectionCreate", { enumerable: true, get: function() { return reflection_create_1.reflectionCreate; } }); var reflection_scalar_default_1 = require_reflection_scalar_default(); Object.defineProperty(exports, "reflectionScalarDefault", { enumerable: true, get: function() { return reflection_scalar_default_1.reflectionScalarDefault; } }); var reflection_merge_partial_1 = require_reflection_merge_partial(); Object.defineProperty(exports, "reflectionMergePartial", { enumerable: true, get: function() { return reflection_merge_partial_1.reflectionMergePartial; } }); var reflection_equals_1 = require_reflection_equals(); Object.defineProperty(exports, "reflectionEquals", { enumerable: true, get: function() { return reflection_equals_1.reflectionEquals; } }); var reflection_binary_reader_1 = require_reflection_binary_reader(); Object.defineProperty(exports, "ReflectionBinaryReader", { enumerable: true, get: function() { return reflection_binary_reader_1.ReflectionBinaryReader; } }); var reflection_binary_writer_1 = require_reflection_binary_writer(); Object.defineProperty(exports, "ReflectionBinaryWriter", { enumerable: true, get: function() { return reflection_binary_writer_1.ReflectionBinaryWriter; } }); var reflection_json_reader_1 = require_reflection_json_reader(); Object.defineProperty(exports, "ReflectionJsonReader", { enumerable: true, get: function() { return 
reflection_json_reader_1.ReflectionJsonReader; } }); var reflection_json_writer_1 = require_reflection_json_writer(); Object.defineProperty(exports, "ReflectionJsonWriter", { enumerable: true, get: function() { return reflection_json_writer_1.ReflectionJsonWriter; } }); var reflection_contains_message_type_1 = require_reflection_contains_message_type(); Object.defineProperty(exports, "containsMessageType", { enumerable: true, get: function() { return reflection_contains_message_type_1.containsMessageType; } }); var oneof_1 = require_oneof(); Object.defineProperty(exports, "isOneofGroup", { enumerable: true, get: function() { return oneof_1.isOneofGroup; } }); Object.defineProperty(exports, "setOneofValue", { enumerable: true, get: function() { return oneof_1.setOneofValue; } }); Object.defineProperty(exports, "getOneofValue", { enumerable: true, get: function() { return oneof_1.getOneofValue; } }); Object.defineProperty(exports, "clearOneofValue", { enumerable: true, get: function() { return oneof_1.clearOneofValue; } }); Object.defineProperty(exports, "getSelectedOneofValue", { enumerable: true, get: function() { return oneof_1.getSelectedOneofValue; } }); var enum_object_1 = require_enum_object(); Object.defineProperty(exports, "listEnumValues", { enumerable: true, get: function() { return enum_object_1.listEnumValues; } }); Object.defineProperty(exports, "listEnumNames", { enumerable: true, get: function() { return enum_object_1.listEnumNames; } }); Object.defineProperty(exports, "listEnumNumbers", { enumerable: true, get: function() { return enum_object_1.listEnumNumbers; } }); Object.defineProperty(exports, "isEnumObject", { enumerable: true, get: function() { return enum_object_1.isEnumObject; } }); var lower_camel_case_1 = require_lower_camel_case(); Object.defineProperty(exports, "lowerCamelCase", { enumerable: true, get: function() { return lower_camel_case_1.lowerCamelCase; } }); var assert_1$3 = require_assert(); Object.defineProperty(exports, "assert", { enumerable: true, get: function() { return assert_1$3.assert; } }); Object.defineProperty(exports, "assertNever", { enumerable: true, get: function() { return assert_1$3.assertNever; } }); Object.defineProperty(exports, "assertInt32", { enumerable: true, get: function() { return assert_1$3.assertInt32; } }); Object.defineProperty(exports, "assertUInt32", { enumerable: true, get: function() { return assert_1$3.assertUInt32; } }); Object.defineProperty(exports, "assertFloat32", { enumerable: true, get: function() { return assert_1$3.assertFloat32; } }); } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/reflection-info.js var require_reflection_info = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/reflection-info.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.readServiceOption = exports.readMethodOption = exports.readMethodOptions = exports.normalizeMethodInfo = void 0; const runtime_1$7 = require_commonjs$1(); /** * Turns PartialMethodInfo into MethodInfo. */ function normalizeMethodInfo(method, service) { var _a$2, _b$1, _c$1; let m$1 = method; m$1.service = service; m$1.localName = (_a$2 = m$1.localName) !== null && _a$2 !== void 0 ? _a$2 : runtime_1$7.lowerCamelCase(m$1.name); m$1.serverStreaming = !!m$1.serverStreaming; m$1.clientStreaming = !!m$1.clientStreaming; m$1.options = (_b$1 = m$1.options) !== null && _b$1 !== void 0 ? 
_b$1 : {}; m$1.idempotency = (_c$1 = m$1.idempotency) !== null && _c$1 !== void 0 ? _c$1 : void 0; return m$1; } exports.normalizeMethodInfo = normalizeMethodInfo; /** * Read custom method options from a generated service client. * * @deprecated use readMethodOption() */ function readMethodOptions(service, methodName, extensionName, extensionType) { var _a$2; const options = (_a$2 = service.methods.find((m$1, i) => m$1.localName === methodName || i === methodName)) === null || _a$2 === void 0 ? void 0 : _a$2.options; return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : void 0; } exports.readMethodOptions = readMethodOptions; function readMethodOption(service, methodName, extensionName, extensionType) { var _a$2; const options = (_a$2 = service.methods.find((m$1, i) => m$1.localName === methodName || i === methodName)) === null || _a$2 === void 0 ? void 0 : _a$2.options; if (!options) return void 0; const optionVal = options[extensionName]; if (optionVal === void 0) return optionVal; return extensionType ? extensionType.fromJson(optionVal) : optionVal; } exports.readMethodOption = readMethodOption; function readServiceOption(service, extensionName, extensionType) { const options = service.options; if (!options) return void 0; const optionVal = options[extensionName]; if (optionVal === void 0) return optionVal; return extensionType ? extensionType.fromJson(optionVal) : optionVal; } exports.readServiceOption = readServiceOption; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/service-type.js var require_service_type = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/service-type.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.ServiceType = void 0; const reflection_info_1$1 = require_reflection_info(); var ServiceType = class { constructor(typeName, methods, options) { this.typeName = typeName; this.methods = methods.map((i) => reflection_info_1$1.normalizeMethodInfo(i, this)); this.options = options !== null && options !== void 0 ? options : {}; } }; exports.ServiceType = ServiceType; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-error.js var require_rpc_error = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-error.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.RpcError = void 0; /** * An error that occurred while calling an RPC method. */ var RpcError = class extends Error { constructor(message, code = "UNKNOWN", meta) { super(message); this.name = "RpcError"; Object.setPrototypeOf(this, new.target.prototype); this.code = code; this.meta = meta !== null && meta !== void 0 ?
meta : {}; } toString() { const l = [this.name + ": " + this.message]; if (this.code) { l.push(""); l.push("Code: " + this.code); } if (this.serviceName && this.methodName) l.push("Method: " + this.serviceName + "/" + this.methodName); let m$1 = Object.entries(this.meta); if (m$1.length) { l.push(""); l.push("Meta:"); for (let [k, v] of m$1) l.push(` ${k}: ${v}`); } return l.join("\n"); } }; exports.RpcError = RpcError; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-options.js var require_rpc_options = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-options.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.mergeRpcOptions = void 0; const runtime_1$6 = require_commonjs$1(); /** * Merges custom RPC options with defaults. Returns a new instance and keeps * the "defaults" and the "options" unmodified. * * Merges `RpcMetadata` "meta", overwriting values from "defaults" with * values from "options". Does not append values to existing entries. * * Merges "jsonOptions", including "jsonOptions.typeRegistry", by creating * a new array that contains types from "options.jsonOptions.typeRegistry" * first, then types from "defaults.jsonOptions.typeRegistry". * * Merges "binaryOptions". * * Merges "interceptors" by creating a new array that contains interceptors * from "defaults" first, then interceptors from "options". * * Works with objects that extend `RpcOptions`, but only if the added * properties are of type Date, primitive like string, boolean, or Array * of primitives. If you have other property types, you have to merge them * yourself. */ function mergeRpcOptions(defaults, options) { if (!options) return defaults; let o = {}; copy(defaults, o); copy(options, o); for (let key of Object.keys(options)) { let val = options[key]; switch (key) { case "jsonOptions": o.jsonOptions = runtime_1$6.mergeJsonOptions(defaults.jsonOptions, o.jsonOptions); break; case "binaryOptions": o.binaryOptions = runtime_1$6.mergeBinaryOptions(defaults.binaryOptions, o.binaryOptions); break; case "meta": o.meta = {}; copy(defaults.meta, o.meta); copy(options.meta, o.meta); break; case "interceptors": o.interceptors = defaults.interceptors ? defaults.interceptors.concat(val) : val.concat(); break; } } return o; } exports.mergeRpcOptions = mergeRpcOptions; function copy(a, into) { if (!a) return; let c = into; for (let [k, v] of Object.entries(a)) if (v instanceof Date) c[k] = new Date(v.getTime()); else if (Array.isArray(v)) c[k] = v.concat(); else c[k] = v; } } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/deferred.js var require_deferred = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/deferred.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.Deferred = exports.DeferredState = void 0; var DeferredState; (function(DeferredState$1) { DeferredState$1[DeferredState$1["PENDING"] = 0] = "PENDING"; DeferredState$1[DeferredState$1["REJECTED"] = 1] = "REJECTED"; DeferredState$1[DeferredState$1["RESOLVED"] = 2] = "RESOLVED"; })(DeferredState = exports.DeferredState || (exports.DeferredState = {})); /** * A deferred promise. This is a "controller" for a promise, which lets you * pass a promise around and reject or resolve it from the outside. 
* * Warning: This class is to be used with care. Using it can make code very * difficult to read. It is intended for use in library code that exposes * promises, not for regular business logic. */ var Deferred = class { /** * @param preventUnhandledRejectionWarning - prevents the warning * "Unhandled Promise rejection" by adding a noop rejection handler. * Working with calls returned from the runtime-rpc package in an * async function usually means awaiting one call property after * the other. This means that the "status" is not being awaited when * an earlier await for the "headers" is rejected. This causes the * "unhandled promise reject" warning. A more correct behaviour for * calls might be to become aware whether at least one of the * promises is handled and swallow the rejection warning for the * others. */ constructor(preventUnhandledRejectionWarning = true) { this._state = DeferredState.PENDING; this._promise = new Promise((resolve, reject) => { this._resolve = resolve; this._reject = reject; }); if (preventUnhandledRejectionWarning) this._promise.catch((_) => {}); } /** * Get the current state of the promise. */ get state() { return this._state; } /** * Get the deferred promise. */ get promise() { return this._promise; } /** * Resolve the promise. Throws if the promise is already resolved or rejected. */ resolve(value) { if (this.state !== DeferredState.PENDING) throw new Error(`cannot resolve ${DeferredState[this.state].toLowerCase()}`); this._resolve(value); this._state = DeferredState.RESOLVED; } /** * Reject the promise. Throws if the promise is already resolved or rejected. */ reject(reason) { if (this.state !== DeferredState.PENDING) throw new Error(`cannot reject ${DeferredState[this.state].toLowerCase()}`); this._reject(reason); this._state = DeferredState.REJECTED; } /** * Resolve the promise. Ignore if not pending. */ resolvePending(val) { if (this._state === DeferredState.PENDING) this.resolve(val); } /** * Reject the promise. Ignore if not pending. */ rejectPending(reason) { if (this._state === DeferredState.PENDING) this.reject(reason); } }; exports.Deferred = Deferred; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-output-stream.js var require_rpc_output_stream = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-output-stream.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.RpcOutputStreamController = void 0; const deferred_1$1 = require_deferred(); const runtime_1$5 = require_commonjs$1(); /** * A `RpcOutputStream` that you control. */ var RpcOutputStreamController = class { constructor() { this._lis = { nxt: [], msg: [], err: [], cmp: [] }; this._closed = false; this._itState = { q: [] }; } onNext(callback) { return this.addLis(callback, this._lis.nxt); } onMessage(callback) { return this.addLis(callback, this._lis.msg); } onError(callback) { return this.addLis(callback, this._lis.err); } onComplete(callback) { return this.addLis(callback, this._lis.cmp); } addLis(callback, list) { list.push(callback); return () => { let i = list.indexOf(callback); if (i >= 0) list.splice(i, 1); }; } clearLis() { for (let l of Object.values(this._lis)) l.splice(0, l.length); } /** * Is this stream already closed by a completion or error? */ get closed() { return this._closed !== false; } /** * Emit message, close with error, or close successfully, but only one * at a time. 
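*
* A hedged sketch (at most one of the three arguments may be set):
*
*   out.notifyNext(msg, undefined, false);      // emit a message
*   out.notifyNext(undefined, err, false);      // close with an error
*   out.notifyNext(undefined, undefined, true); // close successfully
*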
* Can be used to wrap a stream by using the other stream's `onNext`. */ notifyNext(message, error, complete) { runtime_1$5.assert((message ? 1 : 0) + (error ? 1 : 0) + (complete ? 1 : 0) <= 1, "only one emission at a time"); if (message) this.notifyMessage(message); if (error) this.notifyError(error); if (complete) this.notifyComplete(); } /** * Emits a new message. Throws if stream is closed. * * Triggers onNext and onMessage callbacks. */ notifyMessage(message) { runtime_1$5.assert(!this.closed, "stream is closed"); this.pushIt({ value: message, done: false }); this._lis.msg.forEach((l) => l(message)); this._lis.nxt.forEach((l) => l(message, void 0, false)); } /** * Closes the stream with an error. Throws if stream is closed. * * Triggers onNext and onError callbacks. */ notifyError(error) { runtime_1$5.assert(!this.closed, "stream is closed"); this._closed = error; this.pushIt(error); this._lis.err.forEach((l) => l(error)); this._lis.nxt.forEach((l) => l(void 0, error, false)); this.clearLis(); } /** * Closes the stream successfully. Throws if stream is closed. * * Triggers onNext and onComplete callbacks. */ notifyComplete() { runtime_1$5.assert(!this.closed, "stream is closed"); this._closed = true; this.pushIt({ value: null, done: true }); this._lis.cmp.forEach((l) => l()); this._lis.nxt.forEach((l) => l(void 0, void 0, true)); this.clearLis(); } /** * Creates an async iterator (that can be used with `for await {...}`) * to consume the stream. * * Some things to note: * - If an error occurs, the `for await` will throw it. * - If an error occurred before the `for await` was started, `for await` * will re-throw it. * - If the stream is already complete, the `for await` will be empty. * - If your `for await` consumes slower than the stream produces, * for example because you are relaying messages in a slow operation, * messages are queued. */ [Symbol.asyncIterator]() { if (this._closed === true) this.pushIt({ value: null, done: true }); else if (this._closed !== false) this.pushIt(this._closed); return { next: () => { let state$1 = this._itState; runtime_1$5.assert(state$1, "bad state"); runtime_1$5.assert(!state$1.p, "iterator contract broken"); let first = state$1.q.shift(); if (first) return "value" in first ? Promise.resolve(first) : Promise.reject(first); state$1.p = new deferred_1$1.Deferred(); return state$1.p.promise; } }; } pushIt(result) { let state$1 = this._itState; if (state$1.p) { const p = state$1.p; runtime_1$5.assert(p.state == deferred_1$1.DeferredState.PENDING, "iterator contract broken"); "value" in result ? p.resolve(result) : p.reject(result); delete state$1.p; } else state$1.q.push(result); } }; exports.RpcOutputStreamController = RpcOutputStreamController; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/unary-call.js var require_unary_call = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/unary-call.js"(exports) { var __awaiter$10 = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", { value: true }); exports.UnaryCall = void 0; /** * A unary RPC call. Unary means there is exactly one input message and * exactly one output message unless an error occurred. */ var UnaryCall = class { constructor(method, requestHeaders, request, headers, response, status, trailers) { this.method = method; this.requestHeaders = requestHeaders; this.request = request; this.headers = headers; this.response = response; this.status = status; this.trailers = trailers; } /** * If you are only interested in the final outcome of this call, * you can await it to receive a `FinishedUnaryCall`. */ then(onfulfilled, onrejected) { return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); } promiseFinished() { return __awaiter$10(this, void 0, void 0, function* () { let [headers, response, status, trailers] = yield Promise.all([ this.headers, this.response, this.status, this.trailers ]); return { method: this.method, requestHeaders: this.requestHeaders, request: this.request, headers, response, status, trailers }; }); } }; exports.UnaryCall = UnaryCall; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-streaming-call.js var require_server_streaming_call = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-streaming-call.js"(exports) { var __awaiter$9 = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", { value: true }); exports.ServerStreamingCall = void 0; /** * A server streaming RPC call. The client provides exactly one input message * but the server may respond with 0, 1, or more messages. */ var ServerStreamingCall = class { constructor(method, requestHeaders, request, headers, response, status, trailers) { this.method = method; this.requestHeaders = requestHeaders; this.request = request; this.headers = headers; this.responses = response; this.status = status; this.trailers = trailers; } /** * Instead of awaiting the response status and trailers, you can * just as well await this call itself to receive the server outcome. * You should first set up some listeners on the `responses` stream to * see the actual messages the server replied with. */ then(onfulfilled, onrejected) { return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ?
Promise.resolve(onrejected(reason)) : Promise.reject(reason)); } promiseFinished() { return __awaiter$9(this, void 0, void 0, function* () { let [headers, status, trailers] = yield Promise.all([ this.headers, this.status, this.trailers ]); return { method: this.method, requestHeaders: this.requestHeaders, request: this.request, headers, status, trailers }; }); } }; exports.ServerStreamingCall = ServerStreamingCall; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/client-streaming-call.js var require_client_streaming_call = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/client-streaming-call.js"(exports) { var __awaiter$8 = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", { value: true }); exports.ClientStreamingCall = void 0; /** * A client streaming RPC call. This means that the client sends 0, 1, or * more messages to the server, and the server replies with exactly one * message. */ var ClientStreamingCall = class { constructor(method, requestHeaders, request, headers, response, status, trailers) { this.method = method; this.requestHeaders = requestHeaders; this.requests = request; this.headers = headers; this.response = response; this.status = status; this.trailers = trailers; } /** * Instead of awaiting the response status and trailers, you can * just as well await this call itself to receive the server outcome. * Note that it may still be valid to send more request messages. */ then(onfulfilled, onrejected) { return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); } promiseFinished() { return __awaiter$8(this, void 0, void 0, function* () { let [headers, response, status, trailers] = yield Promise.all([ this.headers, this.response, this.status, this.trailers ]); return { method: this.method, requestHeaders: this.requestHeaders, headers, response, status, trailers }; }); } }; exports.ClientStreamingCall = ClientStreamingCall; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/duplex-streaming-call.js var require_duplex_streaming_call = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/duplex-streaming-call.js"(exports) { var __awaiter$7 = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ?
value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", { value: true }); exports.DuplexStreamingCall = void 0; /** * A duplex streaming RPC call. This means that the client sends an * arbitrary number of messages to the server, while at the same time, * the server sends an arbitrary number of messages to the client. */ var DuplexStreamingCall = class { constructor(method, requestHeaders, request, headers, response, status, trailers) { this.method = method; this.requestHeaders = requestHeaders; this.requests = request; this.headers = headers; this.responses = response; this.status = status; this.trailers = trailers; } /** * Instead of awaiting the response status and trailers, you can * just as well await this call itself to receive the server outcome. * Note that it may still be valid to send more request messages. */ then(onfulfilled, onrejected) { return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); } promiseFinished() { return __awaiter$7(this, void 0, void 0, function* () { let [headers, status, trailers] = yield Promise.all([ this.headers, this.status, this.trailers ]); return { method: this.method, requestHeaders: this.requestHeaders, headers, status, trailers }; }); } }; exports.DuplexStreamingCall = DuplexStreamingCall; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/test-transport.js var require_test_transport = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/test-transport.js"(exports) { var __awaiter$6 = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", { value: true }); exports.TestTransport = void 0; const rpc_error_1$1 = require_rpc_error(); const runtime_1$4 = require_commonjs$1(); const rpc_output_stream_1$1 = require_rpc_output_stream(); const rpc_options_1$1 = require_rpc_options(); const unary_call_1$1 = require_unary_call(); const server_streaming_call_1$1 = require_server_streaming_call(); const client_streaming_call_1$1 = require_client_streaming_call(); const duplex_streaming_call_1$1 = require_duplex_streaming_call(); /** * Transport for testing. */ var TestTransport = class TestTransport { /** * Initialize with mock data. Omitted fields have default value.
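*
* @example
* A hypothetical sketch (the method descriptor `myMethod` and the message
* shapes are placeholders, not part of the original sources); normally the
* transport would be handed to a generated service client, but calling
* `unary` directly keeps the sketch short:
* ```typescript
* const transport = new TestTransport({
*   response: { value: 42 }, // mocked single response
* });
* const call = transport.unary(myMethod, { value: 1 }, transport.mergeOptions({}));
* const finished = await call; // headers, response, status, trailers
* ```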
*/ constructor(data) { /** * Suppress warning / error about uncaught rejections of * "status" and "trailers". */ this.suppressUncaughtRejections = true; this.headerDelay = 10; this.responseDelay = 50; this.betweenResponseDelay = 10; this.afterResponseDelay = 10; this.data = data !== null && data !== void 0 ? data : {}; } /** * Sent message(s) during the last operation. */ get sentMessages() { if (this.lastInput instanceof TestInputStream) return this.lastInput.sent; else if (typeof this.lastInput == "object") return [this.lastInput.single]; return []; } /** * Sending message(s) completed? */ get sendComplete() { if (this.lastInput instanceof TestInputStream) return this.lastInput.completed; else if (typeof this.lastInput == "object") return true; return false; } promiseHeaders() { var _a$2; const headers = (_a$2 = this.data.headers) !== null && _a$2 !== void 0 ? _a$2 : TestTransport.defaultHeaders; return headers instanceof rpc_error_1$1.RpcError ? Promise.reject(headers) : Promise.resolve(headers); } promiseSingleResponse(method) { if (this.data.response instanceof rpc_error_1$1.RpcError) return Promise.reject(this.data.response); let r; if (Array.isArray(this.data.response)) { runtime_1$4.assert(this.data.response.length > 0); r = this.data.response[0]; } else if (this.data.response !== void 0) r = this.data.response; else r = method.O.create(); runtime_1$4.assert(method.O.is(r)); return Promise.resolve(r); } /** * Pushes response messages from the mock data to the output stream. * If an error response, status or trailers are mocked, the stream is * closed with the respective error. * Otherwise, stream is completed successfully. * * The returned promise resolves when the stream is closed. It should * not reject. If it does, code is broken. */ streamResponses(method, stream$3, abort) { return __awaiter$6(this, void 0, void 0, function* () { const messages = []; if (this.data.response === void 0) messages.push(method.O.create()); else if (Array.isArray(this.data.response)) for (let msg of this.data.response) { runtime_1$4.assert(method.O.is(msg)); messages.push(msg); } else if (!(this.data.response instanceof rpc_error_1$1.RpcError)) { runtime_1$4.assert(method.O.is(this.data.response)); messages.push(this.data.response); } try { yield delay(this.responseDelay, abort)(void 0); } catch (error) { stream$3.notifyError(error); return; } if (this.data.response instanceof rpc_error_1$1.RpcError) { stream$3.notifyError(this.data.response); return; } for (let msg of messages) { stream$3.notifyMessage(msg); try { yield delay(this.betweenResponseDelay, abort)(void 0); } catch (error) { stream$3.notifyError(error); return; } } if (this.data.status instanceof rpc_error_1$1.RpcError) { stream$3.notifyError(this.data.status); return; } if (this.data.trailers instanceof rpc_error_1$1.RpcError) { stream$3.notifyError(this.data.trailers); return; } stream$3.notifyComplete(); }); } promiseStatus() { var _a$2; const status = (_a$2 = this.data.status) !== null && _a$2 !== void 0 ? _a$2 : TestTransport.defaultStatus; return status instanceof rpc_error_1$1.RpcError ? Promise.reject(status) : Promise.resolve(status); } promiseTrailers() { var _a$2; const trailers = (_a$2 = this.data.trailers) !== null && _a$2 !== void 0 ? _a$2 : TestTransport.defaultTrailers; return trailers instanceof rpc_error_1$1.RpcError ? 
Promise.reject(trailers) : Promise.resolve(trailers); } maybeSuppressUncaught(...promise) { if (this.suppressUncaughtRejections) for (let p of promise) p.catch(() => {}); } mergeOptions(options) { return rpc_options_1$1.mergeRpcOptions({}, options); } unary(method, input, options) { var _a$2; const requestHeaders = (_a$2 = options.meta) !== null && _a$2 !== void 0 ? _a$2 : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), responsePromise = headersPromise.catch((_) => {}).then(delay(this.responseDelay, options.abort)).then((_) => this.promiseSingleResponse(method)), statusPromise = responsePromise.catch((_) => {}).then(delay(this.afterResponseDelay, options.abort)).then((_) => this.promiseStatus()), trailersPromise = responsePromise.catch((_) => {}).then(delay(this.afterResponseDelay, options.abort)).then((_) => this.promiseTrailers()); this.maybeSuppressUncaught(statusPromise, trailersPromise); this.lastInput = { single: input }; return new unary_call_1$1.UnaryCall(method, requestHeaders, input, headersPromise, responsePromise, statusPromise, trailersPromise); } serverStreaming(method, input, options) { var _a$2; const requestHeaders = (_a$2 = options.meta) !== null && _a$2 !== void 0 ? _a$2 : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1$1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise.then(delay(this.responseDelay, options.abort)).catch(() => {}).then(() => this.streamResponses(method, outputStream, options.abort)).then(delay(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise.then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise.then(() => this.promiseTrailers()); this.maybeSuppressUncaught(statusPromise, trailersPromise); this.lastInput = { single: input }; return new server_streaming_call_1$1.ServerStreamingCall(method, requestHeaders, input, headersPromise, outputStream, statusPromise, trailersPromise); } clientStreaming(method, options) { var _a$2; const requestHeaders = (_a$2 = options.meta) !== null && _a$2 !== void 0 ? _a$2 : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), responsePromise = headersPromise.catch((_) => {}).then(delay(this.responseDelay, options.abort)).then((_) => this.promiseSingleResponse(method)), statusPromise = responsePromise.catch((_) => {}).then(delay(this.afterResponseDelay, options.abort)).then((_) => this.promiseStatus()), trailersPromise = responsePromise.catch((_) => {}).then(delay(this.afterResponseDelay, options.abort)).then((_) => this.promiseTrailers()); this.maybeSuppressUncaught(statusPromise, trailersPromise); this.lastInput = new TestInputStream(this.data, options.abort); return new client_streaming_call_1$1.ClientStreamingCall(method, requestHeaders, this.lastInput, headersPromise, responsePromise, statusPromise, trailersPromise); } duplex(method, options) { var _a$2; const requestHeaders = (_a$2 = options.meta) !== null && _a$2 !== void 0 ? 
_a$2 : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1$1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise.then(delay(this.responseDelay, options.abort)).catch(() => {}).then(() => this.streamResponses(method, outputStream, options.abort)).then(delay(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise.then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise.then(() => this.promiseTrailers()); this.maybeSuppressUncaught(statusPromise, trailersPromise); this.lastInput = new TestInputStream(this.data, options.abort); return new duplex_streaming_call_1$1.DuplexStreamingCall(method, requestHeaders, this.lastInput, headersPromise, outputStream, statusPromise, trailersPromise); } }; exports.TestTransport = TestTransport; TestTransport.defaultHeaders = { responseHeader: "test" }; TestTransport.defaultStatus = { code: "OK", detail: "all good" }; TestTransport.defaultTrailers = { responseTrailer: "test" }; function delay(ms, abort) { return (v) => new Promise((resolve, reject) => { if (abort === null || abort === void 0 ? void 0 : abort.aborted) reject(new rpc_error_1$1.RpcError("user cancel", "CANCELLED")); else { const id = setTimeout(() => resolve(v), ms); if (abort) abort.addEventListener("abort", (ev) => { clearTimeout(id); reject(new rpc_error_1$1.RpcError("user cancel", "CANCELLED")); }); } }); } var TestInputStream = class { constructor(data, abort) { this._completed = false; this._sent = []; this.data = data; this.abort = abort; } get sent() { return this._sent; } get completed() { return this._completed; } send(message) { if (this.data.inputMessage instanceof rpc_error_1$1.RpcError) return Promise.reject(this.data.inputMessage); const delayMs = this.data.inputMessage === void 0 ? 10 : this.data.inputMessage; return Promise.resolve(void 0).then(() => { this._sent.push(message); }).then(delay(delayMs, this.abort)); } complete() { if (this.data.inputComplete instanceof rpc_error_1$1.RpcError) return Promise.reject(this.data.inputComplete); const delayMs = this.data.inputComplete === void 0 ? 10 : this.data.inputComplete; return Promise.resolve(void 0).then(() => { this._completed = true; }).then(delay(delayMs, this.abort)); } }; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-interceptor.js var require_rpc_interceptor = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-interceptor.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.stackDuplexStreamingInterceptors = exports.stackClientStreamingInterceptors = exports.stackServerStreamingInterceptors = exports.stackUnaryInterceptors = exports.stackIntercept = void 0; const runtime_1$3 = require_commonjs$1(); /** * Creates a "stack" of all interceptors specified in the given `RpcOptions`. * Used by generated client implementations. * @internal */ function stackIntercept(kind, transport, method, options, input) { var _a$2, _b$1, _c$1, _d$1; if (kind == "unary") { let tail = (mtd, inp, opt) => transport.unary(mtd, inp, opt); for (const curr of ((_a$2 = options.interceptors) !== null && _a$2 !== void 0 ?
_a$2 : []).filter((i) => i.interceptUnary).reverse()) { const next = tail; tail = (mtd, inp, opt) => curr.interceptUnary(next, mtd, inp, opt); } return tail(method, input, options); } if (kind == "serverStreaming") { let tail = (mtd, inp, opt) => transport.serverStreaming(mtd, inp, opt); for (const curr of ((_b$1 = options.interceptors) !== null && _b$1 !== void 0 ? _b$1 : []).filter((i) => i.interceptServerStreaming).reverse()) { const next = tail; tail = (mtd, inp, opt) => curr.interceptServerStreaming(next, mtd, inp, opt); } return tail(method, input, options); } if (kind == "clientStreaming") { let tail = (mtd, opt) => transport.clientStreaming(mtd, opt); for (const curr of ((_c$1 = options.interceptors) !== null && _c$1 !== void 0 ? _c$1 : []).filter((i) => i.interceptClientStreaming).reverse()) { const next = tail; tail = (mtd, opt) => curr.interceptClientStreaming(next, mtd, opt); } return tail(method, options); } if (kind == "duplex") { let tail = (mtd, opt) => transport.duplex(mtd, opt); for (const curr of ((_d$1 = options.interceptors) !== null && _d$1 !== void 0 ? _d$1 : []).filter((i) => i.interceptDuplex).reverse()) { const next = tail; tail = (mtd, opt) => curr.interceptDuplex(next, mtd, opt); } return tail(method, options); } runtime_1$3.assertNever(kind); } exports.stackIntercept = stackIntercept; /** * @deprecated replaced by `stackIntercept()`, still here to support older generated code */ function stackUnaryInterceptors(transport, method, input, options) { return stackIntercept("unary", transport, method, options, input); } exports.stackUnaryInterceptors = stackUnaryInterceptors; /** * @deprecated replaced by `stackIntercept()`, still here to support older generated code */ function stackServerStreamingInterceptors(transport, method, input, options) { return stackIntercept("serverStreaming", transport, method, options, input); } exports.stackServerStreamingInterceptors = stackServerStreamingInterceptors; /** * @deprecated replaced by `stackIntercept()`, still here to support older generated code */ function stackClientStreamingInterceptors(transport, method, options) { return stackIntercept("clientStreaming", transport, method, options); } exports.stackClientStreamingInterceptors = stackClientStreamingInterceptors; /** * @deprecated replaced by `stackIntercept()`, still here to support older generated code */ function stackDuplexStreamingInterceptors(transport, method, options) { return stackIntercept("duplex", transport, method, options); } exports.stackDuplexStreamingInterceptors = stackDuplexStreamingInterceptors; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-call-context.js var require_server_call_context = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-call-context.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.ServerCallContextController = void 0; var ServerCallContextController = class { constructor(method, headers, deadline, sendResponseHeadersFn, defaultStatus = { code: "OK", detail: "" }) { this._cancelled = false; this._listeners = []; this.method = method; this.headers = headers; this.deadline = deadline; this.trailers = {}; this._sendRH = sendResponseHeadersFn; this.status = defaultStatus; } /** * Set the call cancelled. * * Invokes all callbacks registered with onCancel() and * sets `cancelled = true`. 
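*
* @example
* A hypothetical server-side sketch (`ctx` and `ticker` are placeholders,
* not part of the original sources) that stops background work when the
* client disconnects:
* ```typescript
* const unregister = ctx.onCancel(() => clearInterval(ticker));
* // if the work finishes normally first, remove the callback again:
* unregister();
* ```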
*/ notifyCancelled() { if (!this._cancelled) { this._cancelled = true; for (let l of this._listeners) l(); } } /** * Send response headers. */ sendResponseHeaders(data) { this._sendRH(data); } /** * Is the call cancelled? * * When the client closes the connection before the server * is done, the call is cancelled. * * If you want to cancel a request on the server, throw a * RpcError with the CANCELLED status code. */ get cancelled() { return this._cancelled; } /** * Add a callback for cancellation. */ onCancel(callback) { const l = this._listeners; l.push(callback); return () => { let i = l.indexOf(callback); if (i >= 0) l.splice(i, 1); }; } }; exports.ServerCallContextController = ServerCallContextController; } }); //#endregion //#region node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js var require_commonjs = __commonJS({ "node_modules/.deno/@protobuf-ts+runtime-rpc@2.10.0/node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js"(exports) { // webpack verbose output hints that this should be useful Object.defineProperty(exports, "__esModule", { value: true }); var service_type_1 = require_service_type(); Object.defineProperty(exports, "ServiceType", { enumerable: true, get: function() { return service_type_1.ServiceType; } }); var reflection_info_1 = require_reflection_info(); Object.defineProperty(exports, "readMethodOptions", { enumerable: true, get: function() { return reflection_info_1.readMethodOptions; } }); Object.defineProperty(exports, "readMethodOption", { enumerable: true, get: function() { return reflection_info_1.readMethodOption; } }); Object.defineProperty(exports, "readServiceOption", { enumerable: true, get: function() { return reflection_info_1.readServiceOption; } }); var rpc_error_1 = require_rpc_error(); Object.defineProperty(exports, "RpcError", { enumerable: true, get: function() { return rpc_error_1.RpcError; } }); var rpc_options_1 = require_rpc_options(); Object.defineProperty(exports, "mergeRpcOptions", { enumerable: true, get: function() { return rpc_options_1.mergeRpcOptions; } }); var rpc_output_stream_1 = require_rpc_output_stream(); Object.defineProperty(exports, "RpcOutputStreamController", { enumerable: true, get: function() { return rpc_output_stream_1.RpcOutputStreamController; } }); var test_transport_1 = require_test_transport(); Object.defineProperty(exports, "TestTransport", { enumerable: true, get: function() { return test_transport_1.TestTransport; } }); var deferred_1 = require_deferred(); Object.defineProperty(exports, "Deferred", { enumerable: true, get: function() { return deferred_1.Deferred; } }); Object.defineProperty(exports, "DeferredState", { enumerable: true, get: function() { return deferred_1.DeferredState; } }); var duplex_streaming_call_1 = require_duplex_streaming_call(); Object.defineProperty(exports, "DuplexStreamingCall", { enumerable: true, get: function() { return duplex_streaming_call_1.DuplexStreamingCall; } }); var client_streaming_call_1 = require_client_streaming_call(); Object.defineProperty(exports, "ClientStreamingCall", { enumerable: true, get: function() { return client_streaming_call_1.ClientStreamingCall; } }); var server_streaming_call_1 = require_server_streaming_call(); Object.defineProperty(exports, "ServerStreamingCall", { enumerable: true, get: function() { return server_streaming_call_1.ServerStreamingCall; } }); var unary_call_1 = require_unary_call(); Object.defineProperty(exports, "UnaryCall", { enumerable: true, get: function() { return 
unary_call_1.UnaryCall; } }); var rpc_interceptor_1 = require_rpc_interceptor(); Object.defineProperty(exports, "stackIntercept", { enumerable: true, get: function() { return rpc_interceptor_1.stackIntercept; } }); Object.defineProperty(exports, "stackDuplexStreamingInterceptors", { enumerable: true, get: function() { return rpc_interceptor_1.stackDuplexStreamingInterceptors; } }); Object.defineProperty(exports, "stackClientStreamingInterceptors", { enumerable: true, get: function() { return rpc_interceptor_1.stackClientStreamingInterceptors; } }); Object.defineProperty(exports, "stackServerStreamingInterceptors", { enumerable: true, get: function() { return rpc_interceptor_1.stackServerStreamingInterceptors; } }); Object.defineProperty(exports, "stackUnaryInterceptors", { enumerable: true, get: function() { return rpc_interceptor_1.stackUnaryInterceptors; } }); var server_call_context_1 = require_server_call_context(); Object.defineProperty(exports, "ServerCallContextController", { enumerable: true, get: function() { return server_call_context_1.ServerCallContextController; } }); } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.js var require_cachescope = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.CacheScope = void 0; const runtime_1$2 = require_commonjs$1(); const runtime_2$2 = require_commonjs$1(); const runtime_3$2 = require_commonjs$1(); const runtime_4$2 = require_commonjs$1(); const runtime_5$2 = require_commonjs$1(); var CacheScope$Type = class extends runtime_5$2.MessageType { constructor() { super("github.actions.results.entities.v1.CacheScope", [{ no: 1, name: "scope", kind: "scalar", T: 9 }, { no: 2, name: "permission", kind: "scalar", T: 3 }]); } create(value) { const message = { scope: "", permission: "0" }; globalThis.Object.defineProperty(message, runtime_4$2.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== void 0) (0, runtime_3$2.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case 1: message.scope = reader.string(); break; case 2: message.permission = reader.int64().toString(); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d$1 = reader.skip(wireType); if (u !== false) (u === true ? runtime_2$2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d$1); } } return message; } internalBinaryWrite(message, writer, options) { if (message.scope !== "") writer.tag(1, runtime_1$2.WireType.LengthDelimited).string(message.scope); if (message.permission !== "0") writer.tag(2, runtime_1$2.WireType.Varint).int64(message.permission); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
runtime_2$2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } }; /** * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheScope */ exports.CacheScope = new CacheScope$Type(); } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.js var require_cachemetadata = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.CacheMetadata = void 0; const runtime_1$1 = require_commonjs$1(); const runtime_2$1 = require_commonjs$1(); const runtime_3$1 = require_commonjs$1(); const runtime_4$1 = require_commonjs$1(); const runtime_5$1 = require_commonjs$1(); const cachescope_1 = require_cachescope(); var CacheMetadata$Type = class extends runtime_5$1.MessageType { constructor() { super("github.actions.results.entities.v1.CacheMetadata", [{ no: 1, name: "repository_id", kind: "scalar", T: 3 }, { no: 2, name: "scope", kind: "message", repeat: 1, T: () => cachescope_1.CacheScope }]); } create(value) { const message = { repositoryId: "0", scope: [] }; globalThis.Object.defineProperty(message, runtime_4$1.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== void 0) (0, runtime_3$1.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case 1: message.repositoryId = reader.int64().toString(); break; case 2: message.scope.push(cachescope_1.CacheScope.internalBinaryRead(reader, reader.uint32(), options)); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d$1 = reader.skip(wireType); if (u !== false) (u === true ? runtime_2$1.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d$1); } } return message; } internalBinaryWrite(message, writer, options) { if (message.repositoryId !== "0") writer.tag(1, runtime_1$1.WireType.Varint).int64(message.repositoryId); for (let i = 0; i < message.scope.length; i++) cachescope_1.CacheScope.internalBinaryWrite(message.scope[i], writer.tag(2, runtime_1$1.WireType.LengthDelimited).fork(), options).join(); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
runtime_2$1.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } }; /** * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheMetadata */ exports.CacheMetadata = new CacheMetadata$Type(); } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/generated/results/api/v1/cache.js var require_cache$1 = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/generated/results/api/v1/cache.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.CacheService = exports.GetCacheEntryDownloadURLResponse = exports.GetCacheEntryDownloadURLRequest = exports.FinalizeCacheEntryUploadResponse = exports.FinalizeCacheEntryUploadRequest = exports.CreateCacheEntryResponse = exports.CreateCacheEntryRequest = void 0; const runtime_rpc_1 = require_commonjs(); const runtime_1 = require_commonjs$1(); const runtime_2 = require_commonjs$1(); const runtime_3 = require_commonjs$1(); const runtime_4 = require_commonjs$1(); const runtime_5 = require_commonjs$1(); const cachemetadata_1 = require_cachemetadata(); var CreateCacheEntryRequest$Type = class extends runtime_5.MessageType { constructor() { super("github.actions.results.api.v1.CreateCacheEntryRequest", [ { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, { no: 2, name: "key", kind: "scalar", T: 9 }, { no: 3, name: "version", kind: "scalar", T: 9 } ]); } create(value) { const message = { key: "", version: "" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== void 0) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case 1: message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; case 2: message.key = reader.string(); break; case 3: message.version = reader.string(); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d$1 = reader.skip(wireType); if (u !== false) (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d$1); } } return message; } internalBinaryWrite(message, writer, options) { if (message.metadata) cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); if (message.key !== "") writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); if (message.version !== "") writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.version); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } }; /** * @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryRequest */ exports.CreateCacheEntryRequest = new CreateCacheEntryRequest$Type(); var CreateCacheEntryResponse$Type = class extends runtime_5.MessageType { constructor() { super("github.actions.results.api.v1.CreateCacheEntryResponse", [{ no: 1, name: "ok", kind: "scalar", T: 8 }, { no: 2, name: "signed_upload_url", kind: "scalar", T: 9 }]); } create(value) { const message = { ok: false, signedUploadUrl: "" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== void 0) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case 1: message.ok = reader.bool(); break; case 2: message.signedUploadUrl = reader.string(); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d$1 = reader.skip(wireType); if (u !== false) (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d$1); } } return message; } internalBinaryWrite(message, writer, options) { if (message.ok !== false) writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); if (message.signedUploadUrl !== "") writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl); let u = options.writeUnknownFields; if (u !== false) (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } }; /** * @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryResponse */ exports.CreateCacheEntryResponse = new CreateCacheEntryResponse$Type(); var FinalizeCacheEntryUploadRequest$Type = class extends runtime_5.MessageType { constructor() { super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [ { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, { no: 2, name: "key", kind: "scalar", T: 9 }, { no: 3, name: "size_bytes", kind: "scalar", T: 3 }, { no: 4, name: "version", kind: "scalar", T: 9 } ]); } create(value) { const message = { key: "", sizeBytes: "0", version: "" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== void 0) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case 1: message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; case 2: message.key = reader.string(); break; case 3: message.sizeBytes = reader.int64().toString(); break; case 4: message.version = reader.string(); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d$1 = reader.skip(wireType); if (u !== false) (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d$1); } } return message; } internalBinaryWrite(message, writer, options) { if (message.metadata) cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); if (message.key !== "") writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); if (message.sizeBytes !== "0") writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes); if (message.version !== "") writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); let u = options.writeUnknownFields; if (u !== false) (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } }; /** * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest */ exports.FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type(); var FinalizeCacheEntryUploadResponse$Type = class extends runtime_5.MessageType { constructor() { super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [{ no: 1, name: "ok", kind: "scalar", T: 8 }, { no: 2, name: "entry_id", kind: "scalar", T: 3 }]); } create(value) { const message = { ok: false, entryId: "0" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== void 0) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case 1: message.ok = reader.bool(); break; case 2: message.entryId = reader.int64().toString(); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d$1 = reader.skip(wireType); if (u !== false) (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d$1); } } return message; } internalBinaryWrite(message, writer, options) { if (message.ok !== false) writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); if (message.entryId !== "0") writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } }; /** * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse */ exports.FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type(); var GetCacheEntryDownloadURLRequest$Type = class extends runtime_5.MessageType { constructor() { super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [ { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, { no: 2, name: "key", kind: "scalar", T: 9 }, { no: 3, name: "restore_keys", kind: "scalar", repeat: 2, T: 9 }, { no: 4, name: "version", kind: "scalar", T: 9 } ]); } create(value) { const message = { key: "", restoreKeys: [], version: "" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== void 0) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case 1: message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; case 2: message.key = reader.string(); break; case 3: message.restoreKeys.push(reader.string()); break; case 4: message.version = reader.string(); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d$1 = reader.skip(wireType); if (u !== false) (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d$1); } } return message; } internalBinaryWrite(message, writer, options) { if (message.metadata) cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); if (message.key !== "") writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); for (let i = 0; i < message.restoreKeys.length; i++) writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]); if (message.version !== "") writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); let u = options.writeUnknownFields; if (u !== false) (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } }; /** * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest */ exports.GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type(); var GetCacheEntryDownloadURLResponse$Type = class extends runtime_5.MessageType { constructor() { super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [ { no: 1, name: "ok", kind: "scalar", T: 8 }, { no: 2, name: "signed_download_url", kind: "scalar", T: 9 }, { no: 3, name: "matched_key", kind: "scalar", T: 9 } ]); } create(value) { const message = { ok: false, signedDownloadUrl: "", matchedKey: "" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== void 0) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case 1: message.ok = reader.bool(); break; case 2: message.signedDownloadUrl = reader.string(); break; case 3: message.matchedKey = reader.string(); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d$1 = reader.skip(wireType); if (u !== false) (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d$1); } } return message; } internalBinaryWrite(message, writer, options) { if (message.ok !== false) writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); if (message.signedDownloadUrl !== "") writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedDownloadUrl); if (message.matchedKey !== "") writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.matchedKey); let u = options.writeUnknownFields; if (u !== false) (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } }; /** * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse */ exports.GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type(); /** * @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService */ exports.CacheService = new runtime_rpc_1.ServiceType("github.actions.results.api.v1.CacheService", [ { name: "CreateCacheEntry", options: {}, I: exports.CreateCacheEntryRequest, O: exports.CreateCacheEntryResponse }, { name: "FinalizeCacheEntryUpload", options: {}, I: exports.FinalizeCacheEntryUploadRequest, O: exports.FinalizeCacheEntryUploadResponse }, { name: "GetCacheEntryDownloadURL", options: {}, I: exports.GetCacheEntryDownloadURLRequest, O: exports.GetCacheEntryDownloadURLResponse } ]); } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp-client.js var require_cache_twirp_client = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp-client.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.CacheServiceClientProtobuf = exports.CacheServiceClientJSON = void 0; const cache_1 = require_cache$1(); var CacheServiceClientJSON = class { constructor(rpc) { this.rpc = rpc; this.CreateCacheEntry.bind(this); this.FinalizeCacheEntryUpload.bind(this); this.GetCacheEntryDownloadURL.bind(this); } CreateCacheEntry(request) { const data = cache_1.CreateCacheEntryRequest.toJson(request, { useProtoFieldName: true, emitDefaultValues: false }); const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/json", data); return promise.then((data$1) => cache_1.CreateCacheEntryResponse.fromJson(data$1, { ignoreUnknownFields: true })); } FinalizeCacheEntryUpload(request) { const data = cache_1.FinalizeCacheEntryUploadRequest.toJson(request, { useProtoFieldName: true, emitDefaultValues: false }); const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/json", data); return promise.then((data$1) => cache_1.FinalizeCacheEntryUploadResponse.fromJson(data$1, { ignoreUnknownFields: true })); } GetCacheEntryDownloadURL(request) { const data = 
cache_1.GetCacheEntryDownloadURLRequest.toJson(request, { useProtoFieldName: true, emitDefaultValues: false }); const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/json", data); return promise.then((data$1) => cache_1.GetCacheEntryDownloadURLResponse.fromJson(data$1, { ignoreUnknownFields: true })); } }; exports.CacheServiceClientJSON = CacheServiceClientJSON; var CacheServiceClientProtobuf = class { constructor(rpc) { this.rpc = rpc; this.CreateCacheEntry.bind(this); this.FinalizeCacheEntryUpload.bind(this); this.GetCacheEntryDownloadURL.bind(this); } CreateCacheEntry(request) { const data = cache_1.CreateCacheEntryRequest.toBinary(request); const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/protobuf", data); return promise.then((data$1) => cache_1.CreateCacheEntryResponse.fromBinary(data$1)); } FinalizeCacheEntryUpload(request) { const data = cache_1.FinalizeCacheEntryUploadRequest.toBinary(request); const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/protobuf", data); return promise.then((data$1) => cache_1.FinalizeCacheEntryUploadResponse.fromBinary(data$1)); } GetCacheEntryDownloadURL(request) { const data = cache_1.GetCacheEntryDownloadURLRequest.toBinary(request); const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/protobuf", data); return promise.then((data$1) => cache_1.GetCacheEntryDownloadURLResponse.fromBinary(data$1)); } }; exports.CacheServiceClientProtobuf = CacheServiceClientProtobuf; } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/shared/util.js var require_util = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/shared/util.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.maskSecretUrls = exports.maskSigUrl = void 0; const core_1$1 = require_core(); /** * Masks the `sig` parameter in a URL and sets it as a secret. * * @param url - The URL containing the signature parameter to mask * @remarks * This function attempts to parse the provided URL and identify the 'sig' query parameter. * If found, it registers both the raw and URL-encoded signature values as secrets using * the Actions `setSecret` API, which prevents them from being displayed in logs. * * The function handles errors gracefully if URL parsing fails, logging them as debug messages. * * @example * ```typescript * // Mask a signature in an Azure SAS token URL * maskSigUrl('https://example.blob.core.windows.net/container/file.txt?sig=abc123&se=2023-01-01'); * ``` */ function maskSigUrl(url$1) { if (!url$1) return; try { const parsedUrl = new URL(url$1); const signature = parsedUrl.searchParams.get("sig"); if (signature) { (0, core_1$1.setSecret)(signature); (0, core_1$1.setSecret)(encodeURIComponent(signature)); } } catch (error) { (0, core_1$1.debug)(`Failed to parse URL: ${url$1} ${error instanceof Error ? error.message : String(error)}`); } } exports.maskSigUrl = maskSigUrl; /** * Masks sensitive information in URLs containing signature parameters. * Currently supports masking 'sig' parameters in the 'signed_upload_url' * and 'signed_download_url' properties of the provided object. 
* * @param body - The object that may contain signed URLs to mask * @remarks * This function extracts URLs from the object properties and calls maskSigUrl * on each one to redact sensitive signature information. The function doesn't * modify the original object; it only marks the signatures as secrets for * logging purposes. * * @example * ```typescript * const responseBody = { *   signed_upload_url: 'https://blob.core.windows.net/?sig=abc123', *   signed_download_url: 'https://blob.core.windows.net/?sig=def456' * }; * maskSecretUrls(responseBody); * ``` */ function maskSecretUrls(body$1) { if (typeof body$1 !== "object" || body$1 === null) { (0, core_1$1.debug)("body is not an object or is null"); return; } if ("signed_upload_url" in body$1 && typeof body$1.signed_upload_url === "string") maskSigUrl(body$1.signed_upload_url); if ("signed_download_url" in body$1 && typeof body$1.signed_download_url === "string") maskSigUrl(body$1.signed_download_url); } exports.maskSecretUrls = maskSecretUrls; } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.js var require_cacheTwirpClient = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.js"(exports) { var __awaiter$5 = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", { value: true }); exports.internalCacheTwirpClient = void 0; const core_1 = require_core(); const user_agent_1 = require_user_agent(); const errors_1 = require_errors(); const config_1$1 = require_config(); const cacheUtils_1 = require_cacheUtils(); const auth_1 = require_auth(); const http_client_1 = require_lib(); const cache_twirp_client_1 = require_cache_twirp_client(); const util_1 = require_util(); /** * This class is a wrapper around the CacheServiceClientJSON class generated by Twirp. * * It adds retry logic to the request method, which is not present in the generated client. * * This class is used to interact with cache service v2.
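*
* Retries back off exponentially with jitter: the first retry waits the base
* interval, and each subsequent retry is drawn uniformly from
* [base * multiplier^k, base * multiplier^(k+1)), where k counts prior attempts.
*
* @example
* A usage sketch (the request payload is an assumption; the client is
* normally obtained through `internalCacheTwirpClient` below):
* ```typescript
* const client = internalCacheTwirpClient({ maxAttempts: 3, retryIntervalMs: 1000 });
* const res = await client.GetCacheEntryDownloadURL({
*   key: "my-cache-key",
*   restoreKeys: [],
*   version: "v1",
* });
* if (res.ok) console.log(res.matchedKey, res.signedDownloadUrl);
* ```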
*/ var CacheServiceClient = class { constructor(userAgent, maxAttempts, baseRetryIntervalMilliseconds, retryMultiplier) { this.maxAttempts = 5; this.baseRetryIntervalMilliseconds = 3e3; this.retryMultiplier = 1.5; const token = (0, cacheUtils_1.getRuntimeToken)(); this.baseUrl = (0, config_1$1.getCacheServiceURL)(); if (maxAttempts) this.maxAttempts = maxAttempts; if (baseRetryIntervalMilliseconds) this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds; if (retryMultiplier) this.retryMultiplier = retryMultiplier; this.httpClient = new http_client_1.HttpClient(userAgent, [new auth_1.BearerCredentialHandler(token)]); } request(service, method, contentType$1, data) { return __awaiter$5(this, void 0, void 0, function* () { const url$1 = new URL(`/twirp/${service}/${method}`, this.baseUrl).href; (0, core_1.debug)(`[Request] ${method} ${url$1}`); const headers = { "Content-Type": contentType$1 }; try { const { body: body$1 } = yield this.retryableRequest(() => __awaiter$5(this, void 0, void 0, function* () { return this.httpClient.post(url$1, JSON.stringify(data), headers); })); return body$1; } catch (error) { throw new Error(`Failed to ${method}: ${error.message}`); } }); } retryableRequest(operation) { return __awaiter$5(this, void 0, void 0, function* () { let attempt = 0; let errorMessage = ""; let rawBody = ""; while (attempt < this.maxAttempts) { let isRetryable = false; try { const response = yield operation(); const statusCode = response.message.statusCode; rawBody = yield response.readBody(); (0, core_1.debug)(`[Response] - ${response.message.statusCode}`); (0, core_1.debug)(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`); const body$1 = JSON.parse(rawBody); (0, util_1.maskSecretUrls)(body$1); (0, core_1.debug)(`Body: ${JSON.stringify(body$1, null, 2)}`); if (this.isSuccessStatusCode(statusCode)) return { response, body: body$1 }; isRetryable = this.isRetryableHttpStatusCode(statusCode); errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`; if (body$1.msg) { if (errors_1.UsageError.isUsageErrorMessage(body$1.msg)) throw new errors_1.UsageError(); errorMessage = `${errorMessage}: ${body$1.msg}`; } } catch (error) { if (error instanceof SyntaxError) (0, core_1.debug)(`Raw Body: ${rawBody}`); if (error instanceof errors_1.UsageError) throw error; if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code); isRetryable = true; errorMessage = error.message; } if (!isRetryable) throw new Error(`Received non-retryable error: ${errorMessage}`); if (attempt + 1 === this.maxAttempts) throw new Error(`Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`); const retryTimeMilliseconds = this.getExponentialRetryTimeMilliseconds(attempt); (0, core_1.info)(`Attempt ${attempt + 1} of ${this.maxAttempts} failed with error: ${errorMessage}. 
Retrying request in ${retryTimeMilliseconds} ms...`); yield this.sleep(retryTimeMilliseconds); attempt++; } throw new Error(`Request failed`); }); } isSuccessStatusCode(statusCode) { if (!statusCode) return false; return statusCode >= 200 && statusCode < 300; } isRetryableHttpStatusCode(statusCode) { if (!statusCode) return false; const retryableStatusCodes = [ http_client_1.HttpCodes.BadGateway, http_client_1.HttpCodes.GatewayTimeout, http_client_1.HttpCodes.InternalServerError, http_client_1.HttpCodes.ServiceUnavailable, http_client_1.HttpCodes.TooManyRequests ]; return retryableStatusCodes.includes(statusCode); } sleep(milliseconds) { return __awaiter$5(this, void 0, void 0, function* () { return new Promise((resolve) => setTimeout(resolve, milliseconds)); }); } getExponentialRetryTimeMilliseconds(attempt) { if (attempt < 0) throw new Error("attempt should be a positive integer"); if (attempt === 0) return this.baseRetryIntervalMilliseconds; const minTime = this.baseRetryIntervalMilliseconds * Math.pow(this.retryMultiplier, attempt); const maxTime = minTime * this.retryMultiplier; return Math.trunc(Math.random() * (maxTime - minTime) + minTime); } }; function internalCacheTwirpClient(options) { const client = new CacheServiceClient((0, user_agent_1.getUserAgentString)(), options === null || options === void 0 ? void 0 : options.maxAttempts, options === null || options === void 0 ? void 0 : options.retryIntervalMs, options === null || options === void 0 ? void 0 : options.retryMultiplier); return new cache_twirp_client_1.CacheServiceClientJSON(client); } exports.internalCacheTwirpClient = internalCacheTwirpClient; } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/tar.js var require_tar = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/internal/tar.js"(exports) { var __createBinding$8 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ? !m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$8 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$8 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$8(result, mod, k); } __setModuleDefault$8(result, mod); return result; }; var __awaiter$4 = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", { value: true }); exports.createTar = exports.extractTar = exports.listTar = void 0; const exec_1 = require_exec(); const io = __importStar$8(require_io()); const fs_1 = __require("fs"); const path$6 = __importStar$8(__require("path")); const utils$1 = __importStar$8(require_cacheUtils()); const constants_1$1 = require_constants$3(); const IS_WINDOWS$5 = process.platform === "win32"; function getTarPath() { return __awaiter$4(this, void 0, void 0, function* () { switch (process.platform) { case "win32": { const gnuTar = yield utils$1.getGnuTarPathOnWindows(); const systemTar = constants_1$1.SystemTarPathOnWindows; if (gnuTar) return { path: gnuTar, type: constants_1$1.ArchiveToolType.GNU }; else if ((0, fs_1.existsSync)(systemTar)) return { path: systemTar, type: constants_1$1.ArchiveToolType.BSD }; break; } case "darwin": { const gnuTar = yield io.which("gtar", false); if (gnuTar) return { path: gnuTar, type: constants_1$1.ArchiveToolType.GNU }; else return { path: yield io.which("tar", true), type: constants_1$1.ArchiveToolType.BSD }; } default: break; } return { path: yield io.which("tar", true), type: constants_1$1.ArchiveToolType.GNU }; }); } function getTarArgs(tarPath, compressionMethod, type, archivePath = "") { return __awaiter$4(this, void 0, void 0, function* () { const args = [`"${tarPath.path}"`]; const cacheFileName = utils$1.getCacheFileName(compressionMethod); const tarFile = "cache.tar"; const workingDirectory = getWorkingDirectory(); const BSD_TAR_ZSTD = tarPath.type === constants_1$1.ArchiveToolType.BSD && compressionMethod !== constants_1$1.CompressionMethod.Gzip && IS_WINDOWS$5; switch (type) { case "create": args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path$6.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path$6.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path$6.sep}`, "g"), "/"), "--files-from", constants_1$1.ManifestFilename); break; case "extract": args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path$6.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path$6.sep}`, "g"), "/")); break; case "list": args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path$6.sep}`, "g"), "/"), "-P"); break; } if (tarPath.type === constants_1$1.ArchiveToolType.GNU) switch (process.platform) { case "win32": args.push("--force-local"); break; case "darwin": args.push("--delay-directory-restore"); break; } return args; }); } function getCommands(compressionMethod, type, archivePath = "") { return __awaiter$4(this, void 0, void 0, function* () { let args; const tarPath = yield getTarPath(); const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); const compressionArgs = type !== "create" ? 
yield getDecompressionProgram(tarPath, compressionMethod, archivePath) : yield getCompressionProgram(tarPath, compressionMethod); const BSD_TAR_ZSTD = tarPath.type === constants_1$1.ArchiveToolType.BSD && compressionMethod !== constants_1$1.CompressionMethod.Gzip && IS_WINDOWS$5; if (BSD_TAR_ZSTD && type !== "create") args = [[...compressionArgs].join(" "), [...tarArgs].join(" ")]; else args = [[...tarArgs].join(" "), [...compressionArgs].join(" ")]; if (BSD_TAR_ZSTD) return args; return [args.join(" ")]; }); } function getWorkingDirectory() { var _a$2; return (_a$2 = process.env["GITHUB_WORKSPACE"]) !== null && _a$2 !== void 0 ? _a$2 : process.cwd(); } function getDecompressionProgram(tarPath, compressionMethod, archivePath) { return __awaiter$4(this, void 0, void 0, function* () { const BSD_TAR_ZSTD = tarPath.type === constants_1$1.ArchiveToolType.BSD && compressionMethod !== constants_1$1.CompressionMethod.Gzip && IS_WINDOWS$5; switch (compressionMethod) { case constants_1$1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ "zstd -d --long=30 --force -o", constants_1$1.TarFilename, archivePath.replace(new RegExp(`\\${path$6.sep}`, "g"), "/") ] : ["--use-compress-program", IS_WINDOWS$5 ? "\"zstd -d --long=30\"" : "unzstd --long=30"]; case constants_1$1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ "zstd -d --force -o", constants_1$1.TarFilename, archivePath.replace(new RegExp(`\\${path$6.sep}`, "g"), "/") ] : ["--use-compress-program", IS_WINDOWS$5 ? "\"zstd -d\"" : "unzstd"]; default: return ["-z"]; } }); } function getCompressionProgram(tarPath, compressionMethod) { return __awaiter$4(this, void 0, void 0, function* () { const cacheFileName = utils$1.getCacheFileName(compressionMethod); const BSD_TAR_ZSTD = tarPath.type === constants_1$1.ArchiveToolType.BSD && compressionMethod !== constants_1$1.CompressionMethod.Gzip && IS_WINDOWS$5; switch (compressionMethod) { case constants_1$1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ "zstd -T0 --long=30 --force -o", cacheFileName.replace(new RegExp(`\\${path$6.sep}`, "g"), "/"), constants_1$1.TarFilename ] : ["--use-compress-program", IS_WINDOWS$5 ? "\"zstd -T0 --long=30\"" : "zstdmt --long=30"]; case constants_1$1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ "zstd -T0 --force -o", cacheFileName.replace(new RegExp(`\\${path$6.sep}`, "g"), "/"), constants_1$1.TarFilename ] : ["--use-compress-program", IS_WINDOWS$5 ? "\"zstd -T0\"" : "zstdmt"]; default: return ["-z"]; } }); } function execCommands(commands, cwd) { return __awaiter$4(this, void 0, void 0, function* () { for (const command of commands) try { yield (0, exec_1.exec)(command, void 0, { cwd, env: Object.assign(Object.assign({}, process.env), { MSYS: "winsymlinks:nativestrict" }) }); } catch (error) { throw new Error(`${command.split(" ")[0]} failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); } }); } function listTar(archivePath, compressionMethod) { return __awaiter$4(this, void 0, void 0, function* () { const commands = yield getCommands(compressionMethod, "list", archivePath); yield execCommands(commands); }); } exports.listTar = listTar; function extractTar(archivePath, compressionMethod) { return __awaiter$4(this, void 0, void 0, function* () { const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); const commands = yield getCommands(compressionMethod, "extract", archivePath); yield execCommands(commands); }); } exports.extractTar = extractTar; function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter$4(this, void 0, void 0, function* () { (0, fs_1.writeFileSync)(path$6.join(archiveFolder, constants_1$1.ManifestFilename), sourceDirectories.join("\n")); const commands = yield getCommands(compressionMethod, "create"); yield execCommands(commands, archiveFolder); }); } exports.createTar = createTar; } }); //#endregion //#region node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/cache.js var require_cache = __commonJS({ "node_modules/.deno/@actions+cache@4.0.3/node_modules/@actions/cache/lib/cache.js"(exports) { var __createBinding$7 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ? !m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$7 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$7 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$7(result, mod, k); } __setModuleDefault$7(result, mod); return result; }; var __awaiter$3 = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", { value: true }); exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0; const core$3 = __importStar$7(require_core()); const path$5 = __importStar$7(__require("path")); const utils = __importStar$7(require_cacheUtils()); const cacheHttpClient = __importStar$7(require_cacheHttpClient()); const cacheTwirpClient = __importStar$7(require_cacheTwirpClient()); const config_1 = require_config(); const tar_1 = require_tar(); const constants_1 = require_constants$3(); var ValidationError = class ValidationError extends Error { constructor(message) { super(message); this.name = "ValidationError"; Object.setPrototypeOf(this, ValidationError.prototype); } }; exports.ValidationError = ValidationError; var ReserveCacheError = class ReserveCacheError extends Error { constructor(message) { super(message); this.name = "ReserveCacheError"; Object.setPrototypeOf(this, ReserveCacheError.prototype); } }; exports.ReserveCacheError = ReserveCacheError; function checkPaths(paths) { if (!paths || paths.length === 0) throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); } function checkKey(key) { if (key.length > 512) throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); const regex = /^[^,]*$/; if (!regex.test(key)) throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); } /** * isFeatureAvailable to check the presence of Actions cache service * * @returns boolean return true if Actions cache service feature is available, otherwise false */ function isFeatureAvailable() { return !!process.env["ACTIONS_CACHE_URL"]; } exports.isFeatureAvailable = isFeatureAvailable; /** * Restores cache from keys * * @param paths a list of file paths to restore from the cache * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching. * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey * @param downloadOptions cache download options * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform * @returns string returns the key for the cache hit, otherwise returns undefined */ function restoreCache$1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter$3(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); core$3.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case "v2": return yield restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); case "v1": default: return yield restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); } }); } exports.restoreCache = restoreCache$1; /** * Restores cache using the legacy Cache Service * * @param paths a list of file paths to restore from the cache * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching. 
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey * @param options cache download options * @param enableCrossOsArchive an optional boolean enabled to restore on Windows any cache created on any platform * @returns string returns the key for the cache hit, otherwise returns undefined */ function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter$3(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; core$3.debug("Resolved Keys:"); core$3.debug(JSON.stringify(keys)); if (keys.length > 10) throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); for (const key of keys) checkKey(key); const compressionMethod = yield utils.getCompressionMethod(); let archivePath = ""; try { const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { compressionMethod, enableCrossOsArchive }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) return void 0; if (options === null || options === void 0 ? void 0 : options.lookupOnly) { core$3.info("Lookup only - skipping download"); return cacheEntry.cacheKey; } archivePath = path$5.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core$3.debug(`Archive Path: ${archivePath}`); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); if (core$3.isDebug()) yield (0, tar_1.listTar)(archivePath, compressionMethod); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core$3.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); core$3.info("Cache restored successfully"); return cacheEntry.cacheKey; } catch (error) { const typedError = error; if (typedError.name === ValidationError.name) throw error; else core$3.warning(`Failed to restore: ${error.message}`); } finally { try { yield utils.unlinkFile(archivePath); } catch (error) { core$3.debug(`Failed to delete archive: ${error}`); } } return void 0; }); } /** * Restores cache using Cache Service v2 * * @param paths a list of file paths to restore from the cache * @param primaryKey an explicit key for restoring the cache. 
Lookup is done with prefix matching * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey * @param downloadOptions cache download options * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform * @returns string returns the key for the cache hit, otherwise returns undefined */ function restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter$3(this, void 0, void 0, function* () { options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; core$3.debug("Resolved Keys:"); core$3.debug(JSON.stringify(keys)); if (keys.length > 10) throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); for (const key of keys) checkKey(key); let archivePath = ""; try { const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); const compressionMethod = yield utils.getCompressionMethod(); const request = { key: primaryKey, restoreKeys, version: utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive) }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { core$3.debug(`Cache not found for version ${request.version} of keys: ${keys.join(", ")}`); return void 0; } core$3.info(`Cache hit for: ${request.key}`); if (options === null || options === void 0 ? void 0 : options.lookupOnly) { core$3.info("Lookup only - skipping download"); return response.matchedKey; } archivePath = path$5.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core$3.debug(`Archive path: ${archivePath}`); core$3.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core$3.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); if (core$3.isDebug()) yield (0, tar_1.listTar)(archivePath, compressionMethod); yield (0, tar_1.extractTar)(archivePath, compressionMethod); core$3.info("Cache restored successfully"); return response.matchedKey; } catch (error) { const typedError = error; if (typedError.name === ValidationError.name) throw error; else core$3.warning(`Failed to restore: ${error.message}`); } finally { try { if (archivePath) yield utils.unlinkFile(archivePath); } catch (error) { core$3.debug(`Failed to delete archive: ${error}`); } } return void 0; }); } /** * Saves a list of files with the specified key * * @param paths a list of file paths to be cached * @param key an explicit key for restoring the cache * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform * @param options cache upload options * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails */ function saveCache$1(paths, key, options, enableCrossOsArchive = false) { return __awaiter$3(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); core$3.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { case "v2": return yield saveCacheV2(paths, key, options, enableCrossOsArchive); case "v1": default: return yield saveCacheV1(paths, key, options, enableCrossOsArchive); } 
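/*
 * Illustrative round trip through the two exported entry points (a sketch
 * with hypothetical paths and keys, not part of the vendored sources):
 *
 *   const paths = ['node_modules'];
 *   const key = 'npm-linux-x64-abc123';
 *   const savedId = await saveCache(paths, key);  // resolves to -1 when the save did not complete
 *   const hit = await restoreCache(paths, key, ['npm-linux-x64-']);
 *   console.log(hit !== undefined ? `restored ${hit}` : 'cache miss');
 */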
}); } exports.saveCache = saveCache$1; /** * Save cache using the legacy Cache Service * * @param paths * @param key * @param options * @param enableCrossOsArchive * @returns */ function saveCacheV1(paths, key, options, enableCrossOsArchive = false) { var _a$2, _b$1, _c$1, _d$1, _e; return __awaiter$3(this, void 0, void 0, function* () { const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); core$3.debug("Cache Paths:"); core$3.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); const archiveFolder = yield utils.createTempDirectory(); const archivePath = path$5.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core$3.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); if (core$3.isDebug()) yield (0, tar_1.listTar)(archivePath, compressionMethod); const fileSizeLimit = 10 * 1024 * 1024 * 1024; const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core$3.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); core$3.debug("Reserving Cache"); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, cacheSize: archiveFileSize }); if ((_a$2 = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a$2 === void 0 ? void 0 : _a$2.cacheId) cacheId = (_b$1 = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b$1 === void 0 ? void 0 : _b$1.cacheId; else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) throw new Error((_d$1 = (_c$1 = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c$1 === void 0 ? void 0 : _c$1.message) !== null && _d$1 !== void 0 ? _d$1 : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); else throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? 
void 0 : _e.message}`); core$3.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); } catch (error) { const typedError = error; if (typedError.name === ValidationError.name) throw error; else if (typedError.name === ReserveCacheError.name) core$3.info(`Failed to save: ${typedError.message}`); else core$3.warning(`Failed to save: ${typedError.message}`); } finally { try { yield utils.unlinkFile(archivePath); } catch (error) { core$3.debug(`Failed to delete archive: ${error}`); } } return cacheId; }); } /** * Save cache using Cache Service v2 * * @param paths a list of file paths to restore from the cache * @param key an explicit key for restoring the cache * @param options cache upload options * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform * @returns */ function saveCacheV2(paths, key, options, enableCrossOsArchive = false) { return __awaiter$3(this, void 0, void 0, function* () { options = Object.assign(Object.assign({}, options), { uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8, useAzureSdk: true }); const compressionMethod = yield utils.getCompressionMethod(); const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); core$3.debug("Cache Paths:"); core$3.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); const archiveFolder = yield utils.createTempDirectory(); const archivePath = path$5.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core$3.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); if (core$3.isDebug()) yield (0, tar_1.listTar)(archivePath, compressionMethod); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core$3.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); options.archiveSizeBytes = archiveFileSize; core$3.debug("Reserving Cache"); const version$1 = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, version: version$1 }; let signedUploadUrl; try { const response = yield twirpClient.CreateCacheEntry(request); if (!response.ok) throw new Error("Response was not ok"); signedUploadUrl = response.signedUploadUrl; } catch (error) { core$3.debug(`Failed to reserve cache: ${error}`); throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } core$3.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, signedUploadUrl, options); const finalizeRequest = { key, version: version$1, sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); core$3.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); cacheId = parseInt(finalizeResponse.entryId); } catch (error) { const typedError = error; if (typedError.name === ValidationError.name) throw 
error; else if (typedError.name === ReserveCacheError.name) core$3.info(`Failed to save: ${typedError.message}`); else core$3.warning(`Failed to save: ${typedError.message}`); } finally { try { yield utils.unlinkFile(archivePath); } catch (error) { core$3.debug(`Failed to delete archive: ${error}`); } } return cacheId; }); } } }); var import_cache = __toESM(require_cache(), 1); //#endregion //#region node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/internal-glob-options-helper.js var require_internal_glob_options_helper = __commonJS({ "node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/internal-glob-options-helper.js"(exports) { var __createBinding$6 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ? !m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$6 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$6 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$6(result, mod, k); } __setModuleDefault$6(result, mod); return result; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.getOptions = void 0; const core$2 = __importStar$6(require_core()); /** * Returns a copy with defaults filled in. */ function getOptions(copy$1) { const result = { followSymbolicLinks: true, implicitDescendants: true, matchDirectories: true, omitBrokenSymbolicLinks: true, excludeHiddenFiles: false }; if (copy$1) { if (typeof copy$1.followSymbolicLinks === "boolean") { result.followSymbolicLinks = copy$1.followSymbolicLinks; core$2.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy$1.implicitDescendants === "boolean") { result.implicitDescendants = copy$1.implicitDescendants; core$2.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy$1.matchDirectories === "boolean") { result.matchDirectories = copy$1.matchDirectories; core$2.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy$1.omitBrokenSymbolicLinks === "boolean") { result.omitBrokenSymbolicLinks = copy$1.omitBrokenSymbolicLinks; core$2.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy$1.excludeHiddenFiles === "boolean") { result.excludeHiddenFiles = copy$1.excludeHiddenFiles; core$2.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; } exports.getOptions = getOptions; } }); //#endregion //#region node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/internal-path-helper.js var require_internal_path_helper = __commonJS({ "node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/internal-path-helper.js"(exports) { var __createBinding$5 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ? 
!m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$5 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$5 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$5(result, mod, k); } __setModuleDefault$5(result, mod); return result; }; var __importDefault$2 = exports && exports.__importDefault || function(mod) { return mod && mod.__esModule ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0; const path$4 = __importStar$5(__require("path")); const assert_1$2 = __importDefault$2(__require("assert")); const IS_WINDOWS$4 = process.platform === "win32"; /** * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. * * For example, on Linux/macOS: * - `/ => /` * - `/hello => /` * * For example, on Windows: * - `C:\ => C:\` * - `C:\hello => C:\` * - `C: => C:` * - `C:hello => C:` * - `\ => \` * - `\hello => \` * - `\\hello => \\hello` * - `\\hello\world => \\hello\world` */ function dirname(p) { p = safeTrimTrailingSeparator(p); if (IS_WINDOWS$4 && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) return p; let result = path$4.dirname(p); if (IS_WINDOWS$4 && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) result = safeTrimTrailingSeparator(result); return result; } exports.dirname = dirname; /** * Roots the path if not already rooted. On Windows, relative roots like `\` * or `C:` are expanded based on the current working directory. */ function ensureAbsoluteRoot(root, itemPath) { (0, assert_1$2.default)(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); (0, assert_1$2.default)(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); if (hasAbsoluteRoot(itemPath)) return itemPath; if (IS_WINDOWS$4) { if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { let cwd = process.cwd(); (0, assert_1$2.default)(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) if (itemPath.length === 2) return `${itemPath[0]}:\\${cwd.substr(3)}`; else { if (!cwd.endsWith("\\")) cwd += "\\"; return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; } else return `${itemPath[0]}:\\${itemPath.substr(2)}`; } else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { const cwd = process.cwd(); (0, assert_1$2.default)(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); return `${cwd[0]}:\\${itemPath.substr(1)}`; } } (0, assert_1$2.default)(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); if (root.endsWith("/") || IS_WINDOWS$4 && root.endsWith("\\")) {} else root += path$4.sep; return root + itemPath; } exports.ensureAbsoluteRoot = ensureAbsoluteRoot; /** * On Linux/macOS, true if path starts with `/`. 
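 * (Illustrative sketch, not from the vendored sources: on Linux/macOS,
 * `hasAbsoluteRoot('/home/user')` is true while `hasAbsoluteRoot('hello/world')`
 * is false.)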
On Windows, true for paths like: * `\\hello\share` and `C:\hello` (and using alternate separator). */ function hasAbsoluteRoot(itemPath) { (0, assert_1$2.default)(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); itemPath = normalizeSeparators(itemPath); if (IS_WINDOWS$4) return itemPath.startsWith("\\\\") || /^[A-Z]:\\/i.test(itemPath); return itemPath.startsWith("/"); } exports.hasAbsoluteRoot = hasAbsoluteRoot; /** * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). */ function hasRoot(itemPath) { (0, assert_1$2.default)(itemPath, `isRooted parameter 'itemPath' must not be empty`); itemPath = normalizeSeparators(itemPath); if (IS_WINDOWS$4) return itemPath.startsWith("\\") || /^[A-Z]:/i.test(itemPath); return itemPath.startsWith("/"); } exports.hasRoot = hasRoot; /** * Removes redundant slashes and converts `/` to `\` on Windows */ function normalizeSeparators(p) { p = p || ""; if (IS_WINDOWS$4) { p = p.replace(/\//g, "\\"); const isUnc = /^\\\\+[^\\]/.test(p); return (isUnc ? "\\" : "") + p.replace(/\\\\+/g, "\\"); } return p.replace(/\/\/+/g, "/"); } exports.normalizeSeparators = normalizeSeparators; /** * Normalizes the path separators and trims the trailing separator (when safe). * For example, `/foo/ => /foo` but `/ => /` */ function safeTrimTrailingSeparator(p) { if (!p) return ""; p = normalizeSeparators(p); if (!p.endsWith(path$4.sep)) return p; if (p === path$4.sep) return p; if (IS_WINDOWS$4 && /^[A-Z]:\\$/i.test(p)) return p; return p.substr(0, p.length - 1); } exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; } }); //#endregion //#region node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/internal-match-kind.js var require_internal_match_kind = __commonJS({ "node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/internal-match-kind.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.MatchKind = void 0; /** * Indicates whether a pattern matches a path */ var MatchKind; (function(MatchKind$2) { /** Not matched */ MatchKind$2[MatchKind$2["None"] = 0] = "None"; /** Matched if the path is a directory */ MatchKind$2[MatchKind$2["Directory"] = 1] = "Directory"; /** Matched if the path is a regular file */ MatchKind$2[MatchKind$2["File"] = 2] = "File"; /** Matched */ MatchKind$2[MatchKind$2["All"] = 3] = "All"; })(MatchKind || (exports.MatchKind = MatchKind = {})); } }); //#endregion //#region node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/internal-pattern-helper.js var require_internal_pattern_helper = __commonJS({ "node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/internal-pattern-helper.js"(exports) { var __createBinding$4 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ? !m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$4 = exports && exports.__setModuleDefault || (Object.create ? 
function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$4 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$4(result, mod, k); } __setModuleDefault$4(result, mod); return result; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.partialMatch = exports.match = exports.getSearchPaths = void 0; const pathHelper$2 = __importStar$4(require_internal_path_helper()); const internal_match_kind_1$2 = require_internal_match_kind(); const IS_WINDOWS$3 = process.platform === "win32"; /** * Given an array of patterns, returns an array of paths to search. * Duplicates and paths under other included paths are filtered out. */ function getSearchPaths(patterns) { patterns = patterns.filter((x) => !x.negate); const searchPathMap = {}; for (const pattern of patterns) { const key = IS_WINDOWS$3 ? pattern.searchPath.toUpperCase() : pattern.searchPath; searchPathMap[key] = "candidate"; } const result = []; for (const pattern of patterns) { const key = IS_WINDOWS$3 ? pattern.searchPath.toUpperCase() : pattern.searchPath; if (searchPathMap[key] === "included") continue; let foundAncestor = false; let tempKey = key; let parent = pathHelper$2.dirname(tempKey); while (parent !== tempKey) { if (searchPathMap[parent]) { foundAncestor = true; break; } tempKey = parent; parent = pathHelper$2.dirname(tempKey); } if (!foundAncestor) { result.push(pattern.searchPath); searchPathMap[key] = "included"; } } return result; } exports.getSearchPaths = getSearchPaths; /** * Matches the patterns against the path */ function match(patterns, itemPath) { let result = internal_match_kind_1$2.MatchKind.None; for (const pattern of patterns) if (pattern.negate) result &= ~pattern.match(itemPath); else result |= pattern.match(itemPath); return result; } exports.match = match; /** * Checks whether to descend further into the directory */ function partialMatch(patterns, itemPath) { return patterns.some((x) => !x.negate && x.partialMatch(itemPath)); } exports.partialMatch = partialMatch; } }); //#endregion //#region node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/internal-path.js var require_internal_path = __commonJS({ "node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/internal-path.js"(exports) { var __createBinding$3 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ? !m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$3 = exports && exports.__setModuleDefault || (Object.create ? 
function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$3 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$3(result, mod, k); } __setModuleDefault$3(result, mod); return result; }; var __importDefault$1 = exports && exports.__importDefault || function(mod) { return mod && mod.__esModule ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.Path = void 0; const path$3 = __importStar$3(__require("path")); const pathHelper$1 = __importStar$3(require_internal_path_helper()); const assert_1$1 = __importDefault$1(__require("assert")); const IS_WINDOWS$2 = process.platform === "win32"; /** * Helper class for parsing paths into segments */ var Path = class { /** * Constructs a Path * @param itemPath Path or array of segments */ constructor(itemPath) { this.segments = []; if (typeof itemPath === "string") { (0, assert_1$1.default)(itemPath, `Parameter 'itemPath' must not be empty`); itemPath = pathHelper$1.safeTrimTrailingSeparator(itemPath); if (!pathHelper$1.hasRoot(itemPath)) this.segments = itemPath.split(path$3.sep); else { let remaining = itemPath; let dir = pathHelper$1.dirname(remaining); while (dir !== remaining) { const basename = path$3.basename(remaining); this.segments.unshift(basename); remaining = dir; dir = pathHelper$1.dirname(remaining); } this.segments.unshift(remaining); } } else { (0, assert_1$1.default)(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); for (let i = 0; i < itemPath.length; i++) { let segment = itemPath[i]; (0, assert_1$1.default)(segment, `Parameter 'itemPath' must not contain any empty segments`); segment = pathHelper$1.normalizeSeparators(itemPath[i]); if (i === 0 && pathHelper$1.hasRoot(segment)) { segment = pathHelper$1.safeTrimTrailingSeparator(segment); (0, assert_1$1.default)(segment === pathHelper$1.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } else { (0, assert_1$1.default)(!segment.includes(path$3.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } } } /** * Converts the path to its string representation */ toString() { let result = this.segments[0]; let skipSlash = result.endsWith(path$3.sep) || IS_WINDOWS$2 && /^[A-Z]:$/i.test(result); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) skipSlash = false; else result += path$3.sep; result += this.segments[i]; } return result; } }; exports.Path = Path; } }); //#endregion //#region node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/internal-pattern.js var require_internal_pattern = __commonJS({ "node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/internal-pattern.js"(exports) { var __createBinding$2 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ?
!m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$2 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$2 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$2(result, mod, k); } __setModuleDefault$2(result, mod); return result; }; var __importDefault = exports && exports.__importDefault || function(mod) { return mod && mod.__esModule ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); exports.Pattern = void 0; const os = __importStar$2(__require("os")); const path$2 = __importStar$2(__require("path")); const pathHelper = __importStar$2(require_internal_path_helper()); const assert_1 = __importDefault(__require("assert")); const minimatch_1 = require_minimatch(); const internal_match_kind_1$1 = require_internal_match_kind(); const internal_path_1 = require_internal_path(); const IS_WINDOWS$1 = process.platform === "win32"; var Pattern = class Pattern { constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { /** * Indicates whether matches should be excluded from the result set */ this.negate = false; let pattern; if (typeof patternOrNegate === "string") pattern = patternOrNegate.trim(); else { segments = segments || []; (0, assert_1.default)(segments.length, `Parameter 'segments' must not be empty`); const root = Pattern.getLiteral(segments[0]); (0, assert_1.default)(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); pattern = new internal_path_1.Path(segments).toString().trim(); if (patternOrNegate) pattern = `!${pattern}`; } while (pattern.startsWith("!")) { this.negate = !this.negate; pattern = pattern.substr(1).trim(); } pattern = Pattern.fixupPattern(pattern, homedir); this.segments = new internal_path_1.Path(pattern).segments; this.trailingSeparator = pathHelper.normalizeSeparators(pattern).endsWith(path$2.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); let foundGlob = false; const searchSegments = this.segments.map((x) => Pattern.getLiteral(x)).filter((x) => !foundGlob && !(foundGlob = x === "")); this.searchPath = new internal_path_1.Path(searchSegments).toString(); this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS$1 ? "i" : ""); this.isImplicitPattern = isImplicitPattern; const minimatchOptions = { dot: true, nobrace: true, nocase: IS_WINDOWS$1, nocomment: true, noext: true, nonegate: true }; pattern = IS_WINDOWS$1 ? pattern.replace(/\\/g, "/") : pattern; this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); } /** * Matches the pattern against the specified path */ match(itemPath) { if (this.segments[this.segments.length - 1] === "**") { itemPath = pathHelper.normalizeSeparators(itemPath); if (!itemPath.endsWith(path$2.sep) && this.isImplicitPattern === false) itemPath = `${itemPath}${path$2.sep}`; } else itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (this.minimatch.match(itemPath)) return this.trailingSeparator ?
internal_match_kind_1$1.MatchKind.Directory : internal_match_kind_1$1.MatchKind.All; return internal_match_kind_1$1.MatchKind.None; } /** * Indicates whether the pattern may match descendants of the specified path */ partialMatch(itemPath) { itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); if (pathHelper.dirname(itemPath) === itemPath) return this.rootRegExp.test(itemPath); return this.minimatch.matchOne(itemPath.split(IS_WINDOWS$1 ? /\\+/ : /\/+/), this.minimatch.set[0], true); } /** * Escapes glob patterns within a path */ static globEscape(s$1) { return (IS_WINDOWS$1 ? s$1 : s$1.replace(/\\/g, "\\\\")).replace(/(\[)(?=[^/]+\])/g, "[[]").replace(/\?/g, "[?]").replace(/\*/g, "[*]"); } /** * Normalizes slashes and ensures absolute root */ static fixupPattern(pattern, homedir) { (0, assert_1.default)(pattern, "pattern cannot be empty"); const literalSegments = new internal_path_1.Path(pattern).segments.map((x) => Pattern.getLiteral(x)); (0, assert_1.default)(literalSegments.every((x, i) => (x !== "." || i === 0) && x !== ".."), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); (0, assert_1.default)(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); pattern = pathHelper.normalizeSeparators(pattern); if (pattern === "." || pattern.startsWith(`.${path$2.sep}`)) pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); else if (pattern === "~" || pattern.startsWith(`~${path$2.sep}`)) { homedir = homedir || os.homedir(); (0, assert_1.default)(homedir, "Unable to determine HOME directory"); (0, assert_1.default)(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); pattern = Pattern.globEscape(homedir) + pattern.substr(1); } else if (IS_WINDOWS$1 && (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { let root = pathHelper.ensureAbsoluteRoot("C:\\dummy-root", pattern.substr(0, 2)); if (pattern.length > 2 && !root.endsWith("\\")) root += "\\"; pattern = Pattern.globEscape(root) + pattern.substr(2); } else if (IS_WINDOWS$1 && (pattern === "\\" || pattern.match(/^\\[^\\]/))) { let root = pathHelper.ensureAbsoluteRoot("C:\\dummy-root", "\\"); if (!root.endsWith("\\")) root += "\\"; pattern = Pattern.globEscape(root) + pattern.substr(1); } else pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); return pathHelper.normalizeSeparators(pattern); } /** * Attempts to unescape a pattern segment to create a literal path segment. * Otherwise returns empty string. 
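 *
 * Illustrative behavior (a sketch, not from the vendored sources):
 *
 * @example
 * ```typescript
 * Pattern.getLiteral('hello');   // => 'hello'  (no glob characters)
 * Pattern.getLiteral('[h]i');    // => 'hi'     (single-char set unescaped)
 * Pattern.getLiteral('*.txt');   // => ''       ('*' keeps glob meaning)
 * Pattern.getLiteral('[ab].js'); // => ''       (multi-char set stays a glob)
 * ```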
*/ static getLiteral(segment) { let literal = ""; for (let i = 0; i < segment.length; i++) { const c = segment[i]; if (c === "\\" && !IS_WINDOWS$1 && i + 1 < segment.length) { literal += segment[++i]; continue; } else if (c === "*" || c === "?") return ""; else if (c === "[" && i + 1 < segment.length) { let set = ""; let closed = -1; for (let i2 = i + 1; i2 < segment.length; i2++) { const c2 = segment[i2]; if (c2 === "\\" && !IS_WINDOWS$1 && i2 + 1 < segment.length) { set += segment[++i2]; continue; } else if (c2 === "]") { closed = i2; break; } else set += c2; } if (closed >= 0) { if (set.length > 1) return ""; if (set) { literal += set; i = closed; continue; } } } literal += c; } return literal; } /** * Escapes regexp special characters * https://javascript.info/regexp-escaping */ static regExpEscape(s$1) { return s$1.replace(/[[\\^$.|?*+()]/g, "\\$&"); } }; exports.Pattern = Pattern; } }); //#endregion //#region node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/internal-search-state.js var require_internal_search_state = __commonJS({ "node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/internal-search-state.js"(exports) { Object.defineProperty(exports, "__esModule", { value: true }); exports.SearchState = void 0; var SearchState = class { constructor(path$13, level) { this.path = path$13; this.level = level; } }; exports.SearchState = SearchState; } }); //#endregion //#region node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/internal-globber.js var require_internal_globber = __commonJS({ "node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/internal-globber.js"(exports) { var __createBinding$1 = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ? !m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault$1 = exports && exports.__setModuleDefault || (Object.create ? function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar$1 = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding$1(result, mod, k); } __setModuleDefault$1(result, mod); return result; }; var __awaiter$2 = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __asyncValues$1 = exports && exports.__asyncValues || function(o) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var m$1 = o[Symbol.asyncIterator], i; return m$1 ? 
m$1.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { return this; }, i); function verb(n) { i[n] = o[n] && function(v) { return new Promise(function(resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function settle(resolve, reject, d$1, v) { Promise.resolve(v).then(function(v$1) { resolve({ value: v$1, done: d$1 }); }, reject); } }; var __await = exports && exports.__await || function(v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }; var __asyncGenerator = exports && exports.__asyncGenerator || function(thisArg, _arguments, generator) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var g = generator.apply(thisArg, _arguments || []), i, q = []; return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { return this; }, i; function verb(n) { if (g[n]) i[n] = function(v) { return new Promise(function(a, b) { q.push([ n, v, a, b ]) > 1 || resume$1(n, v); }); }; } function resume$1(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } function fulfill(value) { resume$1("next", value); } function reject(value) { resume$1("throw", value); } function settle(f, v) { if (f(v), q.shift(), q.length) resume$1(q[0][0], q[0][1]); } }; Object.defineProperty(exports, "__esModule", { value: true }); exports.DefaultGlobber = void 0; const core$1 = __importStar$1(require_core()); const fs$1 = __importStar$1(__require("fs")); const globOptionsHelper = __importStar$1(require_internal_glob_options_helper()); const path$1 = __importStar$1(__require("path")); const patternHelper = __importStar$1(require_internal_pattern_helper()); const internal_match_kind_1 = require_internal_match_kind(); const internal_pattern_1 = require_internal_pattern(); const internal_search_state_1 = require_internal_search_state(); const IS_WINDOWS = process.platform === "win32"; var DefaultGlobber = class DefaultGlobber { constructor(options) { this.patterns = []; this.searchPaths = []; this.options = globOptionsHelper.getOptions(options); } getSearchPaths() { return this.searchPaths.slice(); } glob() { var _a$2, e_1, _b$1, _c$1; return __awaiter$2(this, void 0, void 0, function* () { const result = []; try { for (var _d$1 = true, _e = __asyncValues$1(this.globGenerator()), _f; _f = yield _e.next(), _a$2 = _f.done, !_a$2; _d$1 = true) { _c$1 = _f.value; _d$1 = false; const itemPath = _c$1; result.push(itemPath); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (!_d$1 && !_a$2 && (_b$1 = _e.return)) yield _b$1.call(_e); } finally { if (e_1) throw e_1.error; } } return result; }); } globGenerator() { return __asyncGenerator(this, arguments, function* globGenerator_1() { const options = globOptionsHelper.getOptions(this.options); const patterns = []; for (const pattern of this.patterns) { patterns.push(pattern); if (options.implicitDescendants && (pattern.trailingSeparator || pattern.segments[pattern.segments.length - 1] !== "**")) patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat("**"))); } const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { core$1.debug(`Search path '${searchPath}'`); try { yield __await(fs$1.promises.lstat(searchPath)); } catch (err) { 
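// A search path that no longer exists is skipped (ENOENT); any other lstat failure is re-thrown.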
if (err.code === "ENOENT") continue; throw err; } stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); } const traversalChain = []; while (stack.length) { const item = stack.pop(); const match$2 = patternHelper.match(patterns, item.path); const partialMatch$2 = !!match$2 || patternHelper.partialMatch(patterns, item.path); if (!match$2 && !partialMatch$2) continue; const stats = yield __await( DefaultGlobber.stat(item, options, traversalChain) // Broken symlink, or symlink cycle detected, or no longer exists ); if (!stats) continue; if (options.excludeHiddenFiles && path$1.basename(item.path).match(/^\./)) continue; if (stats.isDirectory()) { if (match$2 & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) yield yield __await(item.path); else if (!partialMatch$2) continue; const childLevel = item.level + 1; const childItems = (yield __await(fs$1.promises.readdir(item.path))).map((x) => new internal_search_state_1.SearchState(path$1.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } else if (match$2 & internal_match_kind_1.MatchKind.File) yield yield __await(item.path); } }); } /** * Constructs a DefaultGlobber */ static create(patterns, options) { return __awaiter$2(this, void 0, void 0, function* () { const result = new DefaultGlobber(options); if (IS_WINDOWS) { patterns = patterns.replace(/\r\n/g, "\n"); patterns = patterns.replace(/\r/g, "\n"); } const lines = patterns.split("\n").map((x) => x.trim()); for (const line of lines) if (!line || line.startsWith("#")) continue; else result.patterns.push(new internal_pattern_1.Pattern(line)); result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); return result; }); } static stat(item, options, traversalChain) { return __awaiter$2(this, void 0, void 0, function* () { let stats; if (options.followSymbolicLinks) try { stats = yield fs$1.promises.stat(item.path); } catch (err) { if (err.code === "ENOENT") { if (options.omitBrokenSymbolicLinks) { core$1.debug(`Broken symlink '${item.path}'`); return void 0; } throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); } throw err; } else stats = yield fs$1.promises.lstat(item.path); if (stats.isDirectory() && options.followSymbolicLinks) { const realPath = yield fs$1.promises.realpath(item.path); while (traversalChain.length >= item.level) traversalChain.pop(); if (traversalChain.some((x) => x === realPath)) { core$1.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return void 0; } traversalChain.push(realPath); } return stats; }); } }; exports.DefaultGlobber = DefaultGlobber; } }); //#endregion //#region node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/internal-hash-files.js var require_internal_hash_files = __commonJS({ "node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/internal-hash-files.js"(exports) { var __createBinding = exports && exports.__createBinding || (Object.create ? function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m$1, k); if (!desc || ("get" in desc ? !m$1.__esModule : desc.writable || desc.configurable)) desc = { enumerable: true, get: function() { return m$1[k]; } }; Object.defineProperty(o, k2, desc); } : function(o, m$1, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m$1[k]; }); var __setModuleDefault = exports && exports.__setModuleDefault || (Object.create ? 
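// tslib-style interop helper: __setModuleDefault attaches the CommonJS module as the namespace object's "default" export.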
function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function(o, v) { o["default"] = v; }); var __importStar = exports && exports.__importStar || function(mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); } __setModuleDefault(result, mod); return result; }; var __awaiter$1 = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __asyncValues = exports && exports.__asyncValues || function(o) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var m$1 = o[Symbol.asyncIterator], i; return m$1 ? m$1.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function() { return this; }, i); function verb(n) { i[n] = o[n] && function(v) { return new Promise(function(resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function settle(resolve, reject, d$1, v) { Promise.resolve(v).then(function(v$1) { resolve({ value: v$1, done: d$1 }); }, reject); } }; Object.defineProperty(exports, "__esModule", { value: true }); exports.hashFiles = void 0; const crypto = __importStar(__require("crypto")); const core = __importStar(require_core()); const fs = __importStar(__require("fs")); const stream = __importStar(__require("stream")); const util = __importStar(__require("util")); const path = __importStar(__require("path")); function hashFiles$1(globber, currentWorkspace, verbose = false) { var _a$2, e_1, _b$1, _c$1; var _d$1; return __awaiter$1(this, void 0, void 0, function* () { const writeDelegate = verbose ? core.info : core.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d$1 = process.env["GITHUB_WORKSPACE"]) !== null && _d$1 !== void 0 ? 
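// Workspace resolution order: explicit currentWorkspace argument, then GITHUB_WORKSPACE, then process.cwd().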
_d$1 : process.cwd(); const result = crypto.createHash("sha256"); let count = 0; try { for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a$2 = _g.done, !_a$2; _e = true) { _c$1 = _g.value; _e = false; const file = _c$1; writeDelegate(file); if (!file.startsWith(`${githubWorkspace}${path.sep}`)) { writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); continue; } if (fs.statSync(file).isDirectory()) { writeDelegate(`Skip directory '${file}'.`); continue; } const hash = crypto.createHash("sha256"); const pipeline = util.promisify(stream.pipeline); yield pipeline(fs.createReadStream(file), hash); result.write(hash.digest()); count++; if (!hasMatch) hasMatch = true; } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (!_e && !_a$2 && (_b$1 = _f.return)) yield _b$1.call(_f); } finally { if (e_1) throw e_1.error; } } result.end(); if (hasMatch) { writeDelegate(`Found ${count} files to hash.`); return result.digest("hex"); } else { writeDelegate(`No matches found for glob`); return ""; } }); } exports.hashFiles = hashFiles$1; } });

//#endregion
//#region node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/glob.js
var require_glob = __commonJS({ "node_modules/.deno/@actions+glob@0.5.0/node_modules/@actions/glob/lib/glob.js"(exports) {
	var __awaiter = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { resolve(value); }); } return new (P || (P = Promise))(function(resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); };
	Object.defineProperty(exports, "__esModule", { value: true });
	exports.hashFiles = exports.create = void 0;
	const internal_globber_1 = require_internal_globber();
	const internal_hash_files_1 = require_internal_hash_files();
	/**
	* Constructs a globber
	*
	* @param patterns Patterns separated by newlines
	* @param options Glob options
	*/
	function create(patterns, options) { return __awaiter(this, void 0, void 0, function* () { return yield internal_globber_1.DefaultGlobber.create(patterns, options); }); }
	exports.create = create;
	/**
	* Computes the sha256 hash of a glob
	*
	* @param patterns Patterns separated by newlines
	* @param currentWorkspace Workspace used when matching files
	* @param options Glob options
	* @param verbose Enables verbose logging
	*/
	function hashFiles(patterns, currentWorkspace = "", options, verbose = false) { return __awaiter(this, void 0, void 0, function* () { let followSymbolicLinks = true; if (options && typeof options.followSymbolicLinks === "boolean") followSymbolicLinks = options.followSymbolicLinks; const globber = yield create(patterns, { followSymbolicLinks }); return (0, internal_hash_files_1.hashFiles)(globber, currentWorkspace, verbose); }); }
	exports.hashFiles = hashFiles;
} });
var import_glob = __toESM(require_glob(), 1);

//#endregion
//#region src/cache.ts
const state = {
	DENO_DIR: "DENO_DIR",
	CACHE_HIT: "CACHE_HIT",
	CACHE_SAVE: "CACHE_SAVE"
};
async function saveCache() {
	if (!import_cache.isFeatureAvailable()) {
		import_core.warning("Caching is not available. Caching is skipped.");
		return;
	}
	const denoDir = import_core.getState(state.DENO_DIR);
	const saveKey = import_core.getState(state.CACHE_SAVE);
	if (!denoDir || !saveKey) {
		import_core.info("Caching is not enabled. Caching is skipped.");
		return;
	} else if (import_core.getState(state.CACHE_HIT) === "true") {
		import_core.info(`Cache hit occurred on the primary key "${saveKey}", not saving cache.`);
		return;
	}
	await import_cache.saveCache([denoDir], saveKey);
	import_core.info(`Cache saved with key: "${saveKey}".`);
}
async function restoreCache(cacheHash) {
	try {
		const denoDir = await resolveDenoDir();
		import_core.saveState(state.DENO_DIR, denoDir);
		if (cacheHash.length === 0) cacheHash = await resolveDefaultCacheKey();
		const { GITHUB_JOB, RUNNER_OS, RUNNER_ARCH } = process$1.env;
		const restoreKey = `deno-cache-${RUNNER_OS}-${RUNNER_ARCH}`;
		const primaryKey = `${restoreKey}-${GITHUB_JOB}-${cacheHash}`;
		import_core.saveState(state.CACHE_SAVE, primaryKey);
		const loadedCacheKey = await import_cache.restoreCache([denoDir], primaryKey, [restoreKey]);
		const cacheHit = primaryKey === loadedCacheKey;
		import_core.setOutput("cache-hit", cacheHit);
		import_core.saveState(state.CACHE_HIT, cacheHit);
		const message = loadedCacheKey ? `Cache key used: "${loadedCacheKey}".` : `No cache found for restore key: "${restoreKey}".`;
		import_core.info(message);
	} catch (err) {
		import_core.warning(new Error("Failed to restore cache. Continuing without cache."));
		import_core.warning(err);
	}
}
function resolveDefaultCacheKey() {
	return (0, import_glob.hashFiles)("**/deno.lock", process$1.env.GITHUB_WORKSPACE);
}
async function resolveDenoDir() {
	const { DENO_DIR } = process$1.env;
	if (DENO_DIR) return DENO_DIR;
	const output = await exec("deno info --json");
	const info = JSON.parse(output);
	if (typeof info.denoDir !== "string") throw new Error("`deno info --json` output did not contain a denoDir property. Maybe try updating this action or your Deno version if either are old.");
	return info.denoDir;
}
async function exec(command) {
	const { exec: exec$2 } = await import("node:child_process");
	return await new Promise((res, rej) => {
		exec$2(command, (err, stdout) => err ? rej(err) : res(stdout));
	});
}

//#endregion
export { restoreCache, saveCache };
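/**
 * Usage sketch (illustrative only, not part of this bundle). The action's
 * main and post entrypoints are assumed to wire these exports up roughly as
 * follows; the file name "cache.mjs" and the "cache-hash" input name are
 * hypothetical placeholders, not confirmed by this file.
 *
 *   // main step: restore the DENO_DIR cache, keyed by an optional hash input
 *   import { restoreCache } from "./cache.mjs";
 *   await restoreCache(core.getInput("cache-hash"));
 *
 *   // post step: save the cache unless the primary key already hit
 *   import { saveCache } from "./cache.mjs";
 *   await saveCache();
 */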